From fab5f47a8656ee5429b72e95d13613757948b464 Mon Sep 17 00:00:00 2001 From: Christoph Knittel Date: Sat, 17 Jan 2026 09:15:45 +0100 Subject: [PATCH 1/3] Remove legacy build system (bsb, ninja) --- .devcontainer/.dockerignore | 6 - .devcontainer/Dockerfile | 2 - .github/workflows/ci.yml | 37 - .gitignore | 3 - AGENTS.md | 14 +- CONTRIBUTING.md | 12 +- CREDITS.md | 4 - Makefile | 41 +- README.md | 3 - .../examples/example-project/src/Json.res | 2 - analysis/examples/larger-project/.merlin | 14 - .../examples/workspace-project/.gitignore | 1 - analysis/vendor/json/Json.ml | 2 - biome.json | 1 - cli/bstracing.js | 270 -- cli/common/bins.js | 3 - cli/common/bsb.js | 506 --- cli/rescript-legacy.js | 135 - cli/rescript-legacy/dump.js | 57 - cli/rescript-legacy/format.js | 255 -- compiler/bsb/bsb.md | 201 -- compiler/bsb/bsb_arg.ml | 122 - compiler/bsb/bsb_arg.mli | 46 - compiler/bsb/bsb_build_schemas.ml | 66 - compiler/bsb/bsb_build_util.ml | 211 -- compiler/bsb/bsb_build_util.mli | 87 - compiler/bsb/bsb_clean.ml | 62 - compiler/bsb/bsb_clean.mli | 31 - compiler/bsb/bsb_config.ml | 72 - compiler/bsb/bsb_config.mli | 49 - compiler/bsb/bsb_config_load.ml | 26 - compiler/bsb/bsb_config_load.mli | 2 - compiler/bsb/bsb_config_parse.ml | 330 -- compiler/bsb/bsb_config_parse.mli | 32 - compiler/bsb/bsb_config_types.ml | 60 - compiler/bsb/bsb_db_encode.ml | 98 - compiler/bsb/bsb_db_encode.mli | 27 - compiler/bsb/bsb_db_util.ml | 101 - compiler/bsb/bsb_db_util.mli | 42 - compiler/bsb/bsb_exception.ml | 92 - compiler/bsb/bsb_exception.mli | 44 - compiler/bsb/bsb_file_groups.ml | 73 - compiler/bsb/bsb_file_groups.mli | 56 - compiler/bsb/bsb_global_paths.ml | 58 - compiler/bsb/bsb_global_paths.mli | 33 - compiler/bsb/bsb_jsx.ml | 57 - compiler/bsb/bsb_log.ml | 98 - compiler/bsb/bsb_log.mli | 45 - compiler/bsb/bsb_namespace_map_gen.ml | 57 - compiler/bsb/bsb_namespace_map_gen.mli | 29 - compiler/bsb/bsb_ninja_check.ml | 157 - compiler/bsb/bsb_ninja_check.mli | 72 - 
compiler/bsb/bsb_ninja_file_groups.ml | 117 - compiler/bsb/bsb_ninja_file_groups.mli | 32 - compiler/bsb/bsb_ninja_gen.ml | 227 -- compiler/bsb/bsb_ninja_gen.mli | 32 - compiler/bsb/bsb_ninja_global_vars.ml | 32 - compiler/bsb/bsb_ninja_regen.ml | 100 - compiler/bsb/bsb_ninja_regen.mli | 35 - compiler/bsb/bsb_ninja_rule.ml | 229 -- compiler/bsb/bsb_ninja_rule.mli | 83 - compiler/bsb/bsb_ninja_targets.ml | 55 - compiler/bsb/bsb_ninja_targets.mli | 43 - compiler/bsb/bsb_package_kind.ml | 39 - compiler/bsb/bsb_package_specs.ml | 207 -- compiler/bsb/bsb_package_specs.mli | 34 - compiler/bsb/bsb_parse_sources.ml | 438 --- compiler/bsb/bsb_parse_sources.mli | 44 - compiler/bsb/bsb_pkg.ml | 95 - compiler/bsb/bsb_pkg.mli | 38 - compiler/bsb/bsb_pkg_types.ml | 88 - compiler/bsb/bsb_pkg_types.mli | 39 - compiler/bsb/bsb_real_path.ml | 52 - compiler/bsb/bsb_real_path.mli | 25 - compiler/bsb/bsb_regex.ml | 59 - compiler/bsb/bsb_regex.mli | 27 - compiler/bsb/bsb_spec_set.ml | 81 - compiler/bsb/bsb_spec_set.mli | 38 - compiler/bsb/bsb_unix.ml | 84 - compiler/bsb/bsb_unix.mli | 31 - compiler/bsb/bsb_warning.ml | 98 - compiler/bsb/bsb_warning.mli | 44 - compiler/bsb/bsb_watcher_gen.ml | 49 - compiler/bsb/bsb_watcher_gen.mli | 33 - compiler/bsb/bsb_world.ml | 94 - compiler/bsb/bsb_world.mli | 26 - compiler/bsb/data_format.md | 24 - compiler/bsb/dune | 6 - compiler/bsb_exe/dune | 14 - compiler/bsb_exe/rescript_main.ml | 260 -- compiler/bsb_exe/rescript_main.mli | 23 - compiler/bsb_helper/bsb_db_decode.ml | 119 - compiler/bsb_helper/bsb_db_decode.mli | 49 - compiler/bsb_helper/bsb_helper_depfile_gen.ml | 172 - .../bsb_helper/bsb_helper_depfile_gen.mli | 36 - compiler/bsb_helper/dune | 6 - compiler/bsb_helper_exe/bsb_helper_main.ml | 53 - compiler/bsb_helper_exe/bsb_helper_main.mli | 34 - compiler/bsb_helper_exe/dune | 14 - compiler/dune | 4 - compiler/ext/bsb_db.ml | 46 - compiler/ext/bsb_db.mli | 60 - compiler/ext/ext_buffer.ml | 13 - compiler/ext/ext_buffer.mli | 2 - 
compiler/ext/ext_filename.ml | 11 - compiler/ext/ext_filename.mli | 2 - compiler/ext/ext_json_noloc.ml | 1 - compiler/ext/literals.ml | 4 - compiler/gentype/GenTypeConfig.ml | 2 +- compiler/syntax/src/res_scanner.ml | 1 - docs/docson/build-schema.json | 45 +- docs/reactive_reanalyze_design.md | 2 +- lib_dev/paths.js | 5 - ninja/.clang-format | 25 - ninja/.gitignore | 40 - ninja/COPYING | 202 -- ninja/HACKING.md | 252 -- ninja/README | 21 - ninja/bootstrap.py | 23 - ninja/configure.py | 703 ---- ninja/doc/README.md | 11 - ninja/doc/dblatex.xsl | 7 - ninja/doc/docbook.xsl | 34 - ninja/doc/doxygen.config | 1250 ------- ninja/doc/manual.asciidoc | 1158 ------- ninja/doc/style.css | 29 - ninja/misc/afl-fuzz-tokens/kw_build | 1 - ninja/misc/afl-fuzz-tokens/kw_default | 1 - ninja/misc/afl-fuzz-tokens/kw_include | 1 - ninja/misc/afl-fuzz-tokens/kw_pool | 1 - ninja/misc/afl-fuzz-tokens/kw_rule | 1 - ninja/misc/afl-fuzz-tokens/kw_subninja | 1 - ninja/misc/afl-fuzz-tokens/misc_a | 1 - ninja/misc/afl-fuzz-tokens/misc_b | 1 - ninja/misc/afl-fuzz-tokens/misc_colon | 1 - ninja/misc/afl-fuzz-tokens/misc_cont | 1 - ninja/misc/afl-fuzz-tokens/misc_dollar | 1 - ninja/misc/afl-fuzz-tokens/misc_eq | 1 - ninja/misc/afl-fuzz-tokens/misc_indent | 1 - ninja/misc/afl-fuzz-tokens/misc_pipe | 1 - ninja/misc/afl-fuzz-tokens/misc_pipepipe | 1 - ninja/misc/afl-fuzz-tokens/misc_space | 1 - ninja/misc/afl-fuzz/build.ninja | 5 - ninja/misc/bash-completion | 57 - ninja/misc/ci.py | 41 - ninja/misc/inherited-fds.ninja | 23 - ninja/misc/long-slow-build.ninja | 38 - ninja/misc/measure.py | 56 - ninja/misc/ninja-mode.el | 85 - ninja/misc/ninja.vim | 87 - ninja/misc/ninja_syntax.py | 183 - ninja/misc/ninja_syntax_test.py | 191 - ninja/misc/output_test.py | 103 - ninja/misc/packaging/ninja.spec | 42 - ninja/misc/packaging/rpmbuild.sh | 29 - ninja/misc/write_fake_manifests.py | 272 -- ninja/misc/zsh-completion | 72 - ninja/src/browse.cc | 78 - ninja/src/browse.h | 28 - ninja/src/browse.py | 230 -- 
ninja/src/build.cc | 1167 ------- ninja/src/build.h | 340 -- ninja/src/build_log.cc | 424 --- ninja/src/build_log.h | 93 - ninja/src/build_log_perftest.cc | 149 - ninja/src/build_log_test.cc | 308 -- ninja/src/build_test.cc | 3079 ----------------- ninja/src/canon_perftest.cc | 57 - ninja/src/clean.cc | 321 -- ninja/src/clean.h | 113 - ninja/src/clean_test.cc | 456 --- ninja/src/clparser.cc | 126 - ninja/src/clparser.h | 52 - ninja/src/clparser_perftest.cc | 157 - ninja/src/clparser_test.cc | 117 - ninja/src/debug_flags.cc | 21 - ninja/src/debug_flags.h | 33 - ninja/src/depfile_parser.cc | 339 -- ninja/src/depfile_parser.h | 52 - ninja/src/depfile_parser.in.cc | 191 - ninja/src/depfile_parser_perftest.cc | 77 - ninja/src/depfile_parser_test.cc | 311 -- ninja/src/deps_log.cc | 419 --- ninja/src/deps_log.h | 125 - ninja/src/deps_log_test.cc | 479 --- ninja/src/disk_interface.cc | 290 -- ninja/src/disk_interface.h | 100 - ninja/src/disk_interface_test.cc | 322 -- ninja/src/dyndep.cc | 83 - ninja/src/dyndep.h | 70 - ninja/src/dyndep_parser.cc | 95 - ninja/src/dyndep_parser.h | 36 - ninja/src/dyndep_parser_test.cc | 512 --- ninja/src/edit_distance.cc | 69 - ninja/src/edit_distance.h | 25 - ninja/src/edit_distance_test.cc | 48 - ninja/src/eval_env.cc | 147 - ninja/src/eval_env.h | 110 - ninja/src/exit_status.h | 24 - ninja/src/gen_doxygen_mainpage.sh | 92 - ninja/src/getopt.c | 410 --- ninja/src/getopt.h | 57 - ninja/src/graph.cc | 685 ---- ninja/src/graph.h | 323 -- ninja/src/graph_test.cc | 858 ----- ninja/src/graphviz.cc | 88 - ninja/src/graphviz.h | 40 - ninja/src/hash_collision_bench.cc | 63 - ninja/src/hash_map.h | 123 - ninja/src/includes_normalize-win32.cc | 209 -- ninja/src/includes_normalize.h | 39 - ninja/src/includes_normalize_test.cc | 167 - ninja/src/inline.sh | 25 - ninja/src/lexer.cc | 980 ------ ninja/src/lexer.h | 109 - ninja/src/lexer.in.cc | 316 -- ninja/src/lexer_test.cc | 96 - ninja/src/line_printer.cc | 166 - ninja/src/line_printer.h | 77 - 
ninja/src/manifest_parser.cc | 411 --- ninja/src/manifest_parser.h | 71 - ninja/src/manifest_parser_perftest.cc | 118 - ninja/src/manifest_parser_test.cc | 1157 ------- ninja/src/metrics.cc | 129 - ninja/src/metrics.h | 96 - ninja/src/minidump-win32.cc | 87 - ninja/src/msvc_helper-win32.cc | 106 - ninja/src/msvc_helper.h | 33 - ninja/src/msvc_helper_main-win32.cc | 148 - ninja/src/msvc_helper_test.cc | 39 - ninja/src/ninja.cc | 1429 -------- ninja/src/ninja_test.cc | 160 - ninja/src/parser.cc | 51 - ninja/src/parser.h | 50 - ninja/src/state.cc | 214 -- ninja/src/state.h | 134 - ninja/src/state_test.cc | 46 - ninja/src/string_piece.h | 97 - ninja/src/string_piece_util.cc | 78 - ninja/src/string_piece_util.h | 34 - ninja/src/string_piece_util_test.cc | 129 - ninja/src/subprocess-posix.cc | 351 -- ninja/src/subprocess-win32.cc | 297 -- ninja/src/subprocess.h | 114 - ninja/src/subprocess_test.cc | 261 -- ninja/src/test.cc | 235 -- ninja/src/test.h | 184 - ninja/src/timestamp.h | 33 - ninja/src/util.cc | 684 ---- ninja/src/util.h | 133 - ninja/src/util_test.cc | 430 --- ninja/src/version.cc | 54 - ninja/src/version.h | 32 - ninja/src/win32port.h | 39 - package.json | 5 +- packages/@rescript/darwin-arm64/bin.d.ts | 3 - packages/@rescript/darwin-arm64/bin.js | 3 - packages/@rescript/darwin-arm64/package.json | 3 - packages/@rescript/darwin-x64/bin.d.ts | 3 - packages/@rescript/darwin-x64/bin.js | 3 - packages/@rescript/darwin-x64/package.json | 3 - packages/@rescript/linux-arm64/bin.d.ts | 3 - packages/@rescript/linux-arm64/bin.js | 3 - packages/@rescript/linux-arm64/package.json | 3 - packages/@rescript/linux-x64/bin.d.ts | 3 - packages/@rescript/linux-x64/bin.js | 3 - packages/@rescript/linux-x64/package.json | 3 - packages/@rescript/win32-x64/bin.d.ts | 3 - packages/@rescript/win32-x64/bin.js | 3 - packages/@rescript/win32-x64/package.json | 3 - packages/artifacts.json | 6 - packages/playground/.gitignore | 1 - rewatch/src/cli.rs | 3 +- rewatch/testrepo/.gitignore | 1 
- scripts/buildNinjaBinary.js | 19 - scripts/copyExes.js | 12 +- scripts/test.js | 10 +- tests/build_tests/cli_help/input.js | 3 +- tests/ounit_tests/dune | 2 +- tests/ounit_tests/ounit_bsb_pkg_tests.ml | 105 - tests/ounit_tests/ounit_bsb_regex_tests.ml | 180 - tests/ounit_tests/ounit_string_tests.ml | 3 - tests/ounit_tests/ounit_tests_main.ml | 2 - tests/tools_tests/package.json | 2 +- yarn.lock | 2 - 285 files changed, 62 insertions(+), 36914 deletions(-) delete mode 100644 analysis/examples/larger-project/.merlin delete mode 100755 cli/bstracing.js delete mode 100644 cli/common/bsb.js delete mode 100755 cli/rescript-legacy.js delete mode 100644 cli/rescript-legacy/dump.js delete mode 100644 cli/rescript-legacy/format.js delete mode 100644 compiler/bsb/bsb.md delete mode 100644 compiler/bsb/bsb_arg.ml delete mode 100644 compiler/bsb/bsb_arg.mli delete mode 100644 compiler/bsb/bsb_build_schemas.ml delete mode 100644 compiler/bsb/bsb_build_util.ml delete mode 100644 compiler/bsb/bsb_build_util.mli delete mode 100644 compiler/bsb/bsb_clean.ml delete mode 100644 compiler/bsb/bsb_clean.mli delete mode 100644 compiler/bsb/bsb_config.ml delete mode 100644 compiler/bsb/bsb_config.mli delete mode 100644 compiler/bsb/bsb_config_load.ml delete mode 100644 compiler/bsb/bsb_config_load.mli delete mode 100644 compiler/bsb/bsb_config_parse.ml delete mode 100644 compiler/bsb/bsb_config_parse.mli delete mode 100644 compiler/bsb/bsb_config_types.ml delete mode 100644 compiler/bsb/bsb_db_encode.ml delete mode 100644 compiler/bsb/bsb_db_encode.mli delete mode 100644 compiler/bsb/bsb_db_util.ml delete mode 100644 compiler/bsb/bsb_db_util.mli delete mode 100644 compiler/bsb/bsb_exception.ml delete mode 100644 compiler/bsb/bsb_exception.mli delete mode 100644 compiler/bsb/bsb_file_groups.ml delete mode 100644 compiler/bsb/bsb_file_groups.mli delete mode 100644 compiler/bsb/bsb_global_paths.ml delete mode 100644 compiler/bsb/bsb_global_paths.mli delete mode 100644 
compiler/bsb/bsb_jsx.ml delete mode 100644 compiler/bsb/bsb_log.ml delete mode 100644 compiler/bsb/bsb_log.mli delete mode 100644 compiler/bsb/bsb_namespace_map_gen.ml delete mode 100644 compiler/bsb/bsb_namespace_map_gen.mli delete mode 100644 compiler/bsb/bsb_ninja_check.ml delete mode 100644 compiler/bsb/bsb_ninja_check.mli delete mode 100644 compiler/bsb/bsb_ninja_file_groups.ml delete mode 100644 compiler/bsb/bsb_ninja_file_groups.mli delete mode 100644 compiler/bsb/bsb_ninja_gen.ml delete mode 100644 compiler/bsb/bsb_ninja_gen.mli delete mode 100644 compiler/bsb/bsb_ninja_global_vars.ml delete mode 100644 compiler/bsb/bsb_ninja_regen.ml delete mode 100644 compiler/bsb/bsb_ninja_regen.mli delete mode 100644 compiler/bsb/bsb_ninja_rule.ml delete mode 100644 compiler/bsb/bsb_ninja_rule.mli delete mode 100644 compiler/bsb/bsb_ninja_targets.ml delete mode 100644 compiler/bsb/bsb_ninja_targets.mli delete mode 100644 compiler/bsb/bsb_package_kind.ml delete mode 100644 compiler/bsb/bsb_package_specs.ml delete mode 100644 compiler/bsb/bsb_package_specs.mli delete mode 100644 compiler/bsb/bsb_parse_sources.ml delete mode 100644 compiler/bsb/bsb_parse_sources.mli delete mode 100644 compiler/bsb/bsb_pkg.ml delete mode 100644 compiler/bsb/bsb_pkg.mli delete mode 100644 compiler/bsb/bsb_pkg_types.ml delete mode 100644 compiler/bsb/bsb_pkg_types.mli delete mode 100644 compiler/bsb/bsb_real_path.ml delete mode 100644 compiler/bsb/bsb_real_path.mli delete mode 100644 compiler/bsb/bsb_regex.ml delete mode 100644 compiler/bsb/bsb_regex.mli delete mode 100644 compiler/bsb/bsb_spec_set.ml delete mode 100644 compiler/bsb/bsb_spec_set.mli delete mode 100644 compiler/bsb/bsb_unix.ml delete mode 100644 compiler/bsb/bsb_unix.mli delete mode 100644 compiler/bsb/bsb_warning.ml delete mode 100644 compiler/bsb/bsb_warning.mli delete mode 100644 compiler/bsb/bsb_watcher_gen.ml delete mode 100644 compiler/bsb/bsb_watcher_gen.mli delete mode 100644 compiler/bsb/bsb_world.ml delete mode 
100644 compiler/bsb/bsb_world.mli delete mode 100644 compiler/bsb/data_format.md delete mode 100644 compiler/bsb/dune delete mode 100644 compiler/bsb_exe/dune delete mode 100644 compiler/bsb_exe/rescript_main.ml delete mode 100644 compiler/bsb_exe/rescript_main.mli delete mode 100644 compiler/bsb_helper/bsb_db_decode.ml delete mode 100644 compiler/bsb_helper/bsb_db_decode.mli delete mode 100644 compiler/bsb_helper/bsb_helper_depfile_gen.ml delete mode 100644 compiler/bsb_helper/bsb_helper_depfile_gen.mli delete mode 100644 compiler/bsb_helper/dune delete mode 100644 compiler/bsb_helper_exe/bsb_helper_main.ml delete mode 100644 compiler/bsb_helper_exe/bsb_helper_main.mli delete mode 100644 compiler/bsb_helper_exe/dune delete mode 100644 compiler/ext/bsb_db.ml delete mode 100644 compiler/ext/bsb_db.mli delete mode 100644 ninja/.clang-format delete mode 100644 ninja/.gitignore delete mode 100644 ninja/COPYING delete mode 100644 ninja/HACKING.md delete mode 100644 ninja/README delete mode 100755 ninja/bootstrap.py delete mode 100755 ninja/configure.py delete mode 100644 ninja/doc/README.md delete mode 100644 ninja/doc/dblatex.xsl delete mode 100644 ninja/doc/docbook.xsl delete mode 100644 ninja/doc/doxygen.config delete mode 100644 ninja/doc/manual.asciidoc delete mode 100644 ninja/doc/style.css delete mode 100644 ninja/misc/afl-fuzz-tokens/kw_build delete mode 100644 ninja/misc/afl-fuzz-tokens/kw_default delete mode 100644 ninja/misc/afl-fuzz-tokens/kw_include delete mode 100644 ninja/misc/afl-fuzz-tokens/kw_pool delete mode 100644 ninja/misc/afl-fuzz-tokens/kw_rule delete mode 100644 ninja/misc/afl-fuzz-tokens/kw_subninja delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_a delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_b delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_colon delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_cont delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_dollar delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_eq delete mode 100644 
ninja/misc/afl-fuzz-tokens/misc_indent delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_pipe delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_pipepipe delete mode 100644 ninja/misc/afl-fuzz-tokens/misc_space delete mode 100644 ninja/misc/afl-fuzz/build.ninja delete mode 100644 ninja/misc/bash-completion delete mode 100755 ninja/misc/ci.py delete mode 100644 ninja/misc/inherited-fds.ninja delete mode 100644 ninja/misc/long-slow-build.ninja delete mode 100755 ninja/misc/measure.py delete mode 100644 ninja/misc/ninja-mode.el delete mode 100644 ninja/misc/ninja.vim delete mode 100644 ninja/misc/ninja_syntax.py delete mode 100755 ninja/misc/ninja_syntax_test.py delete mode 100755 ninja/misc/output_test.py delete mode 100644 ninja/misc/packaging/ninja.spec delete mode 100755 ninja/misc/packaging/rpmbuild.sh delete mode 100644 ninja/misc/write_fake_manifests.py delete mode 100644 ninja/misc/zsh-completion delete mode 100644 ninja/src/browse.cc delete mode 100644 ninja/src/browse.h delete mode 100755 ninja/src/browse.py delete mode 100644 ninja/src/build.cc delete mode 100644 ninja/src/build.h delete mode 100644 ninja/src/build_log.cc delete mode 100644 ninja/src/build_log.h delete mode 100644 ninja/src/build_log_perftest.cc delete mode 100644 ninja/src/build_log_test.cc delete mode 100644 ninja/src/build_test.cc delete mode 100644 ninja/src/canon_perftest.cc delete mode 100644 ninja/src/clean.cc delete mode 100644 ninja/src/clean.h delete mode 100644 ninja/src/clean_test.cc delete mode 100644 ninja/src/clparser.cc delete mode 100644 ninja/src/clparser.h delete mode 100644 ninja/src/clparser_perftest.cc delete mode 100644 ninja/src/clparser_test.cc delete mode 100644 ninja/src/debug_flags.cc delete mode 100644 ninja/src/debug_flags.h delete mode 100644 ninja/src/depfile_parser.cc delete mode 100644 ninja/src/depfile_parser.h delete mode 100644 ninja/src/depfile_parser.in.cc delete mode 100644 ninja/src/depfile_parser_perftest.cc delete mode 100644 
ninja/src/depfile_parser_test.cc delete mode 100644 ninja/src/deps_log.cc delete mode 100644 ninja/src/deps_log.h delete mode 100644 ninja/src/deps_log_test.cc delete mode 100644 ninja/src/disk_interface.cc delete mode 100644 ninja/src/disk_interface.h delete mode 100644 ninja/src/disk_interface_test.cc delete mode 100644 ninja/src/dyndep.cc delete mode 100644 ninja/src/dyndep.h delete mode 100644 ninja/src/dyndep_parser.cc delete mode 100644 ninja/src/dyndep_parser.h delete mode 100644 ninja/src/dyndep_parser_test.cc delete mode 100644 ninja/src/edit_distance.cc delete mode 100644 ninja/src/edit_distance.h delete mode 100644 ninja/src/edit_distance_test.cc delete mode 100644 ninja/src/eval_env.cc delete mode 100644 ninja/src/eval_env.h delete mode 100644 ninja/src/exit_status.h delete mode 100755 ninja/src/gen_doxygen_mainpage.sh delete mode 100644 ninja/src/getopt.c delete mode 100644 ninja/src/getopt.h delete mode 100644 ninja/src/graph.cc delete mode 100644 ninja/src/graph.h delete mode 100644 ninja/src/graph_test.cc delete mode 100644 ninja/src/graphviz.cc delete mode 100644 ninja/src/graphviz.h delete mode 100644 ninja/src/hash_collision_bench.cc delete mode 100644 ninja/src/hash_map.h delete mode 100644 ninja/src/includes_normalize-win32.cc delete mode 100644 ninja/src/includes_normalize.h delete mode 100644 ninja/src/includes_normalize_test.cc delete mode 100755 ninja/src/inline.sh delete mode 100644 ninja/src/lexer.cc delete mode 100644 ninja/src/lexer.h delete mode 100644 ninja/src/lexer.in.cc delete mode 100644 ninja/src/lexer_test.cc delete mode 100644 ninja/src/line_printer.cc delete mode 100644 ninja/src/line_printer.h delete mode 100644 ninja/src/manifest_parser.cc delete mode 100644 ninja/src/manifest_parser.h delete mode 100644 ninja/src/manifest_parser_perftest.cc delete mode 100644 ninja/src/manifest_parser_test.cc delete mode 100644 ninja/src/metrics.cc delete mode 100644 ninja/src/metrics.h delete mode 100644 ninja/src/minidump-win32.cc delete 
mode 100644 ninja/src/msvc_helper-win32.cc delete mode 100644 ninja/src/msvc_helper.h delete mode 100644 ninja/src/msvc_helper_main-win32.cc delete mode 100644 ninja/src/msvc_helper_test.cc delete mode 100644 ninja/src/ninja.cc delete mode 100644 ninja/src/ninja_test.cc delete mode 100644 ninja/src/parser.cc delete mode 100644 ninja/src/parser.h delete mode 100644 ninja/src/state.cc delete mode 100644 ninja/src/state.h delete mode 100644 ninja/src/state_test.cc delete mode 100644 ninja/src/string_piece.h delete mode 100644 ninja/src/string_piece_util.cc delete mode 100644 ninja/src/string_piece_util.h delete mode 100644 ninja/src/string_piece_util_test.cc delete mode 100644 ninja/src/subprocess-posix.cc delete mode 100644 ninja/src/subprocess-win32.cc delete mode 100644 ninja/src/subprocess.h delete mode 100644 ninja/src/subprocess_test.cc delete mode 100644 ninja/src/test.cc delete mode 100644 ninja/src/test.h delete mode 100644 ninja/src/timestamp.h delete mode 100644 ninja/src/util.cc delete mode 100644 ninja/src/util.h delete mode 100644 ninja/src/util_test.cc delete mode 100644 ninja/src/version.cc delete mode 100644 ninja/src/version.h delete mode 100644 ninja/src/win32port.h delete mode 100755 scripts/buildNinjaBinary.js delete mode 100644 tests/ounit_tests/ounit_bsb_pkg_tests.ml delete mode 100644 tests/ounit_tests/ounit_bsb_regex_tests.ml diff --git a/.devcontainer/.dockerignore b/.devcontainer/.dockerignore index cfecf53afab..64e4791ad49 100644 --- a/.devcontainer/.dockerignore +++ b/.devcontainer/.dockerignore @@ -46,9 +46,6 @@ node_modules *.dump coverage -.ninja_log -.bsdeps -.bsbuild lib/ocaml tests/build_tests/*/lib/ #ignore temporary directory @@ -94,6 +91,3 @@ tests/analysis_tests/**/*.bs.js # package tarballs package.tgz *.exe - -/ninja/ninja -/ninja/build.ninja \ No newline at end of file diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index f6906258a38..65ae5965274 100644 --- a/.devcontainer/Dockerfile +++ 
b/.devcontainer/Dockerfile @@ -5,9 +5,7 @@ LABEL org.opencontainers.image.description="Docker image for ReScript developmen RUN sudo apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install --no-install-recommends \ cmake \ - ninja-build \ musl-tools \ - python-is-python3 \ && rm -rf /var/lib/apt/lists/* # Install OPAM diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 10816a3bc68..63d2f8ad77f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -296,43 +296,6 @@ jobs: - name: Copy compiler exes to platform bin dir run: node scripts/copyExes.js --compiler - - name: Restore ninja build cache - id: ninja-build-cache - uses: actions/cache@v5 - with: - path: ninja/ninja${{ matrix.exe-suffix }} - key: ninja-build-v2-${{ matrix.os }}-${{ hashFiles('ninja/src/**') }} - - - name: Setup Python for ninja build - if: steps.ninja-build-cache.outputs.cache-hit != 'true' - uses: actions/setup-python@v6 - with: - python-version: "3.13" - - - name: Setup MSVC for ninja build (Windows) - if: steps.ninja-build-cache.outputs.cache-hit != 'true' && runner.os == 'Windows' - uses: TheMrMilchmann/setup-msvc-dev@v4 - with: - arch: x64 - - - name: Build ninja - if: steps.ninja-build-cache.outputs.cache-hit != 'true' && runner.os != 'Linux' - run: node scripts/buildNinjaBinary.js - - - name: Build ninja (Linux static) - if: steps.ninja-build-cache.outputs.cache-hit != 'true' && runner.os == 'Linux' - env: - LDFLAGS: -static - run: node scripts/buildNinjaBinary.js - - # Prevent ninja being rebuilt on playground build step later - - name: Touch ninja/ninja - if: ${{ matrix.build_playground && steps.ninja-build-cache.outputs.cache-hit == 'true' }} - run: touch ninja/ninja${{ matrix.exe-suffix }} - - - name: Copy ninja exe to platform bin dir - run: node scripts/copyExes.js --ninja - - name: "Syntax: Run tests" env: ROUNDTRIP_TEST: ${{ runner.os == 'Windows' && '0' || '1' }} diff --git a/.gitignore b/.gitignore index 
386c70d047b..d31b5d90603 100644 --- a/.gitignore +++ b/.gitignore @@ -48,9 +48,6 @@ node_modules *.dump coverage -.ninja_log -.bsdeps -.bsbuild lib/ocaml tests/build_tests/*/lib/ #ignore temporary directory diff --git a/AGENTS.md b/AGENTS.md index 22fc37c7002..90a183fe73a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -25,9 +25,8 @@ The Makefile’s targets build on each other in this order: 1. `yarn-install` runs automatically for targets that need JavaScript tooling (lib, playground, tests, formatting, etc.). 2. `build` (default target) builds the toolchain binaries (all copied into `packages/@rescript//bin`): - - `compiler` builds the dune executables (`bsc`, `bsb_helper`, `rescript-*`, `ounit_tests`, etc.). + - `compiler` builds the dune executables (`bsc`, `rescript-*`, `ounit_tests`, etc.). - `rewatch` builds the Rust-based ReScript build system and CLI. - - `ninja` bootstraps the ninja binary (part of the legacy build system). 3. `lib` uses those toolchain outputs to build the runtime sources. 4. Test targets (`make test`, `make test-syntax`, etc.) reuse everything above. @@ -82,7 +81,6 @@ compiler/ ├── ml/ # OCaml compiler infrastructure ├── core/ # Core compilation (lam_*, js_* files) ├── ext/ # Extended utilities and data structures -├── bsb/ # Legacy build system └── gentype/ # TypeScript generation analysis/ # Language server and tooling @@ -254,7 +252,6 @@ The compiler is designed for fast feedback loops and scales to large codebases: - **Build System**: dune with profiles (dev, release, browser) - **JavaScript**: Node.js 20+ for tooling - **Rust**: Toolchain needed for rewatch -- **Python**: 3 required for building ninja ## Common Tasks @@ -283,7 +280,7 @@ The compiler is designed for fast feedback loops and scales to large codebases: ### Rewatch Architecture -Rewatch is the modern build system written in Rust that replaces the legacy bsb (BuckleScript) build system. 
It provides faster incremental builds, better error messages, and improved developer experience. +Rewatch is ReScript's build system written in Rust. It provides fast incremental builds, better error messages, and improved developer experience. #### Key Components @@ -410,13 +407,6 @@ When clippy suggests refactoring that could impact performance, consider the tra - **Avoid Unnecessary Type Conversions**: When threading parameters through multiple function calls, use consistent types (e.g., `String` throughout) rather than converting between `String` and `&str` at each boundary. This eliminates unnecessary allocations and conversions. -#### Compatibility with Legacy bsb - -- **Command-line Flags**: Maintain compatibility with bsb flags where possible -- **Configuration**: Support both old (`bs-*`) and new field names -- **Output Format**: Generate compatible build artifacts -- **Error Messages**: Provide clear migration guidance - ### Common Tasks #### Adding New CLI Flags diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 29fb024e561..56e76ca0ce8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -17,7 +17,6 @@ Happy hacking! 
- [Node.js](https://nodejs.org/) v22.x or newer - [Yarn CLI](https://yarnpkg.com/getting-started/install) (can be installed with `corepack`, Homebrew, etc) - C compiler toolchain (usually installed with `xcode` on Mac) -- Python <= 3.11 (required to build ninja) - Rust toolchain (required to build rewatch; follow the instructions at https://www.rust-lang.org/tools/install and install the version listed as `rust-version` in `rewatch/Cargo.toml`) - `opam` (OCaml Package Manager) v2.2.0 or newer - VSCode (+ [OCaml Platform Extension](https://marketplace.visualstudio.com/items?itemName=ocamllabs.ocaml-platform)) @@ -103,12 +102,10 @@ You can also open this dev container with [GitHub Codespaces](https://github.com ## Building the Compiler -To build the compiler, the build tools (rewatch and ninja), and the ReScript runtime/standard library, just run: - Main targets: ```sh -# Build the compiler and the build tools (rewatch and ninja) +# Build the compiler and the build system (rewatch) make # Build the runtime/standard library @@ -127,9 +124,6 @@ make compiler # Build rewatch only make rewatch -# Build the ninja tool only -make ninja - # Run syntax tests make test-syntax @@ -230,10 +224,10 @@ node scripts/test.js -mocha **Run build system test (integration tests):** -This will run the whole build system test suite defined in `tests/build_tests`. +This will run the build system test suite defined in `tests/build_tests`. ``` -node scripts/test.js -bsb +node scripts/test.js -build ``` **Run ounit tests:** diff --git a/CREDITS.md b/CREDITS.md index acb48f1906a..a347083dc37 100644 --- a/CREDITS.md +++ b/CREDITS.md @@ -44,10 +44,6 @@ This file was imported but changed significantly in iterations later. 
ReScript compiler unit test builds on [OUnit](http://ounit.forge.ocamlcore.org/) -## Ninja - -Thanks to [ninja-build](https://ninja-build.org), ReScript also comes with a blazing fast build tool on top of it, `ninja` is a truly [well engineered](http://aosabook.org/en/posa/ninja.html) scalable build tool. - ## Bloomberg and Facebook Thanks to [Bloomberg](https://www.techatbloomberg.com) and [Facebook](https://github.com/facebook/). This project began at Bloomberg and was published in 2016; without the support of Bloomberg, it would not have happened. This project's funded by Facebook since July 2017. diff --git a/Makefile b/Makefile index 26a2ca52df3..088d3d60003 100644 --- a/Makefile +++ b/Makefile @@ -59,7 +59,7 @@ RUNTIME_BUILD_STAMP := packages/@rescript/runtime/.buildstamp # Default target -build: compiler rewatch ninja +build: compiler rewatch # Yarn @@ -72,25 +72,6 @@ $(YARN_INSTALL_STAMP): $(YARN_INSTALL_SOURCES) yarn install touch $@ -# Ninja - -NINJA_SOURCES = $(wildcard ninja/src/*.cc ninja/src/*.h) $(wildcard ninja/*.py) -NINJA_EXE = $(BIN_DIR)/ninja.exe - -ninja: $(NINJA_EXE) - -ninja/ninja: $(NINJA_SOURCES) -ifeq ($(OS),Darwin) - export CXXFLAGS="-flto" -endif - cd ninja && python3 configure.py --bootstrap --verbose - -$(NINJA_EXE): ninja/ninja - $(call COPY_EXE,$<,$@) - -clean-ninja: - rm -rf $(NINJA_EXE) ninja/build.ninja ninja/build ninja/misc/__pycache__ ninja/ninja - # Rewatch REWATCH_SOURCES = $(shell find rewatch/src -name '*.rs') rewatch/Cargo.toml rewatch/Cargo.lock rewatch/rust-toolchain.toml @@ -119,7 +100,7 @@ clean-rewatch: COMPILER_SOURCE_DIRS := compiler tests analysis tools COMPILER_SOURCES = $(shell find $(COMPILER_SOURCE_DIRS) -type f \( -name '*.ml' -o -name '*.mli' -o -name '*.dune' -o -name dune -o -name dune-project \)) -COMPILER_BIN_NAMES := bsc bsb_helper rescript-legacy rescript-editor-analysis rescript-tools +COMPILER_BIN_NAMES := bsc rescript-editor-analysis rescript-tools COMPILER_EXES := $(addsuffix .exe,$(addprefix 
$(BIN_DIR)/,$(COMPILER_BIN_NAMES))) COMPILER_DUNE_BINS := $(addsuffix $(PLATFORM_EXE_EXT),$(addprefix $(DUNE_BIN_DIR)/,$(COMPILER_BIN_NAMES))) @@ -166,20 +147,20 @@ artifacts: lib bench: compiler $(DUNE_BIN_DIR)/syntax_benchmarks -test: lib ninja +test: lib node scripts/test.js -all -test-analysis: lib ninja +test-analysis: lib make -C tests/analysis_tests clean test -test-reanalyze: lib ninja +test-reanalyze: lib make -C tests/analysis_tests/tests-reanalyze/deadcode test # Benchmark reanalyze on larger codebase (COPIES=N for more files) -benchmark-reanalyze: lib ninja +benchmark-reanalyze: lib make -C tests/analysis_tests/tests-reanalyze/deadcode-benchmark benchmark COPIES=$(or $(COPIES),50) -test-tools: lib ninja +test-tools: lib make -C tests/tools_tests clean test test-syntax: compiler @@ -188,7 +169,7 @@ test-syntax: compiler test-syntax-roundtrip: compiler ROUNDTRIP_TEST=1 ./scripts/test_syntax.sh -test-gentype: lib ninja +test-gentype: lib make -C tests/gentype_tests/typescript-react-example clean test make -C tests/gentype_tests/stdlib-no-shims clean test @@ -217,7 +198,7 @@ $(PLAYGROUND_BUILD_STAMP): $(COMPILER_SOURCES) # Creates all the relevant core and third party cmij files to side-load together with the playground bundle playground-cmijs: $(PLAYGROUND_CMI_BUILD_STAMP) -$(PLAYGROUND_CMI_BUILD_STAMP): $(RUNTIME_BUILD_STAMP) $(NINJA_EXE) +$(PLAYGROUND_CMI_BUILD_STAMP): $(RUNTIME_BUILD_STAMP) yarn workspace playground build playground-test: playground @@ -244,11 +225,11 @@ clean-gentype: clean-tests: clean-gentype -clean: clean-lib clean-compiler clean-rewatch clean-ninja +clean: clean-lib clean-compiler clean-rewatch dev-container: docker build -t rescript-dev-container docker .DEFAULT_GOAL := build -.PHONY: yarn-install build ninja rewatch compiler lib artifacts bench test test-analysis test-reanalyze benchmark-reanalyze test-tools test-syntax test-syntax-roundtrip test-gentype test-rewatch test-all playground playground-compiler playground-test 
playground-cmijs playground-release format checkformat clean-ninja clean-rewatch clean-compiler clean-lib clean-gentype clean-tests clean dev-container +.PHONY: yarn-install build rewatch compiler lib artifacts bench test test-analysis test-reanalyze benchmark-reanalyze test-tools test-syntax test-syntax-roundtrip test-gentype test-rewatch test-all playground playground-compiler playground-test playground-cmijs playground-release format checkformat clean-rewatch clean-compiler clean-lib clean-gentype clean-tests clean dev-container diff --git a/README.md b/README.md index 3f54f22aec1..e56b93f3b59 100644 --- a/README.md +++ b/README.md @@ -96,9 +96,6 @@ ReScript is licensed under LGPL version 3, with relaxed rules about creating and The ReScript parser (subdirectory `compiler/syntax`) is licensed under the [MIT License](compiler/syntax/LICENSE). -The `ninja` subdirectory contains the vendored [ninja](https://github.com/ninja-build/ninja) build system. -Refer to its copyright and license notices for information about its licensing. - ## 🏅 Acknowledgments ReScript was originally created by [Hongbo Zhang](https://github.com/bobzhang) in 2015. 
diff --git a/analysis/examples/example-project/src/Json.res b/analysis/examples/example-project/src/Json.res index 7cbfbbe0376..6ae48dfe4b8 100644 --- a/analysis/examples/example-project/src/Json.res +++ b/analysis/examples/example-project/src/Json.res @@ -1,6 +1,4 @@ @@ocaml.doc(" # Json parser - * - * Works with bucklescript and bsb-native * * ## Basics * diff --git a/analysis/examples/larger-project/.merlin b/analysis/examples/larger-project/.merlin deleted file mode 100644 index 05434917e97..00000000000 --- a/analysis/examples/larger-project/.merlin +++ /dev/null @@ -1,14 +0,0 @@ -####{BSB GENERATED: NO EDIT -FLG -ppx '/home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/rescript/linux/bsc.exe -as-ppx -bs-jsx 3' -S /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/rescript/lib/ocaml -B /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/rescript/lib/ocaml -FLG -w +a-4-9-20-40-41-42-50-61-102 -S /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/@rescript/react/lib/ocaml -B /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/@rescript/react/lib/ocaml -S /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/@glennsl/bs-json/lib/ocaml -B /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/@glennsl/bs-json/lib/ocaml -S src -B lib/bs/src -S src/exception -B lib/bs/src/exception -####BSB GENERATED: NO EDIT} diff --git a/analysis/examples/workspace-project/.gitignore b/analysis/examples/workspace-project/.gitignore index 9ea0bb15253..b06cb11e84d 100644 --- a/analysis/examples/workspace-project/.gitignore +++ b/analysis/examples/workspace-project/.gitignore @@ -23,7 +23,6 @@ lib *.mliast .vscode .merlin -.bsb.lock /node_modules/ package-lock.json yarn.lock diff --git a/analysis/vendor/json/Json.ml 
b/analysis/vendor/json/Json.ml index 407afb152bd..bfa60adc618 100644 --- a/analysis/vendor/json/Json.ml +++ b/analysis/vendor/json/Json.ml @@ -1,6 +1,4 @@ (** # Json parser - * - * Works with bucklescript and bsb-native * * ## Basics * diff --git a/biome.json b/biome.json index 77c75d66220..587892a380b 100644 --- a/biome.json +++ b/biome.json @@ -66,7 +66,6 @@ "lib/es6/**", "lib/js/**", "lib/bs/**", - "ninja/**", "playground/**", "*.bs.js", "*.res.js", diff --git a/cli/bstracing.js b/cli/bstracing.js deleted file mode 100755 index 7eb661dbce8..00000000000 --- a/cli/bstracing.js +++ /dev/null @@ -1,270 +0,0 @@ -#!/usr/bin/env node - -// @ts-check - -import * as fs from "node:fs"; -import * as path from "node:path"; -import * as readline from "node:readline"; - -/** - * - * @param {string} file - * @param {(line:string)=>void} lineCb - * @param {()=>void} finish - */ -function processEntry(file, lineCb, finish) { - const input = fs.createReadStream(file); - input.on("error", error => { - console.error(error.message); - console.error( - "make sure you are running after bsb building and in the top directory", - ); - process.exit(2); - }); - const rl = readline.createInterface({ - input: input, - crlfDelay: Number.POSITIVE_INFINITY, - }); - - rl.on("line", lineCb); - rl.on("close", finish); -} - -class Interval { - /** - * - * @param {number} start - * @param {number} end - */ - constructor(start, end) { - this.start = start; - this.end = end; - /** - * @type {string[]} - */ - this.targets = []; - } -} - -class Threads { - constructor() { - /** - * @type {number[]} - */ - this.workers = []; - } - /** - * - * @param {{start : number; end : number}} target - */ - alloc(target) { - for (let i = 0; i < this.workers.length; ++i) { - if (this.workers[i] <= target.start) { - this.workers[i] = target.end; - return i; - } - } - this.workers.push(target.end); - return this.workers.length - 1; - } -} -/** - * - * @param {Map} map - * @param {string} key - * @param {Interval} def - 
* - * */ -function setDefault(map, key, def) { - if (map.has(key)) { - return map.get(key); - } - map.set(key, def); - return def; -} - -// https://github.com/catapult-project/catapult/blob/master/tracing/tracing/base/color_scheme.html#L50 -const colors = [ - "thread_state_uninterruptible", - "thread_state_iowait", - "thread_state_running", - "thread_state_runnable", - "thread_state_sleeping", - "thread_state_unknown", - "background_memory_dump", - "light_memory_dump", - "detailed_memory_dump", - "vsync_highlight_color", - "generic_work", - "good", - "bad", - "terrible", - "black", - "grey", - "white", - "yellow", - "olive", - "rail_response", - "rail_animation", - "rail_idle", - "rail_load", - "startup", - "heap_dump_stack_frame", - "heap_dump_object_type", - "heap_dump_child_node_arrow", - "cq_build_running", - "cq_build_passed", - "cq_build_failed", - "cq_build_abandoned", - "cq_build_attempt_runnig", - "cq_build_attempt_passed", - "cq_build_attempt_failed", -]; - -const allocated = new Map(); - -function getColorName(obj, cat) { - obj.cat = cat; - let i; - if (allocated.has(cat)) { - i = allocated.get(cat); - } else { - allocated.set(cat, allocated.size); - } - obj.cname = colors[i % colors.length]; - return; -} -/** - * - * @param {Interval} target - */ -function category(target, obj) { - const targets = target.targets; - if (targets.length === 1) { - const curTar = targets[0]; - if (curTar.endsWith(".d")) { - getColorName(obj, "dep"); - } else if (curTar.endsWith(".mlast") || curTar.endsWith(".mliast")) { - getColorName(obj, "parse"); - } else if (curTar.endsWith(".cmi")) { - getColorName(obj, "cmi"); - } else if (curTar.endsWith(".cmj")) { - getColorName(obj, "cmj-only"); - } else { - getColorName(obj, "unknown"); - } - } else { - getColorName(obj, "cmj"); - } - obj.name = target.targets.map(x => path.parse(x).base).join(","); - return obj; -} -/** - * @param {string} file - * @param {boolean} showAll - * @param {string} outputFile - */ -function 
readIntervals(file, showAll, outputFile) { - let lastEndSeen = 0; - /** - * @type {Map} - */ - let targets = new Map(); - let offset = 0; - processEntry( - file, - line => { - const lineTrim = line.trim(); - if (lineTrim.startsWith("#")) { - return; - } - - let [start, end, _, name, cmdHash] = lineTrim.split("\t"); - cmdHash += `/${end}`; - if (+end < lastEndSeen) { - // This is a guess - // it could be wrong, when there's multiple small compilation - if (showAll) { - offset += lastEndSeen + 1000; - console.log(`new session starting from: ${name} : ${offset}`); - } else { - targets = new Map(); - } - } - lastEndSeen = +end; // new mark - setDefault( - targets, - cmdHash, - new Interval(Number(start) + offset, Number(end) + offset), - ).targets.push(name); - }, - () => { - const sorted = [...targets.values()].sort((a, b) => { - return a.start - b.start; - }); - const jsonArray = []; - const threads = new Threads(); - for (const target of sorted) { - jsonArray.push( - category(target, { - ph: "X", - pid: 0, - dur: (target.end - target.start) * 1000, - ts: target.start * 1000, - tid: threads.alloc(target), - args: {}, - }), - ); - } - console.log(` ${outputFile} is produced, loade it via chrome://tracing/`); - fs.writeFileSync(outputFile, JSON.stringify(jsonArray), "utf8"); - }, - ); -} -const logName = ".ninja_log"; - -/** - * @type {string} - */ -let file; -/** - * - * @param ps {string[]} - */ -function tryLocation(ps) { - for (const p of ps) { - const log = path.join(p, logName); - if (fs.existsSync(log)) { - file = log; - return; - } - } - console.error( - "no .ninja_log found in specified paths, make sure you set bstracing to the proper directory", - ); - process.exit(2); -} - -let showAll = false; -const curDate = new Date(); -let outputFile = `tracing_${curDate.getHours()}_${curDate.getMinutes()}_${curDate.getSeconds()}.json`; - -{ - let index = process.argv.indexOf("-C"); - if (index >= 0) { - const p = process.argv[index + 1]; - tryLocation([p, path.join(p, 
"lib", "bs")]); - } else { - tryLocation([".", path.join("lib", "bs")]); - } - if (process.argv.includes("-all")) { - showAll = true; - } - index = process.argv.indexOf("-o"); - if (index >= 0) { - outputFile = process.argv[index + 1]; - } -} - -console.log("loading build log", file, "is used"); -readIntervals(file, showAll, outputFile); diff --git a/cli/common/bins.js b/cli/common/bins.js index 6007f091515..5800a54f5ea 100644 --- a/cli/common/bins.js +++ b/cli/common/bins.js @@ -39,12 +39,9 @@ if (supportedPlatforms.includes(target)) { export const { binDir, binPaths: { - bsb_helper_exe, bsc_exe, - ninja_exe, rescript_editor_analysis_exe, rescript_tools_exe, - rescript_legacy_exe, rescript_exe, }, } = mod; diff --git a/cli/common/bsb.js b/cli/common/bsb.js deleted file mode 100644 index 8d0cf557ff9..00000000000 --- a/cli/common/bsb.js +++ /dev/null @@ -1,506 +0,0 @@ -// @ts-check - -import * as child_process from "node:child_process"; -import * as fs from "node:fs"; -import { createServer } from "node:http"; -import * as os from "node:os"; -import * as path from "node:path"; - -import { runtimePath } from "../common/runtime.js"; -import { rescript_legacy_exe } from "./bins.js"; -import { WebSocket } from "./minisocket.js"; - -const cwd = process.cwd(); -const lockFileName = path.join(cwd, ".bsb.lock"); - -/** - * @typedef {Object} ProjectFiles - * @property {Array} dirs - * @property {Array} generated - */ - -/** - * @typedef {Object} WatcherRef - * @property {string} dir - * @property {fs.FSWatcher} watcher - */ - -/** - * @type {child_process.ChildProcess | null} - */ -let ownerProcess = null; -export function releaseBuild() { - if (ownerProcess) { - ownerProcess.kill("SIGHUP"); - try { - fs.rmSync(lockFileName); - } catch {} - ownerProcess = null; - } -} - -/** - * We use [~perm:0o664] rather than our usual default perms, [0o666], because - * lock files shouldn't rely on the umask to disallow tampering by other. 
- * - * @param {Array} args - * @param {child_process.SpawnOptions} [options] - */ -function acquireBuild(args, options) { - if (ownerProcess) { - return null; - } - - if (args[0] === "build" && !args.includes("-runtime-path")) { - args.push("-runtime-path", runtimePath); - } - - try { - ownerProcess = child_process.spawn(rescript_legacy_exe, args, { - stdio: "inherit", - ...options, - }); - fs.writeFileSync(lockFileName, ownerProcess.pid.toString(), { - encoding: "utf8", - flag: "wx", - mode: 0o664, - }); - } catch (err) { - if (err.code === "EEXIST") { - console.warn(lockFileName, "already exists, try later"); - } else console.log(err); - } - return ownerProcess; -} - -/** - * @param {Array} args - * @param {(code: number) => void} [maybeOnClose] - */ -function delegate(args, maybeOnClose) { - const p = acquireBuild(args); - if (p) { - p.once("error", e => { - console.error(String(e)); - releaseBuild(); - process.exit(2); - }); - - // The 'close' event will always emit after 'exit' was already emitted, or - // 'error' if the child failed to spawn. - p.once("close", code => { - releaseBuild(); - const exitCode = code === null ? 
1 : code; - if (maybeOnClose) { - maybeOnClose(exitCode); - return; - } - process.exit(exitCode); - }); - } else { - console.warn(`Another build detected or stale lockfile ${lockFileName}`); - // rasing magic code - process.exit(133); - } -} - -/** - * @param {Array} args - */ -export function info(args) { - delegate(["info", ...args]); -} - -/** - * @param {Array} args - */ -export function clean(args) { - delegate(["clean", ...args]); -} - -const shouldColorizeError = - process.stderr.isTTY || process.env.FORCE_COLOR === "1"; -const shouldColorize = process.stdout.isTTY || process.env.FORCE_COLOR === "1"; - -/** - * @type {[number,number]} - */ -let startTime; -function updateStartTime() { - startTime = process.hrtime(); -} -function updateFinishTime() { - const diff = process.hrtime(startTime); - return diff[0] * 1e9 + diff[1]; -} - -/** - * @param {number} [code] - */ -function logFinishCompiling(code) { - let log = ">>>> Finish compiling"; - if (code) { - log = `${log} (exit: ${code})`; - } - if (shouldColorize) { - log = `\x1b[36m${log}\x1b[0m`; - } - if (code) { - console.log(log); - } else { - console.log(log, Math.floor(updateFinishTime() / 1e6), "mseconds"); - } -} - -function logStartCompiling() { - updateStartTime(); - let log = ">>>> Start compiling"; - if (shouldColorize) { - log = `\x1b[36m${log}\x1b[0m`; - } - console.log(log); -} - -function exitProcess() { - releaseBuild(); - process.exit(0); -} - -/** - * @param {string} file - * @returns - */ -function getProjectFiles(file) { - if (fs.existsSync(file)) { - return JSON.parse(fs.readFileSync(file, "utf8")); - } - return { dirs: [], generated: [] }; -} - -/** - * @param {Array} args - */ -function watch(args) { - // All clients of type MiniWebSocket - /** - * @type {any[]} - */ - let wsClients = []; - let withWebSocket = false; - let webSocketHost = "localhost"; - let webSocketPort = 9999; - - let resConfig = "rescript.json"; - if (!fs.existsSync(resConfig)) { - resConfig = "bsconfig.json"; - } - - 
const sourcedirs = path.join("lib", "bs", ".sourcedirs.json"); - - let LAST_SUCCESS_BUILD_STAMP = 0; - - let LAST_BUILD_START = 0; - let LAST_FIRED_EVENT = 0; - /** - * @type {[string,string][]} - */ - let reasonsToRebuild = []; - /** - * @type {string[]} - */ - let watchGenerated = []; - - /** - * @type {WatcherRef[]} - * watchers are held so that we close it later - */ - let watchers = []; - - const verbose = args.includes("-verbose"); - const dlog = verbose ? console.log : () => {}; - - const wsParamIndex = args.indexOf("-ws"); - if (wsParamIndex > -1) { - const hostAndPortNumber = (args[wsParamIndex + 1] || "").split(":"); - /** - * @type {number} - */ - let portNumber; - if (hostAndPortNumber.length === 1) { - portNumber = Number.parseInt(hostAndPortNumber[0]); - } else { - webSocketHost = hostAndPortNumber[0]; - portNumber = Number.parseInt(hostAndPortNumber[1]); - } - if (!Number.isNaN(portNumber)) { - webSocketPort = portNumber; - } - withWebSocket = true; - dlog(`WebSocket host & port number: ${webSocketHost}:${webSocketPort}`); - } - - const rescriptWatchBuildArgs = verbose - ? 
["build", "-no-deps", "-verbose"] - : ["build", "-no-deps"]; - - function notifyClients() { - wsClients = wsClients.filter(x => !x.closed && !x.socket.destroyed); - const wsClientsLen = wsClients.length; - dlog(`Alive sockets number: ${wsClientsLen}`); - const data = JSON.stringify({ LAST_SUCCESS_BUILD_STAMP }); - for (let i = 0; i < wsClientsLen; i++) { - // in reverse order, the last pushed get notified earlier - const client = wsClients[wsClientsLen - i - 1]; - if (!client.closed) { - client.sendText(data); - } - } - } - - function setUpWebSocket() { - const _id = setInterval(notifyClients, 3000); - createServer() - .on("upgrade", (req, socket, upgradeHead) => { - dlog("connection opened"); - const ws = new WebSocket(req, socket, upgradeHead); - socket.on("error", err => { - dlog(`Socket Error ${err}`); - }); - wsClients.push(ws); - }) - .on("error", err => { - // @ts-ignore - if (err !== undefined && err.code === "EADDRINUSE") { - const error = shouldColorize ? "\x1b[1;31mERROR:\x1b[0m" : "ERROR:"; - console.error(`${error} The websocket port number ${webSocketPort} is in use. 
-Please pick a different one using the \`-ws [host:]port\` flag from bsb.`); - } else { - console.error(err); - } - process.exit(2); - }) - .listen(webSocketPort, webSocketHost); - } - - /** - * @param {ProjectFiles} projectFiles - */ - function watchBuild(projectFiles) { - const watchFiles = projectFiles.dirs; - watchGenerated = projectFiles.generated; - // close and remove all unused watchers - watchers = watchers.filter(watcher => { - if (watcher.dir === resConfig) { - return true; - } - if (watchFiles.indexOf(watcher.dir) < 0) { - dlog(`${watcher.dir} is no longer watched`); - watcher.watcher.close(); - return false; - } - return true; - }); - - // adding new watchers - for (const dir of watchFiles) { - if (!watchers.find(watcher => watcher.dir === dir)) { - dlog(`watching dir ${dir} now`); - const watcher = fs.watch(dir, onChange); - watchers.push({ dir: dir, watcher: watcher }); - } else { - // console.log(dir, 'already watched') - } - } - } - - /** - * @param {string | null} fileName - */ - function checkIsRebuildReason(fileName) { - // Return true if filename is nil, filename is only provided on Linux, macOS, Windows, and AIX. - // On other systems, we just have to assume that any change is valid. - // This could cause problems if source builds (generating js files in the same directory) are supported. 
- if (!fileName) return true; - - return ( - ((fileName.endsWith(".res") || fileName.endsWith(".resi")) && - !watchGenerated.includes(fileName)) || - fileName === resConfig - ); - } - - /** - * @return {boolean} - */ - function needRebuild() { - return reasonsToRebuild.length !== 0; - } - - /** - * @param {number} code - */ - function buildFinishedCallback(code) { - if (code === 0) { - LAST_SUCCESS_BUILD_STAMP = Date.now(); - notifyClients(); - } - logFinishCompiling(code); - releaseBuild(); - if (needRebuild()) { - build(0); - } else { - watchBuild(getProjectFiles(sourcedirs)); - } - } - - /** - * TODO: how to make it captured by vscode - * @param error {string} - * @param highlight {string} - */ - function outputError(error, highlight) { - if (shouldColorizeError && highlight) { - process.stderr.write( - error.replace(highlight, `\x1b[1;31m${highlight}\x1b[0m`), - ); - } else { - process.stderr.write(error); - } - } - - // Note this function filters the error output - // it relies on the fact that ninja will merege stdout and stderr - // of the compiler output, if it does not - // then we should have a way to not filter the compiler output - /** - * - * @param {number} depth - */ - function build(depth) { - if (reasonsToRebuild.length === 0) { - dlog("No need to rebuild"); - return; - } - dlog(`Rebuilding since ${reasonsToRebuild}`); - const p = acquireBuild(rescriptWatchBuildArgs, { - stdio: ["inherit", "inherit", "pipe"], - }); - if (p) { - logStartCompiling(); - p.on("data", s => { - outputError(s, "ninja: error"); - }) - .once("exit", buildFinishedCallback) - .stderr.setEncoding("utf8"); - // This is important to clean up all - // previous queued events - reasonsToRebuild = []; - LAST_BUILD_START = Date.now(); - } - // if acquiring lock failed, no need retry here - // since buildFinishedCallback will try again - // however this is no longer the case for multiple-process - // it could fail due to other issues like .bsb.lock - else { - dlog( - `Acquire lock 
failed, do the build later ${depth} : ${reasonsToRebuild}`, - ); - const waitTime = 2 ** depth * 40; - setTimeout(() => { - build(Math.min(depth + 1, 5)); - }, waitTime); - } - } - - /** - * - * @param {fs.WatchEventType} event - * @param {string | null} reason - */ - function onChange(event, reason) { - const eventTime = Date.now(); - const timeDiff = eventTime - LAST_BUILD_START; - const eventDiff = eventTime - LAST_FIRED_EVENT; - dlog(`Since last build: ${timeDiff} -- ${eventDiff}`); - if (timeDiff < 5 || eventDiff < 5) { - // for 5ms, we could think that the ninja not get - // kicked yet, so there is really no need - // to send more events here - - // note reasonsToRebuild also - // helps avoid redundant build, but this will - // save the event loop call `setImmediate` - return; - } - if (checkIsRebuildReason(reason)) { - dlog(`\nEvent ${event} ${reason}`); - LAST_FIRED_EVENT = eventTime; - reasonsToRebuild.push([event, reason || ""]); - // Some editors are using temporary files to store edits. - // This results in two sync change events: change + rename and two sync builds. - // Using setImmediate will ensure that only one build done. 
- setImmediate(() => { - if (needRebuild()) { - if (process.env.BS_WATCH_CLEAR && console.clear) { - console.clear(); - } - build(0); - } - }); - } - } - - /** - * - * @param {boolean} withWebSocket - */ - function startWatchMode(withWebSocket) { - if (withWebSocket) { - setUpWebSocket(); - } - // for column one based error message - - process.stdin.on("close", exitProcess); - // close when stdin stops - if (os.platform() !== "win32") { - process.stdin.on("end", exitProcess); - process.stdin.resume(); - } - - watchers.push({ watcher: fs.watch(resConfig, onChange), dir: resConfig }); - } - - logStartCompiling(); - delegate(["build", ...args], _ => { - startWatchMode(withWebSocket); - buildFinishedCallback(0); - }); -} - -/** - * @param {Array} args - */ -export function build(args) { - // We want to show the compile time for build - // But bsb might show a help message when --help or invalid arguments are passed - // We don't want to show the compile time in that case - // But since we don't have a proper parsing, - // we can be sure about that only when building without any additional args - if (args.length === 0) { - logStartCompiling(); - delegate(["build"], exitCode => { - logFinishCompiling(exitCode); - process.exit(exitCode); - }); - return; - } - if (args.some(arg => ["help", "-h", "-help", "--help"].includes(arg))) { - delegate(["build", "-h"]); - return; - } - if (args.includes("-w")) { - watch(args); - return; - } - delegate(["build", ...args]); -} diff --git a/cli/rescript-legacy.js b/cli/rescript-legacy.js deleted file mode 100755 index c464f468a69..00000000000 --- a/cli/rescript-legacy.js +++ /dev/null @@ -1,135 +0,0 @@ -#!/usr/bin/env node - -// @ts-check - -// This script is supposed to be running in project root directory -// It matters since we need read .sourcedirs(location) -// and its content are file/directories with regard to project root - -import * as fs from "node:fs"; -import * as tty from "node:tty"; - -import { bsc_exe, 
rescript_legacy_exe } from "./common/bins.js"; -import * as bsb from "./common/bsb.js"; - -const cwd = process.cwd(); -process.env.BSB_PROJECT_ROOT = cwd; - -if (process.env.FORCE_COLOR === undefined) { - if (tty.isatty(1)) { - process.env.FORCE_COLOR = "1"; - process.env.NINJA_ANSI_FORCED = "1"; - } -} else { - if ( - process.env.FORCE_COLOR === "1" && - process.env.NINJA_ANSI_FORCED === undefined - ) { - process.env.NINJA_ANSI_FORCED = "1"; - } - if (process.argv.includes("-verbose")) { - console.log(`FORCE_COLOR: "${process.env.FORCE_COLOR}"`); - } -} - -const helpMessage = `Usage: rescript - -\`rescript\` is equivalent to \`rescript build\` - -Options: - -v, -version display version number - -h, -help display help - -Subcommands: - build - clean - format - dump - help - -Run \`rescript -h\` for subcommand help. Examples: - rescript build -h - rescript format -h`; - -function onUncaughtException(err) { - console.error("Uncaught Exception", err); - bsb.releaseBuild(); - process.exit(1); -} - -function exitProcess() { - bsb.releaseBuild(); - process.exit(0); -} - -process.on("uncaughtException", onUncaughtException); - -// OS signal handlers -// Ctrl+C -process.on("SIGINT", exitProcess); -// kill pid -try { - process.on("SIGUSR1", exitProcess); - process.on("SIGUSR2", exitProcess); - process.on("SIGTERM", exitProcess); - process.on("SIGHUP", exitProcess); -} catch (_e) { - // Deno might throw an error here, see https://github.com/denoland/deno/issues/9995 - // TypeError: Windows only supports ctrl-c (SIGINT) and ctrl-break (SIGBREAK). 
-} - -const args = process.argv.slice(2); -const argPatterns = { - help: ["help", "-h", "-help", "--help"], - version: ["version", "-v", "-version", "--version"], -}; - -const helpArgIndex = args.findIndex(arg => argPatterns.help.includes(arg)); -const firstPositionalArgIndex = args.findIndex(arg => !arg.startsWith("-")); - -if ( - helpArgIndex !== -1 && - (firstPositionalArgIndex === -1 || helpArgIndex <= firstPositionalArgIndex) -) { - console.log(helpMessage); -} else if (argPatterns.version.includes(args[0])) { - const packageSpec = JSON.parse( - fs.readFileSync(new URL("../package.json", import.meta.url), "utf-8"), - ); - - console.log(packageSpec.version); -} else if (firstPositionalArgIndex !== -1) { - const subcmd = args[firstPositionalArgIndex]; - const subcmdArgs = args.slice(firstPositionalArgIndex + 1); - - switch (subcmd) { - case "info": { - bsb.info(subcmdArgs); - break; - } - case "clean": { - bsb.clean(subcmdArgs); - break; - } - case "build": { - bsb.build(subcmdArgs); - break; - } - case "format": { - const mod = await import("./rescript-legacy/format.js"); - await mod.main(subcmdArgs, rescript_legacy_exe, bsc_exe); - break; - } - case "dump": { - const mod = await import("./rescript-legacy/dump.js"); - mod.main(subcmdArgs, rescript_legacy_exe, bsc_exe); - break; - } - default: { - console.error(`Error: Unknown command "${subcmd}".\n${helpMessage}`); - process.exit(2); - } - } -} else { - bsb.build(args); -} diff --git a/cli/rescript-legacy/dump.js b/cli/rescript-legacy/dump.js deleted file mode 100644 index 3acf52a2515..00000000000 --- a/cli/rescript-legacy/dump.js +++ /dev/null @@ -1,57 +0,0 @@ -// @ts-check - -import * as child_process from "node:child_process"; -import * as path from "node:path"; - -import * as arg from "#cli/args"; - -const dump_usage = `Usage: rescript dump [target] -\`rescript dump\` dumps the information for the target -`; - -/** - * @type {arg.specs} - */ -const specs = []; - -/** - * @param {string[]} argv - * @param 
{string} rescript_legacy_exe - * @param {string} bsc_exe - */ -export function main(argv, rescript_legacy_exe, bsc_exe) { - let target; - arg.parse_exn(dump_usage, argv, specs, xs => { - if (xs.length !== 1) { - arg.bad_arg(`Expect only one target, ${xs.length} found`); - } - target = xs[0]; - }); - - const { ext } = path.parse(target); - if (ext !== ".cmi") { - console.error("Only .cmi target allowed"); - process.exit(2); - } - - let output = child_process.spawnSync( - rescript_legacy_exe, - ["build", "--", target], - { - encoding: "utf-8", - }, - ); - if (output.status !== 0) { - console.log(output.stdout); - console.error(output.stderr); - process.exit(2); - } - output = child_process.spawnSync(bsc_exe, [path.join("lib", "bs", target)], { - encoding: "utf-8", - }); - console.log(output.stdout.trimEnd()); - if (output.status !== 0) { - console.error(output.stderr); - process.exit(2); - } -} diff --git a/cli/rescript-legacy/format.js b/cli/rescript-legacy/format.js deleted file mode 100644 index 9fc1759b8da..00000000000 --- a/cli/rescript-legacy/format.js +++ /dev/null @@ -1,255 +0,0 @@ -// @ts-check - -import * as child_process from "node:child_process"; -import * as crypto from "node:crypto"; -import * as fs from "node:fs"; -import * as asyncFs from "node:fs/promises"; -import * as os from "node:os"; -import * as path from "node:path"; -import { promisify } from "node:util"; - -import * as arg from "#cli/args"; - -const asyncExecFile = promisify(child_process.execFile); - -const format_usage = `Usage: rescript format [files] - -\`rescript format\` formats the current directory -`; - -/** - * @type {arg.stringref} - */ -const stdin = { val: "" }; - -/** - * @type {arg.boolref} - */ -const format = { val: false }; - -/** - * @type {arg.boolref} - */ -const check = { val: false }; - -/** - * @type{arg.specs} - */ -const specs = [ - [ - "-stdin", - { kind: "String", data: { kind: "String_set", data: stdin } }, - `[.res|.resi] Read the code from stdin and print -the 
formatted code to stdout in ReScript syntax`, - ], - [ - "-all", - { kind: "Unit", data: { kind: "Unit_set", data: format } }, - "Format the whole project ", - ], - [ - "-check", - { kind: "Unit", data: { kind: "Unit_set", data: check } }, - "Check formatting for file or the whole project. Use `-all` to check the whole project", - ], -]; -const formattedStdExtensions = [".res", ".resi"]; -const formattedFileExtensions = [".res", ".resi"]; - -/** - * - * @param {string[]} extensions - */ -function hasExtension(extensions) { - /** - * @param {string} x - */ - const pred = x => extensions.some(ext => x.endsWith(ext)); - return pred; -} -async function readStdin() { - const stream = process.stdin; - const chunks = []; - for await (const chunk of stream) chunks.push(chunk); - return Buffer.concat(chunks).toString("utf8"); -} - -const _numThreads = os.cpus().length; - -/** - * Splits an array into smaller chunks of a specified size. - * - * @template T - * @param {T[]} array - The array to split into chunks. - * @param {number} chunkSize - The size of each chunk. - * @returns {T[][]} - An array of chunks, where each chunk is an array of type T. - */ -function chunkArray(array, chunkSize) { - /** @type {T[][]} */ - const result = []; - - for (let i = 0; i < array.length; i += chunkSize) { - result.push(array.slice(i, i + chunkSize)); - } - - return result; -} - -/** - * @param {string[]} files - * @param {string} bsc_exe - * @param {(x: string) => boolean} isSupportedFile - * @param {boolean} checkFormatting - */ -async function formatFiles(files, bsc_exe, isSupportedFile, checkFormatting) { - const supportedFiles = files.filter(isSupportedFile); - const batchSize = 4 * os.cpus().length; - const batches = chunkArray(supportedFiles, batchSize); - - let incorrectlyFormattedFiles = 0; - try { - for (const batch of batches) { - await Promise.all( - batch.map(async file => { - const flags = checkFormatting - ? 
["-format", file] - : ["-o", file, "-format", file]; - const { stdout } = await asyncExecFile(bsc_exe, flags); - if (check.val) { - const original = await asyncFs.readFile(file, "utf-8"); - if (original !== stdout) { - console.error("[format check]", file); - incorrectlyFormattedFiles++; - } - } - }), - ); - } - } catch (err) { - console.error(err); - process.exit(2); - } - if (incorrectlyFormattedFiles > 0) { - if (incorrectlyFormattedFiles === 1) { - console.error("The file listed above needs formatting"); - } else { - console.error( - `The ${incorrectlyFormattedFiles} files listed above need formatting`, - ); - } - process.exit(3); - } -} - -/** - * @param {string[]} argv - * @param {string} rescript_legacy_exe - * @param {string} bsc_exe - */ -export async function main(argv, rescript_legacy_exe, bsc_exe) { - const isSupportedFile = hasExtension(formattedFileExtensions); - const isSupportedStd = hasExtension(formattedStdExtensions); - - try { - /** - * @type {string[]} - */ - let files = []; - arg.parse_exn(format_usage, argv, specs, xs => { - files = xs; - }); - - const format_project = format.val; - const use_stdin = stdin.val; - - // Only -check arg - // Require: -all or path to a file - if (check.val && !format_project && files.length === 0) { - console.error( - "format check require path to a file or use `-all` to check the whole project", - ); - process.exit(2); - } - - if (format_project) { - if (use_stdin || files.length !== 0) { - console.error("format -all can not be in use with other flags"); - process.exit(2); - } - // -all - // TODO: check the rest arguments - const output = child_process.spawnSync( - rescript_legacy_exe, - ["info", "-list-files"], - { - encoding: "utf-8", - }, - ); - if (output.status !== 0) { - console.error(output.stdout); - console.error(output.stderr); - process.exit(2); - } - files = output.stdout.split("\n").map(x => x.trim()); - await formatFiles(files, bsc_exe, isSupportedFile, check.val); - } else if (use_stdin) { - if 
(check.val) { - console.error("format -stdin cannot be used with -check flag"); - process.exit(2); - } - if (isSupportedStd(use_stdin)) { - const randomHex = crypto.randomBytes(8).toString("hex"); - const basename = path.basename(use_stdin); - const filename = path.join( - os.tmpdir(), - `rescript_${randomHex}${basename}`, - ); - (async () => { - const content = await readStdin(); - const fd = fs.openSync(filename, "wx", 0o600); // Avoid overwriting existing file - fs.writeFileSync(fd, content, "utf8"); - fs.closeSync(fd); - process.addListener("exit", () => fs.unlinkSync(filename)); - child_process.execFile( - bsc_exe, - ["-format", filename], - (error, stdout, stderr) => { - if (error === null) { - process.stdout.write(stdout); - } else { - console.error(stderr); - process.exit(2); - } - }, - ); - })(); - } else { - console.error(`Unsupported extension ${use_stdin}`); - console.error(`Supported extensions: ${formattedStdExtensions} `); - process.exit(2); - } - } else { - if (files.length === 0) { - // none of argumets set - // format the current directory - files = fs.readdirSync(process.cwd()).filter(isSupportedFile); - } - - for (const file of files) { - if (!isSupportedStd(file)) { - console.error(`Don't know what do with ${file}`); - console.error(`Supported extensions: ${formattedFileExtensions}`); - process.exit(2); - } - } - await formatFiles(files, bsc_exe, isSupportedFile, check.val); - } - } catch (e) { - if (e instanceof arg.ArgError) { - console.error(e.message); - process.exit(2); - } else { - throw e; - } - } -} diff --git a/compiler/bsb/bsb.md b/compiler/bsb/bsb.md deleted file mode 100644 index e081fa461d9..00000000000 --- a/compiler/bsb/bsb.md +++ /dev/null @@ -1,201 +0,0 @@ -# Phony targets - - -# collect file groups - -1. we need check integrity of files here? -cases: -one directory have two same files -- ignore, does not matter here? 
-two directories ha - -# generate ninja from file groups - -`Bsb_file_groups.file_group list` -one directory, one kind -- -when we merge we will have two `dirs` - -do we allow duplicate modules? - -suppose : -lib -> 0 - -dev -> 1 -dev -> 2 -dev -> 3 - -so that they can have same names - - -.default -All output (not just js in case we support native build) - -.install - -It's hard to bake it in built rules, since it is flag dependent, if you have `-bin-annot` -then you would like to install `cmt` too, however, it might be or not be there - -# meta-data files - -- .bsbuild -- .bsdeps -- .sourcedirs -# post-build - -Here we have `js` generated, we can do either post-build or -create a new rule. - -Note creating new rules will get more concurrency while post-build is easy -and can do in source modification - -https://groups.google.com/forum/#!searchin/ninja-build/post$20process%7Csort:relevance/ninja-build/Q4hpcDmhPzw/KZpDyOEFuTkJ - -# Performance tweaks - -Writing(truncating) files are significantly slower (20~30x) and it destroys cache(see Appendix), we should -try to avoid writing too many files. - -bsb is optimized for incremental build (especially for modifying files ). - -There is a trade off here: if we generate `.bsdep` file, whenever adding or removing file, `.bscache` will not -impact the integrity of `.bsdep`, so that it will run `.bsdep -> .d`. - -The downside is -1. clean build will generate more smaller files (slow down), -2. build system has to track more outputs (latency, stat more files almost doubled) - -Whenever we change a file it will regenerate the ast, optionally update `.bsdep` - -So assuming that merge `.bsdep` into `.mlast`, build system will track not more files. -The integrity of `.mlast` is not impacted by `.bscache`. -`.mlast -> .d` can be still improved, not as good as `.bsdep -> .d` since `.bsdep` -can check `.bsdep` time stamp. 
- -So let's change the `.mlast` to such format - ----------- -magic number -length of dependent modules -dependent modules -binary ast ----------- - -This file is integrity is not impacted by `.bscache`. whenever `.bscache` changes we check if we need regenerate `.d` - -# Appendix -[source,ocaml] --------------- -module Set_string = Set.Make(String) - -(* let v = Set_string.of_list ["List" ; "Set" ; "String" ; "Test_order"] *) -let v = Set_string.of_list [] -let deseralize f = - let ichan = open_in_bin f in - let v : Set_string.t = input_value ichan in - close_in ichan ; - v - -let time f arg = - let v0 = Unix.gettimeofday () in - ignore @@ f arg; - let v1 = Unix.gettimeofday () in - print_endline (Printf.sprintf "%f elapsed" (v1 -. v0)) - -let deseralize_and_compare f = - ignore @@ Set_string.equal v (deseralize f) - -let seralize f = - let ochan = open_out_bin f in - output_value ochan v ; - close_out ochan - -let try_seralize f = - match open_in_bin f with - | ichan -> - close_in ichan ; - let ochan = open_out_bin f in - output_value ochan v ; - close_out ochan - | exception _ -> - let ochan = open_out_bin f in - output_value ochan v ; - close_out ochan - -let try_seralize2 f = - if Sys.file_exists f then - let ochan = open_out_bin f in - output_value ochan v ; - close_out ochan - else - let ochan = open_out_bin f in - output_value ochan v ; - close_out ochan - - - -let () = - let file = "/Users/hzhang295/git/tmp/bench/e.mldeps" in - time try_seralize file; - Unix.unlink file ; - time try_seralize2 file; - Unix.unlink file ; - time seralize file; - time deseralize_and_compare file; - Unix.unlink file - -(* -0.002452 elapsed -0.002440 elapsed -0.001954 elapsed -0.000079 elapsed - -*) - --------------- - -# package-flags - - when designing bsc command line flags, we ask user to specify the output path of package output - instead of calculating, - the reason is that the user input can be absolute path or relative path, to calculate - we also need the location of 
package.json. - - - ## document when regenerating `build.ninja` - - - when `bsb.exe` path is changed - - when `bsb.exe` version is changed - - ## other internal options - --no-dev -- don't build dev directory group --install -- install files - -## document when regenerating `.merlin` - -## TODO: seems we can do it - -1. instead of specifying the whole relative path, just specifying the offset - ``` --bs-package-output commonjs:+lib/js -bs-package-output amdjs:+lib/amdjs xx.mlast - ``` - - With this we would simplify the build a lot. - - on Windows - ``` - -bs-package-output commonjs:+lib\js -bs-package-output:+lib\amdjs xx.ml a/b/c/xx.mlast - ``` - - so when the user input is relative path, we do the concat, - if it is absolute path, we calculate the relative path first. - - This is complicated vs - - ``` - -bs-package-output commonjs -bs-package-output amdjs - ``` - - however, the bsc is almost sitting in `lib/bs` - -2. caching Directory operations -3. Read `bsconfig.json` from watchcer side, so that we can caching io operations more effeciently? diff --git a/compiler/bsb/bsb_arg.ml b/compiler/bsb/bsb_arg.ml deleted file mode 100644 index aa7183e5a77..00000000000 --- a/compiler/bsb/bsb_arg.ml +++ /dev/null @@ -1,122 +0,0 @@ -(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -type anon_fun = rev_args:string list -> unit - -type string_action = - | String_call of (string -> unit) - | String_set of string ref - -type unit_action = Unit_call of (unit -> unit) | Unit_set of bool ref - -type spec = Unit of unit_action | String of string_action - -type error = Unknown of string | Missing of string - -type t = spec Ext_spec.t - -exception Bad of string - -let bad_arg s = raise_notrace (Bad s) - -let ( +> ) = Ext_buffer.add_string - -let usage_b (buf : Ext_buffer.t) ~usage (speclist : t) = - buf +> usage; - if Ext_array.is_empty speclist then () - else ( - buf +> "\nOptions:\n"; - let max_col = ref 0 in - Ext_array.iter speclist (fun (key, _, doc) -> - if - (not (Ext_string.starts_with doc "*internal*")) - && String.length key > !max_col - then max_col := String.length key); - Ext_array.iter speclist (fun (key, _, doc) -> - if not (Ext_string.starts_with doc "*internal*") then ( - buf +> " "; - buf +> key; - buf +> String.make (!max_col - String.length key + 2) ' '; - let cur = ref 0 in - let doc_length = String.length doc in - while !cur < doc_length do - if !cur <> 0 then ( - buf +> "\n"; - buf +> String.make (!max_col + 4) ' '); - match String.index_from_opt doc !cur '\n' with - | None -> - buf +> String.sub doc !cur (String.length doc - !cur); - cur := doc_length - | Some new_line_pos -> - buf +> String.sub doc !cur (new_line_pos - !cur); - cur := new_line_pos + 1 - done; - buf +> "\n"))) - -let stop_raise ~usage ~(error : error) (speclist : t) = - let b = Ext_buffer.create 200 in - (match error with - | Unknown ("-help" | "--help" | "-h") -> - usage_b b ~usage speclist; - Ext_buffer.output_buffer stdout b; - exit 0 - | Unknown s -> - b +> "Unknown option \""; - b +> s; - b +> "\".\n" - | Missing s -> - b +> "Option \""; - b +> s; - b +> "\" needs an argument.\n"); - usage_b b ~usage speclist; - bad_arg (Ext_buffer.contents b) - -let parse_exn ~usage ~argv ?(start = 1) ?(finish = Array.length argv) - (speclist : t) anonfun = - let 
current = ref start in - let rev_list = ref [] in - while !current < finish do - let s = argv.(!current) in - incr current; - if s <> "" && s.[0] = '-' then - match Ext_spec.assoc3 speclist s with - | Some action -> ( - match action with - | Unit r -> ( - match r with - | Unit_set r -> r.contents <- true - | Unit_call f -> f ()) - | String f -> ( - if !current >= finish then - stop_raise ~usage ~error:(Missing s) speclist - else - let arg = argv.(!current) in - incr current; - match f with - | String_call f -> f arg - | String_set u -> u.contents <- arg)) - | None -> stop_raise ~usage ~error:(Unknown s) speclist - else rev_list := s :: !rev_list - done; - anonfun ~rev_args:!rev_list diff --git a/compiler/bsb/bsb_arg.mli b/compiler/bsb/bsb_arg.mli deleted file mode 100644 index 60d21f2adc6..00000000000 --- a/compiler/bsb/bsb_arg.mli +++ /dev/null @@ -1,46 +0,0 @@ -(* Copyright (C) 2020 - Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type string_action = - | String_call of (string -> unit) - | String_set of string ref - -type unit_action = Unit_call of (unit -> unit) | Unit_set of bool ref - -type spec = Unit of unit_action | String of string_action - -type anon_fun = rev_args:string list -> unit - -exception Bad of string - -val parse_exn : - usage:string -> - argv:string array -> - ?start:int -> - ?finish:int -> - (string * spec * string) array -> - anon_fun -> - unit - -val bad_arg : string -> 'a diff --git a/compiler/bsb/bsb_build_schemas.ml b/compiler/bsb/bsb_build_schemas.ml deleted file mode 100644 index de5f9e18b8e..00000000000 --- a/compiler/bsb/bsb_build_schemas.ml +++ /dev/null @@ -1,66 +0,0 @@ -(* Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let name = "name" - -let ppx_flags = "ppx-flags" -let pp_flags = "pp-flags" -let bs_external_includes = "bs-external-includes" -let dependencies = "dependencies" -let bs_dependencies = "bs-dependencies" -let dev_dependencies = "dev-dependencies" -let bs_dev_dependencies = "bs-dev-dependencies" -let sources = "sources" -let dir = "dir" -let files = "files" -let subdirs = "subdirs" -let compiler_flags = "compiler-flags" -let bsc_flags = "bsc-flags" -let excludes = "excludes" -let slow_re = "slow-re" -let resources = "resources" -let public = "public" -let js_post_build = "js-post-build" -let cmd = "cmd" -let package_specs = "package-specs" -let type_ = "type" -let export_all = "all" -let export_none = "none" - -let jsx = "jsx" -let jsx_version = "version" -let jsx_module = "module" -let cut_generators = "cut-generators" -let generators = "generators" -let command = "command" -let edge = "edge" -let namespace = "namespace" -let in_source = "in-source" -let warnings = "warnings" -let number = "number" -let error = "error" -let suffix = "suffix" -let gentypeconfig = "gentypeconfig" -let language = "language" -let ignored_dirs = "ignored-dirs" diff --git a/compiler/bsb/bsb_build_util.ml b/compiler/bsb/bsb_build_util.ml deleted file mode 100644 index 72e8ed51410..00000000000 --- a/compiler/bsb/bsb_build_util.ml +++ /dev/null @@ -1,211 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. 
- * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let flag_concat flag xs = - String.concat Ext_string.single_space - (Ext_list.flat_map xs (fun x -> [flag; x])) - -let ( // ) = Ext_path.combine - -let ppx_flags (xs : Bsb_config_types.ppx list) = - flag_concat "-ppx" - (Ext_list.map xs (fun x -> - if x.args = [] then Ext_filename.maybe_quote x.name - else - let fmt : _ format = - if Ext_sys.is_windows_or_cygwin then "\"%s %s\"" else "'%s %s'" - in - Printf.sprintf fmt x.name (String.concat " " x.args))) - -let pp_flag (xs : string) = "-pp " ^ Ext_filename.maybe_quote xs - -let include_dirs dirs = - String.concat Ext_string.single_space - (Ext_list.flat_map dirs (fun x -> ["-I"; Ext_filename.maybe_quote x])) - -let include_dirs_by dirs fn = - String.concat Ext_string.single_space - (Ext_list.flat_map dirs (fun x -> ["-I"; Ext_filename.maybe_quote (fn x)])) - -(* we use lazy $src_root_dir *) - -(* It does several conversion: - First, it will convert unix path to windows backward on windows platform. - Then if it is absolute path, it will do thing - Else if it is relative path, it will be rebased on project's root directory *) - -let convert_and_resolve_path : string -> string -> string = - if Sys.unix then ( // ) - else fun cwd path -> - if Ext_sys.is_windows_or_cygwin then - let p = Ext_string.replace_slash_backward path in - cwd // p - else failwith ("Unknown OS :" ^ Sys.os_type) -(* we only need convert the path in the beginning *) - -type result = {path: string; checked: bool} - -(* Magic path resolution: - foo => foo - foo/ => /absolute/path/to/projectRoot/node_modules/foo - foo/bar => /absolute/path/to/projectRoot/node_modules/foo/bar - /foo/bar => /foo/bar - ./foo/bar => /absolute/path/to/projectRoot/./foo/bar - Input is node path, output is OS dependent (normalized) path -*) -let resolve_bsb_magic_file ~cwd ~desc p : result = - let no_slash = Ext_string.no_slash_idx p in - if no_slash < 0 then - (* Single file FIXME: better error message for "" input *) - {path = p; checked = false} - else - let 
first_char = String.unsafe_get p 0 in - if Filename.is_relative p && first_char <> '.' then - let package_name, rest = Bsb_pkg_types.extract_pkg_name_and_file p in - let relative_path = - if Ext_sys.is_windows_or_cygwin then - Ext_string.replace_slash_backward rest - else rest - in - (* let p = if Ext_sys.is_windows_or_cygwin then Ext_string.replace_slash_backward p else p in *) - let package_dir = Bsb_pkg.resolve_bs_package ~cwd package_name in - let path = package_dir // relative_path in - if Sys.file_exists path then {path; checked = true} - else ( - Bsb_log.error "@{Could not resolve @} %s in %s@." p cwd; - failwith (p ^ " not found when resolving " ^ desc)) - else - (* relative path [./x/y]*) - {path = convert_and_resolve_path cwd p; checked = true} - -(** converting a file from Linux path format to Windows *) - -(** - {[ - mkp "a/b/c/d";; - mkp "/a/b/c/d" - ]} -*) -let rec mkp dir = - if not (Sys.file_exists dir) then - let parent_dir = Filename.dirname dir in - if parent_dir = Filename.current_dir_name then Unix.mkdir dir 0o777 - (* leaf node *) - else ( - mkp parent_dir; - Unix.mkdir dir 0o777) - else if not @@ Sys.is_directory dir then - failwith (dir ^ " exists but it is not a directory, plz remove it first") - else () - -let get_list_string_acc (s : Ext_json_types.t array) acc = - Ext_array.to_list_map_acc s acc (fun x -> - match x with - | Str x -> Some x.str - | _ -> None) - -let get_list_string s = get_list_string_acc s [] - -(* Key is the path *) -let ( |? ) m (key, cb) = m |> Ext_json.test key cb - -type top = Expect_none | Expect_name of string - -type package_context = {proj_dir: string; top: top} - -(** - TODO: check duplicate package name - ?use path as identity? - - Basic requirements - 1. cycle detection - 2. avoid duplication - 3. 
deterministic, since -make-world will also comes with -clean-world - -*) - -let pp_packages_rev ppf lst = - Ext_list.rev_iter lst (fun s -> Format.fprintf ppf "%s " s) - -let rec walk_all_deps_aux (visited : string Hash_string.t) (paths : string list) - ~(top : top) (dir : string) (queue : _ Queue.t) = - match - Bsb_config_load.load_json ~per_proj_dir:dir ~warn_legacy_config:false - with - | _, Obj {map; loc} -> - let cur_package_name = - match Map_string.find_opt map Bsb_build_schemas.name with - | Some (Str {str; loc}) -> - (match top with - | Expect_none -> () - | Expect_name s -> - if s <> str then - Bsb_exception.errorf ~loc - "package name is expected to be %s but got %s" s str); - str - | Some _ | None -> - Bsb_exception.errorf ~loc "package name missing in %s/rescript.json" dir - in - if Ext_list.mem_string paths cur_package_name then ( - Bsb_log.error "@{Cyclic dependencies in package stack@}@."; - exit 2); - let package_stacks = cur_package_name :: paths in - Bsb_log.info "@{Package stack:@} %a @." pp_packages_rev package_stacks; - if Hash_string.mem visited cur_package_name then - Bsb_log.info "@{Visited before@} %s@." cur_package_name - else - let explore_deps (deps : string) = - map - |? 
( deps, - `Arr - (fun (new_packages : Ext_json_types.t array) -> - Ext_array.iter new_packages (fun js -> - match js with - | Str {str = new_package} -> - let package_dir = - Bsb_pkg.resolve_bs_package ~cwd:dir - (Bsb_pkg_types.string_as_package new_package) - in - walk_all_deps_aux visited package_stacks - ~top:(Expect_name new_package) package_dir queue - | _ -> Bsb_exception.errorf ~loc "%s expect an array" deps)) - ) - |> ignore - in - explore_deps Bsb_build_schemas.dependencies; - explore_deps Bsb_build_schemas.bs_dependencies; - (match top with - | Expect_none -> - explore_deps Bsb_build_schemas.dev_dependencies; - explore_deps Bsb_build_schemas.bs_dev_dependencies - | Expect_name _ -> ()); - Queue.add {top; proj_dir = dir} queue; - Hash_string.add visited cur_package_name dir - | _ -> () - -let walk_all_deps dir : package_context Queue.t = - let visited = Hash_string.create 0 in - let cb = Queue.create () in - walk_all_deps_aux visited [] ~top:Expect_none dir cb; - cb diff --git a/compiler/bsb/bsb_build_util.mli b/compiler/bsb/bsb_build_util.mli deleted file mode 100644 index b9375b1c17d..00000000000 --- a/compiler/bsb/bsb_build_util.mli +++ /dev/null @@ -1,87 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val flag_concat : string -> string list -> string -(** - Use: - {[ - flag_concat "-ppx" [ppxs] - ]} -*) - -val ppx_flags : Bsb_config_types.ppx list -> string -(** - Build quoted commandline arguments for bsc.exe for the given ppx flags - - Use: - {[ - ppx_flags [ppxs] - ]} -*) - -val pp_flag : string -> string - -val include_dirs : string list -> string -(** - Build unquoted command line arguments for bsc.exe for the given include dirs - - Use: - {[ - include_dirs [dirs] - ]} -*) - -val include_dirs_by : 'a list -> ('a -> string) -> string - -val mkp : string -> unit - -(* The path of [bsc] and [bsdep] is normalized so that the invokation of [./compiler/bin/bsb.exe] - and [bsb.exe] (combined with a dirty rescript.json) will not trigger unnecessary rebuild. - - The location of [bsc] and [bsdep] is configured by the combination of [Sys.executable_name] - and [cwd]. 
- - In theory, we should also check the integrity of [bsb.exe], if it is changed, the rebuild - should be regen, but that is too much in practice, not only you need check the integrity of - path of [bsb.exe] but also the timestamp, to make it 100% correct, also the integrity of - [bsdep.exe] [bsc.exe] etc. -*) - -val get_list_string_acc : Ext_json_types.t array -> string list -> string list - -val get_list_string : Ext_json_types.t array -> string list - -type top = Expect_none | Expect_name of string - -type result = {path: string; checked: bool} - -(* [resolve_bsb_magic_file] - returns a tuple (path,checked) - when checked is true, it means such file should exist without depending on env -*) -val resolve_bsb_magic_file : cwd:string -> desc:string -> string -> result - -type package_context = {proj_dir: string; top: top} - -val walk_all_deps : string -> package_context Queue.t diff --git a/compiler/bsb/bsb_clean.ml b/compiler/bsb/bsb_clean.ml deleted file mode 100644 index 1a5e801f842..00000000000 --- a/compiler/bsb/bsb_clean.ml +++ /dev/null @@ -1,62 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let ( // ) = Ext_path.combine - -let ninja_clean proj_dir = - try - let cmd = Bsb_global_paths.vendor_ninja in - let lib_artifacts_dir = Bsb_config.lib_bs in - let cwd = proj_dir // lib_artifacts_dir in - if Sys.file_exists cwd then - let eid = - Bsb_unix.run_command_execv {cmd; args = [|cmd; "-t"; "clean"|]; cwd} - in - if eid <> 0 then Bsb_log.warn "@{Failed@}@." - with e -> Bsb_log.warn "@{Failed@}: %s @." (Printexc.to_string e) - -let clean_bs_garbage proj_dir = - Bsb_log.info "@{Cleaning:@} in %s@." proj_dir; - let try_remove x = - let x = proj_dir // x in - if Sys.file_exists x then Bsb_unix.remove_dir_recursive x - in - try - Bsb_parse_sources.clean_re_js proj_dir; - (* clean re.js files*) - ninja_clean proj_dir; - Ext_list.iter Bsb_config.all_lib_artifacts try_remove - with e -> - Bsb_log.warn "@{Failed@} to clean due to %s" (Printexc.to_string e) - -let clean_bs_deps proj_dir = - let _, _ = Bsb_config_parse.deps_from_bsconfig () in - let queue = Bsb_build_util.walk_all_deps proj_dir in - Queue.iter - (fun (pkg_cxt : Bsb_build_util.package_context) -> - (* whether top or not always do the cleaning *) - clean_bs_garbage pkg_cxt.proj_dir) - queue - -let clean_self proj_dir = clean_bs_garbage proj_dir diff --git a/compiler/bsb/bsb_clean.mli b/compiler/bsb/bsb_clean.mli deleted file mode 100644 index bedebe82e79..00000000000 --- a/compiler/bsb/bsb_clean.mli +++ /dev/null @@ -1,31 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free 
software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -(** clean bsc generated artifacts. - TODO: clean stale in source js artifacts -*) - -val clean_bs_deps : string -> unit - -val clean_self : string -> unit diff --git a/compiler/bsb/bsb_config.ml b/compiler/bsb/bsb_config.ml deleted file mode 100644 index 6a1f586f988..00000000000 --- a/compiler/bsb/bsb_config.ml +++ /dev/null @@ -1,72 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) -let ( // ) = Ext_path.combine - -let lib_lit = "lib" - -let lib_js = lib_lit // "js" - -let lib_ocaml = lib_lit // "ocaml" - -let lib_bs = lib_lit // "bs" - -let lib_es6 = lib_lit // "es6" - -let lib_es6_global = lib_lit // "es6_global" - -let all_lib_artifacts = [lib_js; lib_ocaml; lib_bs; lib_es6; lib_es6_global] - -let rev_lib_bs = ".." // ".." - -(* access the js directory from "lib/bs", - it would be '../js' - - TODO: should be renamed, js -> cjs, es6 -> mjs in v12 -*) -let lib_bs_prefix_of_format (x : Ext_module_system.t) = - ".." 
- // - match x with - | Commonjs -> "js" - | Esmodule -> "es6" - | Es6_global -> "es6_global" - -(* lib/js, lib/es6, lib/es6_global *) -let top_prefix_of_format (x : Ext_module_system.t) = - match x with - | Commonjs -> lib_js - | Esmodule -> lib_es6 - | Es6_global -> lib_es6_global - -let rev_lib_bs_prefix p = rev_lib_bs // p - -let ocaml_bin_install_prefix p = lib_ocaml // p - -let proj_rel path = rev_lib_bs // path - -(** it may not be a bad idea to hard code the binary path - of bsb in configuration time -*) - -(* let cmd_package_specs = ref None *) diff --git a/compiler/bsb/bsb_config.mli b/compiler/bsb/bsb_config.mli deleted file mode 100644 index bcd6104c9ad..00000000000 --- a/compiler/bsb/bsb_config.mli +++ /dev/null @@ -1,49 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val ocaml_bin_install_prefix : string -> string - -val proj_rel : string -> string - -val lib_lit : string - -val lib_js : string - -val lib_bs : string - -val lib_es6 : string [@@ocaml.deprecated "will be removed in v12"] - -val lib_es6_global : string [@@ocaml.deprecated "will be removed in v12"] - -val lib_ocaml : string - -val all_lib_artifacts : string list - -(* we need generate path relative to [lib/bs] directory in the opposite direction *) -val rev_lib_bs_prefix : string -> string - -val lib_bs_prefix_of_format : Ext_module_system.t -> string - -val top_prefix_of_format : Ext_module_system.t -> string -(** default not install, only when -make-world, its dependencies will be installed *) diff --git a/compiler/bsb/bsb_config_load.ml b/compiler/bsb/bsb_config_load.ml deleted file mode 100644 index be46965801e..00000000000 --- a/compiler/bsb/bsb_config_load.ml +++ /dev/null @@ -1,26 +0,0 @@ -let ( // ) = Ext_path.combine - -let load_json ~(per_proj_dir : string) ~(warn_legacy_config : bool) : - string * Ext_json_types.t = - let filename, abs, in_chan = - let filename = Literals.rescript_json in - let abs = per_proj_dir // filename in - match open_in abs with - | in_chan -> (filename, abs, in_chan) - | exception e -> ( - let filename = Literals.bsconfig_json in - let abs = per_proj_dir // filename in - match open_in abs with - | in_chan -> (filename, abs, in_chan) - | exception _ -> raise e (* forward error from rescript.json *)) - in - if warn_legacy_config && filename = Literals.bsconfig_json then - print_endline - "Warning: bsconfig.json is deprecated. 
Migrate it to rescript.json\n"; - match Ext_json_parse.parse_json_from_chan abs in_chan with - | v -> - close_in in_chan; - (filename, v) - | exception e -> - close_in in_chan; - raise e diff --git a/compiler/bsb/bsb_config_load.mli b/compiler/bsb/bsb_config_load.mli deleted file mode 100644 index 7e8cd97857a..00000000000 --- a/compiler/bsb/bsb_config_load.mli +++ /dev/null @@ -1,2 +0,0 @@ -val load_json : - per_proj_dir:string -> warn_legacy_config:bool -> string * Ext_json_types.t diff --git a/compiler/bsb/bsb_config_parse.ml b/compiler/bsb/bsb_config_parse.ml deleted file mode 100644 index a185c56c443..00000000000 --- a/compiler/bsb/bsb_config_parse.ml +++ /dev/null @@ -1,330 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -(* let get_list_string = Bsb_build_util.get_list_string *) -let ( // ) = Ext_path.combine - -let resolve_package cwd package_name = - let x = Bsb_pkg.resolve_bs_package ~cwd package_name in - { - Bsb_config_types.package_name; - package_install_path = x // Bsb_config.lib_ocaml; - } - -type json_map = Ext_json_types.t Map_string.t - -(* Key is the path *) -let ( |? ) m (key, cb) = m |> Ext_json.test key cb -let ( .?() ) = Map_string.find_opt - -(*TODO: it is a little mess that [cwd] and [project dir] are shared*) - -let extract_package_name_and_namespace (map : json_map) : string * string option - = - let package_name = - match map.?(Bsb_build_schemas.name) with - | Some (Str {str = "_"} as config) -> - Bsb_exception.config_error config "_ is a reserved package name" - | Some (Str {str = name}) -> name - | Some config -> - Bsb_exception.config_error config "name expect a string field" - | None -> Bsb_exception.invalid_spec "field name is required" - in - let namespace = - match map.?(Bsb_build_schemas.namespace) with - | None | Some (False _) -> None - | Some (True _) -> - Some (Ext_namespace.namespace_of_package_name package_name) - | Some (Str {str}) -> - (*TODO : check the validity of namespace *) - Some (Ext_namespace.namespace_of_package_name str) - | Some x -> - Bsb_exception.config_error x "namespace field expects string or boolean" - in - (package_name, namespace) - -(** - There are two things to check: - - the running bsb and vendoring bsb is the same - - the running bsb need delete stale build artifacts - (kinda check npm upgrade) - - Note if the setup is correct: - the running compiler and node_modules/rescript - should be the same version, - The exact check is that the running compiler should have a - compatible runtime version 
installed, the location of the - compiler is actually not relevant. - We disable the check temporarily - e.g, - ``` - bsc -runtime runtime_dir@version - ``` -*) -let extract_gentype_config (map : json_map) : Bsb_config_types.gentype_config = - match map.?(Bsb_build_schemas.gentypeconfig) with - | None -> false - | Some (Obj _) -> true - | Some config -> - Bsb_exception.config_error config "gentypeconfig expect an object" - -let extract_string (map : json_map) (field : string) cb = - match map.?(field) with - | None -> None - | Some (Str {str}) -> cb str - | Some config -> Bsb_exception.config_error config (field ^ " expect a string") - -let extract_boolean (map : json_map) (field : string) (default : bool) : bool = - match map.?(field) with - | None -> default - | Some (True _) -> true - | Some (False _) -> false - | Some config -> - Bsb_exception.config_error config (field ^ " expect a boolean") - -let extract_warning (map : json_map) = - match map.?(Bsb_build_schemas.warnings) with - | None -> Bsb_warning.use_default - | Some (Obj {map}) -> Bsb_warning.from_map map - | Some config -> Bsb_exception.config_error config "expect an object" - -let extract_ignored_dirs (map : json_map) : Set_string.t = - match map.?(Bsb_build_schemas.ignored_dirs) with - | None -> Set_string.empty - | Some (Arr {content}) -> - Set_string.of_list (Bsb_build_util.get_list_string content) - | Some config -> Bsb_exception.config_error config "expect an array of string" - -let extract_generators (map : json_map) = - let generators = ref Map_string.empty in - (match map.?(Bsb_build_schemas.generators) with - | None -> () - | Some (Arr {content = s}) -> - generators := - Ext_array.fold_left s Map_string.empty (fun acc json -> - match json with - | Obj {map = m; loc} -> ( - match - (m.?(Bsb_build_schemas.name), m.?(Bsb_build_schemas.command)) - with - | Some (Str {str = name}), Some (Str {str = command}) -> - Map_string.add acc name command - | _, _ -> - Bsb_exception.errorf ~loc - {| 
generators exepect format like { "name" : "cppo", "command" : "cppo $in -o $out"} |} - ) - | _ -> acc) - | Some config -> - Bsb_exception.config_error config - (Bsb_build_schemas.generators ^ " expect an array field")); - !generators - -let extract_dependencies (map : json_map) cwd (field : string) : - Bsb_config_types.dependencies = - match map.?(field) with - | None -> [] - | Some (Arr {content = s}) -> - Ext_list.map (Bsb_build_util.get_list_string s) (fun s -> - resolve_package cwd (Bsb_pkg_types.string_as_package s)) - | Some config -> Bsb_exception.config_error config (field ^ " expect an array") - -(* return an empty array if not found *) -let extract_string_list (map : json_map) (field : string) : string list = - match map.?(field) with - | None -> [] - | Some (Arr {content = s}) -> Bsb_build_util.get_list_string s - | Some config -> Bsb_exception.config_error config (field ^ " expect an array") - -let extract_ppx (map : json_map) (field : string) ~(cwd : string) : - Bsb_config_types.ppx list = - match map.?(field) with - | None -> [] - | Some (Arr {content}) -> - let resolve s = - if s = "" then - Bsb_exception.invalid_spec "invalid ppx, empty string found" - else - (Bsb_build_util.resolve_bsb_magic_file ~cwd - ~desc:Bsb_build_schemas.ppx_flags s) - .path - in - Ext_array.to_list_f content (fun x -> - match x with - | Str x -> {Bsb_config_types.name = resolve x.str; args = []} - | Arr {content} -> ( - let xs = Bsb_build_util.get_list_string content in - match xs with - | [] -> Bsb_exception.config_error x " empty array is not allowed" - | name :: args -> {Bsb_config_types.name = resolve name; args}) - | config -> - Bsb_exception.config_error config - (field ^ "expect each item to be either string or array")) - | Some config -> Bsb_exception.config_error config (field ^ " expect an array") - -let extract_js_post_build (map : json_map) cwd : string option = - let js_post_build_cmd = ref None in - map - |? 
( Bsb_build_schemas.js_post_build, - `Obj - (fun m -> - m - |? ( Bsb_build_schemas.cmd, - `Str - (fun s -> - js_post_build_cmd := - Some - (Bsb_build_util.resolve_bsb_magic_file ~cwd - ~desc:Bsb_build_schemas.js_post_build s) - .path) ) - |> ignore) ) - |> ignore; - !js_post_build_cmd - -(** ATT: make sure such function is re-entrant. - With a given [cwd] it works anywhere*) -let interpret_json ~(filename : string) ~(json : Ext_json_types.t) - ~(package_kind : Bsb_package_kind.t) ~(per_proj_dir : string) : - Bsb_config_types.t = - (* we should not resolve it too early, - since it is external configuration, no {!Bsb_build_util.convert_and_resolve_path} - *) - - (* When we plan to add more deps here, - Make sure check it is consistent that for nested deps, we have a - quck check by just re-parsing deps - Make sure it works with [-make-world] [-clean-world] - *) - - (* Setting ninja is a bit complex - 1. if [build.ninja] does use [ninja] we need set a variable - 2. we need store it so that we can call ninja correctly - *) - match json with - | Obj {map} -> ( - let package_name, namespace = extract_package_name_and_namespace map in - let gentype_config = extract_gentype_config map in - - (* This line has to be before any calls to Bsb_global_backend.backend, because it'll read the entries - array from the bsconfig and set the backend_ref to the first entry, if any. 
*) - let pp_flags : string option = - extract_string map Bsb_build_schemas.pp_flags (fun p -> - if p = "" then - Bsb_exception.invalid_spec "invalid pp, empty string found" - else - Some - (Bsb_build_util.resolve_bsb_magic_file ~cwd:per_proj_dir - ~desc:Bsb_build_schemas.pp_flags p) - .path) - in - let bs_dependencies = - let dependencies = - extract_dependencies map per_proj_dir Bsb_build_schemas.dependencies - in - if dependencies == [] then - extract_dependencies map per_proj_dir Bsb_build_schemas.bs_dependencies - else dependencies - in - let bs_dev_dependencies = - match package_kind with - | Toplevel -> - let dev_dependencies = - extract_dependencies map per_proj_dir - Bsb_build_schemas.dev_dependencies - in - if dev_dependencies == [] then - extract_dependencies map per_proj_dir - Bsb_build_schemas.bs_dev_dependencies - else dev_dependencies - | Dependency _ -> [] - in - match map.?(Bsb_build_schemas.sources) with - | Some sources -> - let cut_generators = - extract_boolean map Bsb_build_schemas.cut_generators false - in - let groups = - Bsb_parse_sources.scan ~ignored_dirs:(extract_ignored_dirs map) - ~package_kind ~root:per_proj_dir ~cut_generators - (* ~namespace *) - sources - in - let bsc_flags = - let compiler_flags = - extract_string_list map Bsb_build_schemas.compiler_flags - in - if compiler_flags == [] then - extract_string_list map Bsb_build_schemas.bsc_flags - else compiler_flags - in - let jsx = Bsb_jsx.from_map map in - let jsx, bsc_flags = - match package_kind with - | Dependency x -> ({jsx with version = x.jsx.version}, bsc_flags) - | Toplevel -> (jsx, bsc_flags) - in - { - gentype_config; - package_name; - namespace; - warning = extract_warning map; - external_includes = - extract_string_list map Bsb_build_schemas.bs_external_includes; - bsc_flags; - ppx_files = - extract_ppx map ~cwd:per_proj_dir Bsb_build_schemas.ppx_flags; - pp_file = pp_flags; - bs_dependencies; - bs_dev_dependencies; - (* - reference for quoting - {[ - let tmpfile = 
Filename.temp_file "ocamlpp" "" in - let comm = Printf.sprintf "%s %s > %s" - pp (Filename.quote sourcefile) tmpfile - in - ]} - *) - js_post_build_cmd = extract_js_post_build map per_proj_dir; - package_specs = - (match package_kind with - | Toplevel -> Bsb_package_specs.from_map ~cwd:per_proj_dir map - | Dependency x -> x.package_specs); - file_groups = groups; - files_to_install = Queue.create (); - jsx; - generators = extract_generators map; - cut_generators; - filename; - } - | None -> Bsb_exception.invalid_spec ("no sources specified in " ^ filename) - ) - | _ -> Bsb_exception.invalid_spec (filename ^ " expect a json object {}") - -let deps_from_bsconfig () = - let cwd = Bsb_global_paths.cwd in - match - Bsb_config_load.load_json ~per_proj_dir:cwd ~warn_legacy_config:false - with - | _, Obj {map} -> (Bsb_package_specs.from_map ~cwd map, Bsb_jsx.from_map map) - | _, _ -> assert false diff --git a/compiler/bsb/bsb_config_parse.mli b/compiler/bsb/bsb_config_parse.mli deleted file mode 100644 index fe9e7f23ed0..00000000000 --- a/compiler/bsb/bsb_config_parse.mli +++ /dev/null @@ -1,32 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val deps_from_bsconfig : unit -> Bsb_package_specs.t * Bsb_jsx.t - -val interpret_json : - filename:string -> - json:Ext_json_types.t -> - package_kind:Bsb_package_kind.t -> - per_proj_dir:string -> - Bsb_config_types.t diff --git a/compiler/bsb/bsb_config_types.ml b/compiler/bsb/bsb_config_types.ml deleted file mode 100644 index 1d026dee53f..00000000000 --- a/compiler/bsb/bsb_config_types.ml +++ /dev/null @@ -1,60 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -type dependency = {package_name: Bsb_pkg_types.t; package_install_path: string} - -type dependencies = dependency list - -type gentype_config = bool -type command = string -type ppx = {name: string; args: string list} - -type t = { - package_name: string; - (* [captial-package] *) - namespace: string option; - (* CapitalPackage *) - external_includes: string list; - bsc_flags: string list; - ppx_files: ppx list; - pp_file: string option; - bs_dependencies: dependencies; - bs_dev_dependencies: dependencies; - warning: Bsb_warning.t; - (*TODO: maybe we should always resolve rescript - so that we can calculate correct relative path in - [.merlin] - *) - js_post_build_cmd: string option; - package_specs: Bsb_package_specs.t; - file_groups: Bsb_file_groups.t; - files_to_install: Bsb_db.module_info Queue.t; - jsx: Bsb_jsx.t; - (* whether apply PPX transform or not*) - generators: command Map_string.t; - cut_generators: bool; - (* note when used as a dev mode, we will always ignore it *) - gentype_config: gentype_config; - filename: string; -} diff --git a/compiler/bsb/bsb_db_encode.ml b/compiler/bsb/bsb_db_encode.ml deleted file mode 100644 index 083e0188ef5..00000000000 --- a/compiler/bsb/bsb_db_encode.ml +++ /dev/null @@ -1,98 +0,0 @@ -(* Copyright (C) 2019 - Present Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let bsbuild_cache = Literals.bsbuild_cache - -let nl buf = Ext_buffer.add_char buf '\n' - -(* IDEAS: - Pros: - - could be even shortened to a single byte - Cons: - - decode would allocate - - code too verbose - - not readable -*) - -let make_encoding length buf : Ext_buffer.t -> int -> unit = - let max_range = (length lsl 1) + 1 in - if max_range <= 0xff then ( - Ext_buffer.add_char buf '1'; - Ext_buffer.add_int_1) - else if max_range <= 0xff_ff then ( - Ext_buffer.add_char buf '2'; - Ext_buffer.add_int_2) - else if length <= 0x7f_ff_ff then ( - Ext_buffer.add_char buf '3'; - Ext_buffer.add_int_3) - else if length <= 0x7f_ff_ff_ff then ( - Ext_buffer.add_char buf '4'; - Ext_buffer.add_int_4) - else assert false - -(* Make sure [tmp_buf1] and [tmp_buf2] is cleared , - they are only used to control the order. 
- Strictly speaking, [tmp_buf1] is not needed -*) -let encode_single (db : Bsb_db.map) (buf : Ext_buffer.t) = - (* module name section *) - let len = Map_string.cardinal db in - Ext_buffer.add_string_char buf (string_of_int len) '\n'; - if len <> 0 then ( - let mapping = Hash_string.create 50 in - Map_string.iter db (fun name {dir} -> - Ext_buffer.add_string_char buf name '\n'; - if not (Hash_string.mem mapping dir) then - Hash_string.add mapping dir (Hash_string.length mapping)); - let length = Hash_string.length mapping in - let rev_mapping = Array.make length "" in - Hash_string.iter mapping (fun k i -> Array.unsafe_set rev_mapping i k); - (* directory name section *) - Ext_array.iter rev_mapping (fun s -> Ext_buffer.add_string_char buf s '\t'); - nl buf; - (* module name info section *) - let len_encoding = make_encoding length buf in - Map_string.iter db (fun _ module_info -> - len_encoding buf - ((Hash_string.find_exn mapping module_info.dir lsl 1) - + (Obj.magic (module_info.case : bool) : int))); - nl buf) - -let encode (dbs : Bsb_db.t) buf = - encode_single dbs.lib buf; - encode_single dbs.dev buf - -(* shall we avoid writing such file (checking the digest)? 
- It is expensive to start scanning the whole code base, - we should we avoid it in the first place, if we do start scanning, - this operation seems affordable -*) -let write_build_cache ~dir (bs_files : Bsb_db.t) : string = - let oc = open_out_bin (Filename.concat dir bsbuild_cache) in - let buf = Ext_buffer.create 100_000 in - encode bs_files buf; - Ext_buffer.output_buffer oc buf; - close_out oc; - let digest = Ext_buffer.digest buf in - Digest.to_hex digest diff --git a/compiler/bsb/bsb_db_encode.mli b/compiler/bsb/bsb_db_encode.mli deleted file mode 100644 index e7698830f17..00000000000 --- a/compiler/bsb/bsb_db_encode.mli +++ /dev/null @@ -1,27 +0,0 @@ -(* Copyright (C) 2019 - Present Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val encode : Bsb_db.t -> Ext_buffer.t -> unit - -val write_build_cache : dir:string -> Bsb_db.t -> string diff --git a/compiler/bsb/bsb_db_util.ml b/compiler/bsb/bsb_db_util.ml deleted file mode 100644 index 94b54d18efa..00000000000 --- a/compiler/bsb/bsb_db_util.ml +++ /dev/null @@ -1,101 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) -type module_info = Bsb_db.module_info - -type t = Bsb_db.map - -let conflict_module_info modname (a : module_info) (b : module_info) = - Bsb_exception.conflict_module modname a.dir b.dir - -(* merge data info from two directories*) -let merge (acc : t) (sources : t) : t = - Map_string.disjoint_merge_exn acc sources conflict_module_info - -let sanity_check (map : t) = - Map_string.iter map (fun m module_info -> - if module_info.info = Intf then Bsb_exception.no_implementation m) - -(* invariant check: - ml and mli should have the same case, same path -*) -let check (x : module_info) name_sans_extension case (module_info : Bsb_db.info) - = - let x_ml_info = x.info in - if - x.name_sans_extension <> name_sans_extension - || x.case <> case || x_ml_info = module_info || x_ml_info = Impl_intf - then - Bsb_exception.invalid_spec - (Printf.sprintf - "implementation and interface have different path names or different \ - cases %s vs %s" - x.name_sans_extension name_sans_extension); - x.info <- Impl_intf; - x - -let warning_unused_file : _ format = - "@{IGNORED@}: file %s under %s is ignored because it can't be \ - turned into a valid module name. \n\ - The build system transforms a file name into a module name by upper-casing \ - the first letter@." 
-(* TODO: add a link for more explanations *) - -let is_editor_temporary_files basename = Ext_string.starts_with basename ".#" - -(* - Example: .#hi.ml - Note for other files like ~, .swp - it does not pass the suffix rules -*) -let add_basename ~(dir : string) (map : t) ?error_on_invalid_suffix basename : t - = - if is_editor_temporary_files basename then map - else - let info = ref Bsb_db.Impl in - let invalid_suffix = ref false in - let file_suffix = Ext_filename.get_extension_maybe basename in - (match () with - | _ when file_suffix = Literals.suffix_res -> () - | _ when file_suffix = Literals.suffix_resi -> info := Intf - | _ -> invalid_suffix := true); - let info = !info in - let invalid_suffix = !invalid_suffix in - if invalid_suffix then - match error_on_invalid_suffix with - | None -> map - | Some loc -> Bsb_exception.errorf ~loc "invalid suffix %s" basename - else - match Ext_filename.as_module ~basename:(Filename.basename basename) with - | None -> - Bsb_log.warn warning_unused_file basename dir; - map - | Some {module_name; case} -> - let name_sans_extension = - Filename.concat dir (Ext_filename.chop_extension_maybe basename) - in - let dir = Filename.dirname name_sans_extension in - Map_string.adjust map module_name (fun opt_module_info -> - match opt_module_info with - | None -> {dir; name_sans_extension; info; case} - | Some x -> check x name_sans_extension case info) diff --git a/compiler/bsb/bsb_db_util.mli b/compiler/bsb/bsb_db_util.mli deleted file mode 100644 index d4ddf79f403..00000000000 --- a/compiler/bsb/bsb_db_util.mli +++ /dev/null @@ -1,42 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val conflict_module_info : - string -> Bsb_db.module_info -> Bsb_db.module_info -> exn - -val merge : Bsb_db.map -> Bsb_db.map -> Bsb_db.map - -val sanity_check : Bsb_db.map -> unit - -(** - Currently it is okay to have duplicated module, - In the future, we may emit a warning -*) - -val add_basename : - dir:string -> - Bsb_db.map -> - ?error_on_invalid_suffix:Ext_position.t -> - string -> - Bsb_db.map diff --git a/compiler/bsb/bsb_exception.ml b/compiler/bsb/bsb_exception.ml deleted file mode 100644 index 121e902bb6a..00000000000 --- a/compiler/bsb/bsb_exception.ml +++ /dev/null @@ -1,92 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type error = - | Package_not_found of Bsb_pkg_types.t - | Json_config of Ext_position.t * string - | Invalid_spec of string - | Conflict_module of string * string * string - | No_implementation of string - -exception Error of error - -let error err = raise (Error err) - -let package_not_found ~pkg = error (Package_not_found pkg) - -let print (fmt : Format.formatter) (x : error) = - match x with - | Conflict_module (modname, dir1, dir2) -> - Format.fprintf fmt - "@{Error:@} %s found in two directories: (%s, %s)\n\ - File names must be unique per project" - modname dir1 dir2 - | No_implementation modname -> - Format.fprintf fmt "@{Error:@} %s does not have implementation file" - modname - | Package_not_found name -> - let name = Bsb_pkg_types.to_string name in - if Ext_string.equal name Runtime_package.name then - Format.fprintf fmt - "File \"rescript.json\", line 1\n\ - @{Error:@} package @{%s@} is not found\n\ - It's the basic, required package. 
If you have it installed globally,\n\ - Run `npm link rescript` to make it available." - name - else - Format.fprintf fmt - "File \"rescript.json\", line 1\n\ - @{Error:@} package @{%s@} not found or built\n\ - - Did you install it?" - name - | Json_config (pos, s) -> - Format.fprintf fmt - "File %S, line %d:\n\ - @{Error:@} %s \n\ - For more details, check out the schema at \ - https://rescript-lang.org/docs/manual/latest/build-configuration-schema" - pos.pos_fname pos.pos_lnum s - | Invalid_spec s -> - Format.fprintf fmt "@{Error: Invalid rescript.json: %s@}" s - -let conflict_module modname dir1 dir2 = - Error (Conflict_module (modname, dir1, dir2)) - -let no_implementation modname = error (No_implementation modname) - -let errorf ~loc fmt = - Format.ksprintf (fun s -> error (Json_config (loc, s))) fmt - -let config_error config fmt = - let loc = Ext_json.loc_of config in - - error (Json_config (loc, fmt)) - -let invalid_spec s = error (Invalid_spec s) - -let () = - Printexc.register_printer (fun x -> - match x with - | Error x -> Some (Format.asprintf "%a" print x) - | _ -> None) diff --git a/compiler/bsb/bsb_exception.mli b/compiler/bsb/bsb_exception.mli deleted file mode 100644 index babe37c8c2f..00000000000 --- a/compiler/bsb/bsb_exception.mli +++ /dev/null @@ -1,44 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type error -(** - This module is used for fatal errros -*) - -exception Error of error - -val print : Format.formatter -> error -> unit - -val package_not_found : pkg:Bsb_pkg_types.t -> 'a - -val conflict_module : string -> string -> string -> exn - -val errorf : loc:Ext_position.t -> ('a, unit, string, 'b) format4 -> 'a - -val config_error : Ext_json_types.t -> string -> 'a - -val invalid_spec : string -> 'a - -val no_implementation : string -> 'a diff --git a/compiler/bsb/bsb_file_groups.ml b/compiler/bsb/bsb_file_groups.ml deleted file mode 100644 index 3b5757a06a3..00000000000 --- a/compiler/bsb/bsb_file_groups.ml +++ /dev/null @@ -1,73 +0,0 @@ -(* Copyright (C) 2018 - Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -type public = Export_none | Export_all | Export_set of Set_string.t - -type build_generator = { - input: string list; - output: string list; - command: string; -} - -type file_group = { - dir: string; - sources: Bsb_db.map; - resources: string list; - public: public; - is_dev: bool; - generators: build_generator list; - (* output of [generators] should be added to [sources], - if it is [.ml,.mli,.res,.resi] - *) -} - -type file_groups = file_group list - -type t = {files: file_groups; globbed_dirs: string list} - -let empty : t = {files = []; globbed_dirs = []} - -let merge (u : t) (v : t) = - if u == empty then v - else if v == empty then u - else - { - files = Ext_list.append u.files v.files; - globbed_dirs = Ext_list.append u.globbed_dirs v.globbed_dirs; - } - -let cons ~file_group ?globbed_dir (v : t) : t = - { - files = file_group :: v.files; - globbed_dirs = - (match globbed_dir with - | None -> v.globbed_dirs - | Some f -> f :: v.globbed_dirs); - } - -(** when [is_empty file_group] - we don't need issue [-I] [-S] in [.merlin] file -*) -let is_empty (x : file_group) = - Map_string.is_empty x.sources && x.resources = [] && x.generators = [] diff --git a/compiler/bsb/bsb_file_groups.mli b/compiler/bsb/bsb_file_groups.mli deleted file mode 100644 index 521b40924db..00000000000 --- a/compiler/bsb/bsb_file_groups.mli +++ /dev/null @@ -1,56 +0,0 @@ -(* Copyright (C) 2018 - Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -type public = Export_none | Export_all | Export_set of Set_string.t - -type build_generator = { - input: string list; - output: string list; - command: string; -} - -type file_group = { - dir: string; - sources: Bsb_db.map; - resources: string list; - public: public; - is_dev: bool; - (* false means not in dev mode *) - generators: build_generator list; - (* output of [generators] should be added to [sources], - if it is [.ml,.mli,.res,.resi] - *) -} - -type file_groups = file_group list - -type t = private {files: file_groups; globbed_dirs: string list} - -val empty : t - -val merge : t -> t -> t - -val cons : file_group:file_group -> ?globbed_dir:string -> t -> t - -val is_empty : file_group -> bool diff --git a/compiler/bsb/bsb_global_paths.ml b/compiler/bsb/bsb_global_paths.ml deleted file mode 100644 index 97ccc5f3996..00000000000 --- a/compiler/bsb/bsb_global_paths.ml +++ /dev/null @@ -1,58 +0,0 @@ -(* Copyright (C) 2019 - Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let cwd = Sys.getcwd () - -(** - If [Sys.executable_name] gives an absolute path, - nothing needs to be done. - - If [Sys.executable_name] is not an absolute path, for example - (rlwrap ./ocaml) - it is a relative path, - it needs be adapted based on cwd - - if [Sys.executable_name] gives an absolute path, - nothing needs to be done - if it is a relative path - - there are two cases: - - bsb.exe - - ./bsb.exe - The first should also not be touched - Only the latter need be adapted based on project root -*) - -let bsc_dir = - Filename.dirname - (Ext_path.normalize_absolute_path - (Ext_path.combine cwd Sys.executable_name)) - -let vendor_bsc = Filename.concat bsc_dir "bsc.exe" - -let vendor_ninja = Filename.concat bsc_dir "ninja.exe" - -let vendor_bsdep = Filename.concat bsc_dir "bsb_helper.exe";; - -assert (Sys.file_exists bsc_dir) diff --git a/compiler/bsb/bsb_global_paths.mli b/compiler/bsb/bsb_global_paths.mli deleted file mode 100644 index 85dda4de833..00000000000 --- a/compiler/bsb/bsb_global_paths.mli +++ /dev/null @@ -1,33 +0,0 @@ -(* Copyright (C) 2019 - Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val cwd : string - -val bsc_dir : string - -val vendor_bsc : string - -val vendor_ninja : string - -val vendor_bsdep : string diff --git a/compiler/bsb/bsb_jsx.ml b/compiler/bsb/bsb_jsx.ml deleted file mode 100644 index 84c0da9e6ae..00000000000 --- a/compiler/bsb/bsb_jsx.ml +++ /dev/null @@ -1,57 +0,0 @@ -type version = Jsx_v4 -type module_ = React | Generic of {moduleName: string} -type dependencies = string list - -type t = {version: version option; module_: module_ option} - -let encode_no_nl jsx = - (match jsx.version with - | None -> "" - | Some Jsx_v4 -> "4") - ^ - match jsx.module_ with - | None -> "" - | Some React -> "React" - | Some (Generic {moduleName}) -> moduleName - -let ( .?() ) = Map_string.find_opt -let ( |? 
) m (key, cb) = m |> Ext_json.test key cb - -let get_list_string_acc (s : Ext_json_types.t array) acc = - Ext_array.to_list_map_acc s acc (fun x -> - match x with - | Str x -> Some x.str - | _ -> None) - -let get_list_string s = get_list_string_acc s [] - -let from_map map = - let version : version option ref = ref None in - let module_ : module_ option ref = ref None in - map - |? ( Bsb_build_schemas.jsx, - `Obj - (fun m -> - match m.?(Bsb_build_schemas.jsx_version) with - | Some (Flo {loc; flo}) -> ( - match flo with - | "4" -> version := Some Jsx_v4 - | _ -> Bsb_exception.errorf ~loc "Unsupported jsx version %s" flo) - | Some x -> - Bsb_exception.config_error x - "Unexpected input (expect a version number) for jsx version" - | None -> ()) ) - |? ( Bsb_build_schemas.jsx, - `Obj - (fun m -> - match m.?(Bsb_build_schemas.jsx_module) with - | Some (Str {str}) -> ( - match str with - | "react" -> module_ := Some React - | moduleName -> module_ := Some (Generic {moduleName})) - | Some x -> - Bsb_exception.config_error x - "Unexpected input (jsx module name) for jsx module" - | None -> ()) ) - |> ignore; - {version = !version; module_ = !module_} diff --git a/compiler/bsb/bsb_log.ml b/compiler/bsb/bsb_log.ml deleted file mode 100644 index 3c890deaafa..00000000000 --- a/compiler/bsb/bsb_log.ml +++ /dev/null @@ -1,98 +0,0 @@ -(* Copyright (C) 2017- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let ninja_ansi_forced = - lazy (try Sys.getenv "NINJA_ANSI_FORCED" with Not_found -> "") - -let color_enabled = lazy (Unix.isatty Unix.stdout) - -(* same logic as [ninja.exe] *) -let get_color_enabled () = - let colorful = - match ninja_ansi_forced with - | (lazy "1") -> true - | (lazy ("0" | "false")) -> false - | _ -> Lazy.force color_enabled - in - colorful - -let color_functions : Format.formatter_stag_functions = - { - mark_open_stag = - (fun s -> - if get_color_enabled () then Ext_color.ansi_of_tag s - else Ext_string.empty); - mark_close_stag = - (fun _ -> - if get_color_enabled () then Ext_color.reset_lit else Ext_string.empty); - print_open_stag = (fun _ -> ()); - print_close_stag = (fun _ -> ()); - } - -(* let set_color ppf = - Format.pp_set_formatter_tag_functions ppf color_functions *) - -let setup () = - Format.pp_set_mark_tags Format.std_formatter true; - Format.pp_set_mark_tags Format.err_formatter true; - Format.pp_set_formatter_stag_functions Format.std_formatter color_functions; - Format.pp_set_formatter_stag_functions Format.err_formatter color_functions - -type level = Debug | Info | Warn | Error - -let int_of_level (x : level) = - match x with - | Debug -> 0 - | Info -> 1 - | Warn -> 2 - | Error -> 3 - -let log_level = ref Warn - -let verbose () = log_level := Debug - -let dfprintf level fmt = - if int_of_level level >= int_of_level !log_level then Format.fprintf fmt - else Format.ifprintf fmt - -type 'a fmt = Format.formatter -> ('a, Format.formatter, unit) format -> 'a - -type 'a log = ('a, Format.formatter, unit) format -> 'a - -let debug fmt = dfprintf Debug Format.std_formatter fmt - -let info fmt = dfprintf Info Format.std_formatter fmt - -let warn fmt = dfprintf Warn Format.err_formatter fmt - -let error fmt = dfprintf Error Format.err_formatter fmt - -let info_args (args : string array) = - if int_of_level Info >= int_of_level !log_level then ( - for i = 0 to Array.length args - 1 do - Format.pp_print_string Format.std_formatter 
(Array.unsafe_get args i); - Format.pp_print_string Format.std_formatter Ext_string.single_space - done; - Format.pp_print_newline Format.std_formatter ()) - else () diff --git a/compiler/bsb/bsb_log.mli b/compiler/bsb/bsb_log.mli deleted file mode 100644 index de1f1d6f924..00000000000 --- a/compiler/bsb/bsb_log.mli +++ /dev/null @@ -1,45 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -val setup : unit -> unit - -type level = Debug | Info | Warn | Error - -val log_level : level ref - -type 'a fmt = Format.formatter -> ('a, Format.formatter, unit) format -> 'a - -type 'a log = ('a, Format.formatter, unit) format -> 'a - -val verbose : unit -> unit - -val debug : 'a log - -val info : 'a log - -val warn : 'a log - -val error : 'a log - -val info_args : string array -> unit diff --git a/compiler/bsb/bsb_namespace_map_gen.ml b/compiler/bsb/bsb_namespace_map_gen.ml deleted file mode 100644 index 5a9b5b4279a..00000000000 --- a/compiler/bsb/bsb_namespace_map_gen.ml +++ /dev/null @@ -1,57 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let ( // ) = Ext_path.combine - -let write_file fname digest contents = - let oc = open_out_bin fname in - Digest.output oc digest; - output_char oc '\n'; - Ext_buffer.output_buffer oc contents; - close_out oc - -(* - TODO: - sort filegroupts to ensure deterministic behavior - - if [.bsbuild] is not changed - [.mlmap] does not need to be changed too - -*) -let output ~dir (namespace : string) (file_groups : Bsb_file_groups.file_groups) - = - let fname = namespace ^ Literals.suffix_mlmap in - let buf = Ext_buffer.create 10000 in - Ext_list.iter file_groups (fun x -> - Map_string.iter x.sources (fun k _ -> - Ext_buffer.add_string_char buf k '\n')); - (* let contents = Buffer.contents buf in *) - let digest = Ext_buffer.digest buf in - let fname = dir // fname in - if Sys.file_exists fname then ( - let ic = open_in_bin fname in - let old_digest = really_input_string ic Ext_digest.length in - close_in ic; - if old_digest <> digest then write_file fname digest buf) - else write_file fname digest buf diff --git a/compiler/bsb/bsb_namespace_map_gen.mli b/compiler/bsb/bsb_namespace_map_gen.mli deleted file mode 100644 index 6090fd80370..00000000000 --- a/compiler/bsb/bsb_namespace_map_gen.mli +++ /dev/null @@ -1,29 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val output : dir:string -> string -> Bsb_file_groups.file_groups -> unit -(** [output dir namespace file_groups] - when [build.ninja] is generated, we output a module map [.mlmap] file - such [.mlmap] file will be consumed by [bsc.exe] to generate [.cmi] file -*) diff --git a/compiler/bsb/bsb_ninja_check.ml b/compiler/bsb/bsb_ninja_check.ml deleted file mode 100644 index 1278882ebea..00000000000 --- a/compiler/bsb/bsb_ninja_check.ml +++ /dev/null @@ -1,157 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -[@@@warning "+9"] - -(* float_of_string_opt *) -external hexstring_of_float : float -> int -> char -> string - = "caml_hexstring_of_float" - -let hex_of_float f = hexstring_of_float f (-1) '-' - -(* This should not lose any preicision *) -(* let id (f : float) = - float_of_string (hex_of_float f) = f -*) - -type check_result = - | Good - | Bsb_file_corrupted - | Bsb_file_not_exist (** We assume that it is a clean repo *) - | Bsb_source_directory_changed - | Bsb_bsc_version_mismatch - | Bsb_forced - | Bsb_package_kind_inconsistent - | Bsb_regenerate_required - | Other of string - -let pp_check_result fmt (check_resoult : check_result) = - Format.pp_print_string fmt - (match check_resoult with - | Good -> "OK" - | Bsb_file_corrupted -> "Stored data corrupted" - | Bsb_file_not_exist -> "Dependencies information missing" - | Bsb_source_directory_changed -> "Bsb source directory changed" - | Bsb_bsc_version_mismatch -> "Bsc or bsb version mismatch" - | Bsb_forced -> "Bsb forced rebuild" - | Bsb_package_kind_inconsistent -> "The package was built in different mode" - | Bsb_regenerate_required -> "Bsb need regenerate build.ninja" - | Other s -> s) - -let rec check_aux cwd (xs : string list) = - match xs with - | [] -> Good - | "===" :: rest -> check_global_atime rest - | item :: rest -> ( - match Ext_string.split item '\t' with - | [file; stamp] -> - let stamp = float_of_string stamp in - let cur_file = Filename.concat cwd file in - let stat = Unix.stat cur_file in - if stat.st_mtime <= stamp then check_aux cwd rest else Other cur_file - | _ -> Bsb_file_corrupted) - -and check_global_atime rest = - match rest with - | [] -> Good - | item :: rest -> ( - match Ext_string.split item '\t' with - | [file; stamp] -> - let stamp = float_of_string stamp in - let cur_file = file in - let stat = Unix.stat cur_file in - if stat.st_atime <= stamp then check_global_atime rest else Other cur_file - | _ -> Bsb_file_corrupted) - -(* TODO: for such small data structure, maybe text 
format is better *) - -let record_global_atime buf name = - let stamp = (Unix.stat name).st_atime in - Ext_buffer.add_string_char buf name '\t'; - Ext_buffer.add_string_char buf (hex_of_float stamp) '\n' - -let record ~(package_kind : Bsb_package_kind.t) ~per_proj_dir ~file - ~(config : Bsb_config_types.t) ~(warn_as_error : string option) - (file_or_dirs : string list) : unit = - let buf = Ext_buffer.create 1_000 in - Ext_buffer.add_string_char buf Bs_version.version '\n'; - Ext_buffer.add_string_char buf per_proj_dir '\n'; - Ext_buffer.add_string_char buf - (Bsb_package_kind.encode_no_nl package_kind) - '\n'; - Ext_buffer.add_string_char buf - (match warn_as_error with - | Some s -> s - | None -> "0") - '\n'; - Ext_list.iter file_or_dirs (fun f -> - Ext_buffer.add_string_char buf f '\t'; - Ext_buffer.add_string_char buf - (hex_of_float (Unix.stat (Filename.concat per_proj_dir f)).st_mtime) - '\n'); - Ext_buffer.add_string buf "===\n"; - record_global_atime buf Sys.executable_name; - Ext_list.iter config.ppx_files (fun {name; args = _} -> - try record_global_atime buf name - with _ -> - (* record the ppx files as a best effort *) - ()); - let oc = open_out_bin file in - Ext_buffer.output_buffer oc buf; - close_out oc - -(** check time stamp for all files - TODO: those checks system call can be saved later - Return a reason - Even forced, we still need walk through a little - bit in case we found a different version of compiler -*) -let check ~(package_kind : Bsb_package_kind.t) ~(per_proj_dir : string) ~forced - ~(warn_as_error : string option) ~file : check_result = - match open_in_bin file with - (* Windows binary mode*) - | exception _ -> Bsb_file_not_exist - | ic -> ( - match List.rev (Ext_io.rev_lines_of_chann ic) with - | exception _ -> Bsb_file_corrupted - | version :: source_directory :: package_kind_str :: previous_warn_as_error - :: dir_or_files -> ( - let warn_as_error_changed = - match warn_as_error with - | None -> previous_warn_as_error <> "0" - | 
Some current -> current <> previous_warn_as_error - in - - if version <> Bs_version.version then Bsb_bsc_version_mismatch - else if per_proj_dir <> source_directory then Bsb_source_directory_changed - else if forced then Bsb_forced (* No need walk through *) - else if Bsb_package_kind.encode_no_nl package_kind <> package_kind_str - then Bsb_package_kind_inconsistent - else if warn_as_error_changed then Bsb_regenerate_required - else - try check_aux per_proj_dir dir_or_files - with e -> - Bsb_log.info "@{Stat miss %s@}@." (Printexc.to_string e); - Bsb_file_not_exist) - | _ -> Bsb_file_corrupted) diff --git a/compiler/bsb/bsb_ninja_check.mli b/compiler/bsb/bsb_ninja_check.mli deleted file mode 100644 index 96419ec5ebf..00000000000 --- a/compiler/bsb/bsb_ninja_check.mli +++ /dev/null @@ -1,72 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -(** - This module is used to check whether [build.ninja] needs - be regenerated. Everytime [bsb] run [regenerate_ninja], - bsb will try to [check] if it is needed, - if needed, bsb will regenerate ninja file and store the - metadata again -*) - -type check_result = - | Good - | Bsb_file_corrupted - | Bsb_file_not_exist (** We assume that it is a clean repo *) - | Bsb_source_directory_changed - | Bsb_bsc_version_mismatch - | Bsb_forced - | Bsb_package_kind_inconsistent - | Bsb_regenerate_required - | Other of string - -val pp_check_result : Format.formatter -> check_result -> unit - -val record : - package_kind:Bsb_package_kind.t -> - per_proj_dir:string -> - file:string -> - config:Bsb_config_types.t -> - warn_as_error:string option -> - string list -> - unit -(** [record cwd file relevant_file_or_dirs] - The data structure we decided to whether regenerate [build.ninja] - or not. 
- Note that if we don't record absolute path, ninja will not notice its build spec changed, - it will not trigger rebuild behavior, - It may not be desired behavior, since there is some subtlies here (__FILE__ or __dirname) - - We serialize such data structure and call {!check} to decide - [build.ninja] should be regenerated -*) - -val check : - package_kind:Bsb_package_kind.t -> - per_proj_dir:string -> - forced:bool -> - warn_as_error:string option -> - file:string -> - check_result -(** check if [build.ninja] should be regenerated *) diff --git a/compiler/bsb/bsb_ninja_file_groups.ml b/compiler/bsb/bsb_ninja_file_groups.ml deleted file mode 100644 index 8e4d157cf94..00000000000 --- a/compiler/bsb/bsb_ninja_file_groups.ml +++ /dev/null @@ -1,117 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let ( // ) = Ext_path.combine - -let handle_generators oc (group : Bsb_file_groups.file_group) custom_rules = - let map_to_source_dir x = Bsb_config.proj_rel (group.dir // x) in - Ext_list.iter group.generators (fun {output; input; command} -> - (*TODO: add a loc for better error message *) - match Map_string.find_opt custom_rules command with - | None -> - Ext_fmt.failwithf ~loc:__LOC__ "custom rule %s used but not defined" - command - | Some rule -> - Bsb_ninja_targets.output_build oc - ~outputs:(Ext_list.map output map_to_source_dir) - ~inputs:(Ext_list.map input map_to_source_dir) - ~rule) - -type suffixes = {impl: string; intf: string} - -let res_suffixes = {impl = Literals.suffix_res; intf = Literals.suffix_resi} - -let emit_module_build (rules : Bsb_ninja_rule.builtin) - (package_specs : Bsb_package_specs.t) (is_dev : bool) oc namespace - (module_info : Bsb_db.module_info) : unit = - let has_intf_file = module_info.info = Impl_intf in - let config, ast_rule = (res_suffixes, rules.build_ast_from_re) in - let filename_sans_extension = module_info.name_sans_extension in - let input_impl = - Bsb_config.proj_rel (filename_sans_extension ^ config.impl) - in - let input_intf = - Bsb_config.proj_rel (filename_sans_extension ^ config.intf) - in - let output_ast = filename_sans_extension ^ Literals.suffix_ast in - let output_iast = filename_sans_extension ^ Literals.suffix_iast in - let output_d = filename_sans_extension ^ Literals.suffix_d in - let output_filename_sans_extension = - Ext_namespace_encode.make ?ns:namespace filename_sans_extension - in - let output_cmi = output_filename_sans_extension ^ Literals.suffix_cmi in - let output_cmj = output_filename_sans_extension ^ Literals.suffix_cmj in - let output_js = - 
Bsb_package_specs.get_list_of_output_js package_specs - output_filename_sans_extension - in - - Bsb_ninja_targets.output_build oc ~outputs:[output_ast] ~inputs:[input_impl] - ~rule:ast_rule; - Bsb_ninja_targets.output_build oc ~outputs:[output_d] - ~inputs:(if has_intf_file then [output_ast; output_iast] else [output_ast]) - ~rule:(if is_dev then rules.build_bin_deps_dev else rules.build_bin_deps); - if has_intf_file then ( - Bsb_ninja_targets.output_build oc - ~outputs:[output_iast] - (* TODO: we can get rid of absloute path if we fixed the location to be - [lib/bs], better for testing? - *) - ~inputs:[input_intf] ~rule:ast_rule; - Bsb_ninja_targets.output_build oc ~outputs:[output_cmi] - ~inputs:[output_iast] - ~rule:(if is_dev then rules.mi_dev else rules.mi)); - let rule = - if has_intf_file then if is_dev then rules.mj_dev else rules.mj - else if is_dev then rules.mij_dev - else rules.mij - in - Bsb_ninja_targets.output_build oc - ~outputs: - (if has_intf_file then output_cmj :: output_js - else output_cmj :: output_cmi :: output_js) - ~inputs:(if has_intf_file then [output_ast; output_cmi] else [output_ast]) - ~rule - -let handle_files_per_dir oc ~(rules : Bsb_ninja_rule.builtin) ~package_specs - ~files_to_install ~(namespace : string option) - (group : Bsb_file_groups.file_group) : unit = - let is_dev = group.is_dev in - handle_generators oc group rules.customs; - let installable = - match group.public with - | Export_all -> fun _ -> true - | Export_none -> fun _ -> false - | Export_set set -> fun module_name -> Set_string.mem set module_name - in - Map_string.iter group.sources (fun module_name module_info -> - if installable module_name && not is_dev then - Queue.add module_info files_to_install; - emit_module_build rules package_specs is_dev oc namespace module_info) - -(* ; - Bsb_ninja_targets.phony - oc ~order_only_deps:[] ~inputs:[] ~output:group.dir *) - -(* pseuduo targets per directory *) diff --git a/compiler/bsb/bsb_ninja_file_groups.mli 
b/compiler/bsb/bsb_ninja_file_groups.mli deleted file mode 100644 index 60836ef0456..00000000000 --- a/compiler/bsb/bsb_ninja_file_groups.mli +++ /dev/null @@ -1,32 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val handle_files_per_dir : - out_channel -> - rules:Bsb_ninja_rule.builtin -> - package_specs:Bsb_package_specs.t -> - files_to_install:Bsb_db.module_info Queue.t -> - namespace:string option -> - Bsb_file_groups.file_group -> - unit diff --git a/compiler/bsb/bsb_ninja_gen.ml b/compiler/bsb/bsb_ninja_gen.ml deleted file mode 100644 index d6dbe62b8a9..00000000000 --- a/compiler/bsb/bsb_ninja_gen.ml +++ /dev/null @@ -1,227 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. 
- * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let ( // ) = Ext_path.combine - -(* we need copy package.json into [_build] since it does affect build output - it is a bad idea to copy package.json which requires to copy js files -*) - -(* let dash_i = "-I" *) - -let get_bsc_flags (bsc_flags : string list) : string = - String.concat Ext_string.single_space bsc_flags - -let emit_bsc_lib_includes (bs_dependencies : Bsb_config_types.dependencies) - (source_dirs : string list) external_includes (namespace : _ option) : - string = - (* TODO: bsc_flags contain stdlib path which is in the latter position currently *) - let all_includes source_dirs = - source_dirs - @ Ext_list.map bs_dependencies (fun x -> x.package_install_path) - @ - (* for external includes, if it is absolute path, leave it as is - for relative path './xx', we need '../.././x' since we are in - [lib/bs], [build] is different from merlin though - *) - Ext_list.map external_includes (fun x -> - if Filename.is_relative x then Bsb_config.rev_lib_bs_prefix x else x) - in - - Bsb_build_util.include_dirs - (all_includes - (if namespace = None then source_dirs - else Filename.current_dir_name :: source_dirs - (*working dir is [lib/bs] we include this path to have namespace mapping*))) - -let output_static_resources (static_resources : string list) copy_rule oc = - Ext_list.iter static_resources (fun output -> - Bsb_ninja_targets.output_build oc ~outputs:[output] - ~inputs:[Bsb_config.proj_rel output] - ~rule:copy_rule); - if static_resources <> [] then - Bsb_ninja_targets.phony oc ~order_only_deps:static_resources ~inputs:[] - ~output:Literals.build_ninja - -(* - FIXME: check if the trick still works - phony build.ninja : | resources -*) -let mark_rescript oc = output_string oc "rescript = 1\n" - -let output_installation_file cwd_lib_bs namespace files_to_install = - let install_oc = open_out_bin (cwd_lib_bs // "install.ninja") in - mark_rescript install_oc; - let o s = output_string install_oc s in - let[@inline] oo suffix ~dest ~src = - o "o "; - o 
dest; - o suffix; - o " : cp "; - o src; - o suffix; - o "\n" - in - let bs = ".." // "bs" in - let sb = ".." // ".." in - o - (if Ext_sys.is_windows_or_cygwin then - "rule cp\n\ - \ command = cmd.exe /C copy /Y $i $out >NUL\n\ - rule touch\n\ - \ command = cmd.exe /C type nul >>$out & copy $out+,, >NUL\n" - else "rule cp\n command = cp $i $out\nrule touch\n command = touch $out\n"); - let essentials = Ext_buffer.create 1_000 in - files_to_install - |> Queue.iter (fun ({name_sans_extension; info} : Bsb_db.module_info) -> - let base = Filename.basename name_sans_extension in - let dest = Ext_namespace_encode.make ?ns:namespace base in - let ns_origin = - Ext_namespace_encode.make ?ns:namespace name_sans_extension - in - let src = bs // ns_origin in - oo Literals.suffix_cmi ~dest ~src; - oo Literals.suffix_cmj ~dest ~src; - oo Literals.suffix_cmt ~dest ~src; - - Ext_buffer.add_string essentials dest; - Ext_buffer.add_string_char essentials Literals.suffix_cmi ' '; - Ext_buffer.add_string essentials dest; - Ext_buffer.add_string_char essentials Literals.suffix_cmj ' '; - - let suffix_impl = Literals.suffix_res in - oo suffix_impl ~dest:base ~src:(sb // name_sans_extension); - match info with - | Intf -> assert false - | Impl -> () - | Impl_intf -> - let suffix_intf = Literals.suffix_resi in - oo suffix_intf ~dest:base ~src:(sb // name_sans_extension); - oo Literals.suffix_cmti ~dest ~src); - (match namespace with - | None -> () - | Some dest -> - let src = bs // dest in - oo Literals.suffix_cmi ~dest ~src; - oo Literals.suffix_cmj ~dest ~src; - oo Literals.suffix_cmt ~dest ~src; - Ext_buffer.add_string essentials dest; - Ext_buffer.add_string_char essentials Literals.suffix_cmi ' '; - Ext_buffer.add_string essentials dest; - Ext_buffer.add_string essentials Literals.suffix_cmj); - Ext_buffer.add_char essentials '\n'; - o "build install.stamp : touch "; - Ext_buffer.output_buffer install_oc essentials; - close_out install_oc - -let output_ninja_and_namespace_map 
~per_proj_dir ~package_kind - ({ - package_name; - external_includes; - bsc_flags; - pp_file; - ppx_files; - bs_dependencies; - bs_dev_dependencies; - js_post_build_cmd; - package_specs; - file_groups = {files = bs_file_groups}; - files_to_install; - jsx; - generators; - namespace; - warning; - gentype_config; - } : - Bsb_config_types.t) : unit = - let lib_artifacts_dir = Bsb_config.lib_bs in - let cwd_lib_bs = per_proj_dir // lib_artifacts_dir in - - let warnings = Bsb_warning.to_bsb_string ~package_kind warning in - let bsc_flags = get_bsc_flags bsc_flags in - let dpkg_incls = - Bsb_build_util.include_dirs_by bs_dev_dependencies (fun x -> - x.package_install_path) - in - let bs_groups : Bsb_db.t = {lib = Map_string.empty; dev = Map_string.empty} in - let source_dirs : string list Bsb_db.cat = {lib = []; dev = []} in - let static_resources = - Ext_list.fold_left bs_file_groups [] - (fun (acc_resources : string list) {sources; dir; resources; is_dev} -> - if is_dev then ( - bs_groups.dev <- Bsb_db_util.merge bs_groups.dev sources; - source_dirs.dev <- dir :: source_dirs.dev) - else ( - bs_groups.lib <- Bsb_db_util.merge bs_groups.lib sources; - source_dirs.lib <- dir :: source_dirs.lib); - Ext_list.map_append resources acc_resources (fun x -> dir // x)) - in - let lib = bs_groups.lib in - let dev = bs_groups.dev in - Bsb_db_util.sanity_check lib; - Bsb_db_util.sanity_check dev; - Map_string.iter dev (fun k a -> - if Map_string.mem lib k then - raise (Bsb_db_util.conflict_module_info k a (Map_string.find_exn lib k))); - let dev_incls = Bsb_build_util.include_dirs source_dirs.dev in - let digest = Bsb_db_encode.write_build_cache ~dir:cwd_lib_bs bs_groups in - let lib_incls = - emit_bsc_lib_includes bs_dependencies source_dirs.lib external_includes - namespace - in - let rules : Bsb_ninja_rule.builtin = - Bsb_ninja_rule.make_custom_rules ~gentype_config - ~has_postbuild:js_post_build_cmd ~pp_file ~jsx ~package_specs ~namespace - ~digest ~package_name ~warnings 
~ppx_files ~bsc_flags - ~dpkg_incls (* dev dependencies *) - ~lib_incls (* its own libs *) - ~dev_incls (* its own devs *) - generators - in - - let oc = open_out_bin (cwd_lib_bs // Literals.build_ninja) in - mark_rescript oc; - let finger_file (x : Bsb_config_types.dependency) = - x.package_install_path // "install.stamp" - in - Ext_list.iter bs_dependencies (fun x -> - Bsb_ninja_targets.output_finger Bsb_ninja_global_vars.g_finger - (finger_file x) oc); - Ext_list.iter bs_dev_dependencies (fun x -> - Bsb_ninja_targets.output_finger Bsb_ninja_global_vars.g_finger - (finger_file x) oc); - output_static_resources static_resources rules.copy_resources oc; - (* Generate build statement for each file *) - Ext_list.iter bs_file_groups (fun files_per_dir -> - Bsb_ninja_file_groups.handle_files_per_dir oc ~rules ~package_specs - ~files_to_install ~namespace files_per_dir); - Ext_option.iter namespace (fun ns -> - let namespace_dir = per_proj_dir // lib_artifacts_dir in - Bsb_namespace_map_gen.output ~dir:namespace_dir ns bs_file_groups; - Bsb_ninja_targets.output_build oc - ~outputs:[ns ^ Literals.suffix_cmi] - ~inputs:[ns ^ Literals.suffix_mlmap] - ~rule:rules.build_package); - close_out oc; - output_installation_file cwd_lib_bs namespace files_to_install diff --git a/compiler/bsb/bsb_ninja_gen.mli b/compiler/bsb/bsb_ninja_gen.mli deleted file mode 100644 index 2c03a82b975..00000000000 --- a/compiler/bsb/bsb_ninja_gen.mli +++ /dev/null @@ -1,32 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val output_ninja_and_namespace_map : - per_proj_dir:string -> - package_kind:Bsb_package_kind.t -> - Bsb_config_types.t -> - unit -(** - generate ninja file based on [cwd] -*) diff --git a/compiler/bsb/bsb_ninja_global_vars.ml b/compiler/bsb/bsb_ninja_global_vars.ml deleted file mode 100644 index 998aeed54ce..00000000000 --- a/compiler/bsb/bsb_ninja_global_vars.ml +++ /dev/null @@ -1,32 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -(* Invariant: the two string literal has - to be "a" and "$a" -*) - -(* let src_root_dir = "g_root" - - let lazy_src_root_dir = "$g_root" *) -let g_finger = "g_finger" diff --git a/compiler/bsb/bsb_ninja_regen.ml b/compiler/bsb/bsb_ninja_regen.ml deleted file mode 100644 index 12e50c4b0b9..00000000000 --- a/compiler/bsb/bsb_ninja_regen.ml +++ /dev/null @@ -1,100 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let bsdeps = ".bsdeps" - -let ( // ) = Ext_path.combine - -(** Regenerate ninja file by need based on [.bsdeps] - return None if we dont need regenerate - otherwise return Some info -*) -let regenerate_ninja ~(package_kind : Bsb_package_kind.t) ~forced ~per_proj_dir - ~warn_legacy_config ~warn_as_error : Bsb_config_types.t option = - let lib_artifacts_dir = Bsb_config.lib_bs in - let lib_bs_dir = per_proj_dir // lib_artifacts_dir in - let output_deps = lib_bs_dir // bsdeps in - let check_result = - Bsb_ninja_check.check ~package_kind ~per_proj_dir ~forced ~warn_as_error - ~file:output_deps - in - let config_filename, config_json = - Bsb_config_load.load_json ~per_proj_dir ~warn_legacy_config - in - match check_result with - | Good -> None (* Fast path, no need regenerate ninja *) - | Bsb_forced | Bsb_bsc_version_mismatch | Bsb_package_kind_inconsistent - | Bsb_file_corrupted | Bsb_file_not_exist | Bsb_source_directory_changed - | Bsb_regenerate_required | Other _ -> - Bsb_log.info "@{BSB check@} build spec : %a @." 
- Bsb_ninja_check.pp_check_result check_result; - if check_result = Bsb_bsc_version_mismatch then ( - Bsb_log.warn "@{Different compiler version@}: clean current repo@."; - Bsb_clean.clean_bs_deps per_proj_dir; - Bsb_clean.clean_self per_proj_dir); - - let config : Bsb_config_types.t = - Bsb_config_parse.interpret_json ~filename:config_filename - ~json:config_json ~package_kind ~per_proj_dir - in - - let warning = - match config.warning with - | None -> ( - match warn_as_error with - | Some e -> - Some {Bsb_warning.number = Some e; error = Warn_error_number e} - | None -> None) - | Some {error} as t -> ( - match (warn_as_error, error) with - | Some error_str, Warn_error_false -> - Some {number = Some error_str; error = Warn_error_number error_str} - | Some error_str, Warn_error_number prev -> - let new_error = prev ^ error_str in - Some {number = Some new_error; error = Warn_error_number new_error} - | _ -> t) - in - - let config = {config with warning} in - (* create directory, lib/bs, lib/js, lib/es6 etc *) - Bsb_build_util.mkp lib_bs_dir; - Bsb_package_specs.list_dirs_by config.package_specs (fun x -> - let dir = per_proj_dir // x in - (*Unix.EEXIST error*) - if not (Sys.file_exists dir) then Unix.mkdir dir 0o777); - (match package_kind with - | Toplevel -> - Bsb_watcher_gen.generate_sourcedirs_meta - ~name:(lib_bs_dir // Literals.sourcedirs_meta) - config.file_groups - | Dependency _ -> ()); - - Bsb_ninja_gen.output_ninja_and_namespace_map ~per_proj_dir ~package_kind - config; - (* PR2184: we still need record empty dir - since it may add files in the future *) - Bsb_ninja_check.record ~package_kind ~per_proj_dir ~config ~warn_as_error - ~file:output_deps - (config.filename :: config.file_groups.globbed_dirs); - Some config diff --git a/compiler/bsb/bsb_ninja_regen.mli b/compiler/bsb/bsb_ninja_regen.mli deleted file mode 100644 index 0af5766e57e..00000000000 --- a/compiler/bsb/bsb_ninja_regen.mli +++ /dev/null @@ -1,35 +0,0 @@ -(* Copyright (C) 2017 Hongbo 
Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val regenerate_ninja : - package_kind:Bsb_package_kind.t -> - forced:bool -> - per_proj_dir:string -> - warn_legacy_config:bool -> - warn_as_error:string option -> - Bsb_config_types.t option -(** Regenerate ninja file by need based on [.bsdeps] - return None if we dont need regenerate - otherwise return Some info -*) diff --git a/compiler/bsb/bsb_ninja_rule.ml b/compiler/bsb/bsb_ninja_rule.ml deleted file mode 100644 index d12f845b20c..00000000000 --- a/compiler/bsb/bsb_ninja_rule.ml +++ /dev/null @@ -1,229 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. 
- * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -type t = { - mutable used: bool; - (* rule_name : string; *) - name: out_channel -> string; -} - -let get_name (x : t) oc = x.name oc - -let print_rule (oc : out_channel) ?description ?(restat : unit option) - ?(dyndep : unit option) ~command name = - output_string oc "rule "; - output_string oc name; - output_string oc "\n"; - output_string oc " command = "; - output_string oc command; - output_string oc "\n"; - if dyndep <> None then output_string oc " dyndep = 1\n"; - if restat <> None then output_string oc " restat = 1\n"; - match description with - | None -> () - | Some description -> - output_string oc " description = "; - output_string oc description; - output_string oc "\n" - -(** allocate an unique name for such rule*) -let define ~command ?dyndep ?restat rule_name : t = - let rec self = - { - used = false; - (* rule_name ; *) - name = - (fun oc -> - if not self.used then ( - print_rule oc ?dyndep ?restat ~command rule_name; - self.used <- true); - rule_name); - } - in - - self - -type command = string - -type builtin = { - build_ast_from_re: t; - (* build_ast_from_rei : t ; *) - (* platform dependent, on Win32, - invoking cmd.exe - *) - copy_resources: t; - (* Rules below all need restat *) - build_bin_deps: t; - build_bin_deps_dev: t; - mj: t; - mj_dev: t; - mij: t; - mij_dev: t; - mi: t; - mi_dev: t; - build_package: t; - customs: t Map_string.t; -} - -let make_custom_rules ~(gentype_config : Bsb_config_types.gentype_config) - ~(has_postbuild : string option) ~(pp_file : string option) - ~(jsx : Bsb_jsx.t) ~(digest : string) ~(package_specs : Bsb_package_specs.t) - ~(namespace : string option) ~package_name ~warnings - ~(ppx_files : Bsb_config_types.ppx list) ~bsc_flags ~(dpkg_incls : string) - ~(lib_incls : string) ~(dev_incls : string) - (custom_rules : command Map_string.t) : builtin = - let bs_dep = Ext_filename.maybe_quote Bsb_global_paths.vendor_bsdep in - let bsc = Ext_filename.maybe_quote Bsb_global_paths.vendor_bsc in - (* FIXME: We don't 
need set [-o ${out}] when building ast - since the default is already good -- it does not*) - let buf = Ext_buffer.create 100 in - let ns_flag = - match namespace with - | None -> "" - | Some n -> " -bs-ns " ^ n - in - let mk_ml_cmj_cmd ~(read_cmi : [`yes | `is_cmi | `no]) ~is_dev ~postbuild : - string = - Ext_buffer.clear buf; - Ext_buffer.add_string buf bsc; - Ext_buffer.add_string buf (" -runtime-path " ^ !Runtime_package.path); - Ext_buffer.add_string buf ns_flag; - if read_cmi = `yes then Ext_buffer.add_string buf " -bs-read-cmi"; - (* The include order matters below *) - if is_dev then Ext_buffer.add_char_string buf ' ' dev_incls; - Ext_buffer.add_char_string buf ' ' lib_incls; - if is_dev then Ext_buffer.add_char_string buf ' ' dpkg_incls; - Ext_buffer.add_char_string buf ' ' bsc_flags; - Ext_buffer.add_char_string buf ' ' warnings; - (* we need "-w a" in the end position to take effect - in non-toplevel mode - *) - (match gentype_config with - | false -> () - | true -> Ext_buffer.add_string buf " -bs-gentype"); - if read_cmi <> `is_cmi then ( - Ext_buffer.add_string buf " -bs-package-name "; - Ext_buffer.add_string buf (Ext_filename.maybe_quote package_name); - Ext_buffer.add_string buf - (Bsb_package_specs.package_flag_of_package_specs package_specs - ~dirname:"$in_d")); - Ext_buffer.add_string buf " $i"; - (match postbuild with - | None -> () - | Some cmd -> - Ext_buffer.add_string buf " && "; - Ext_buffer.add_string buf cmd; - Ext_buffer.add_string buf " $out_last"); - Ext_buffer.contents buf - in - let mk_ast = - Ext_buffer.clear buf; - Ext_buffer.add_string buf bsc; - Ext_buffer.add_string buf (" -runtime-path " ^ !Runtime_package.path); - Ext_buffer.add_char_string buf ' ' warnings; - (match ppx_files with - | [] -> () - | _ -> - Ext_buffer.add_char_string buf ' ' (Bsb_build_util.ppx_flags ppx_files)); - (match pp_file with - | None -> () - | Some flag -> - Ext_buffer.add_char_string buf ' ' (Bsb_build_util.pp_flag flag)); - (match jsx.version with - 
| Some Jsx_v4 -> Ext_buffer.add_string buf " -bs-jsx 4" - | None -> ()); - (match jsx.module_ with - | None -> () - | Some React -> Ext_buffer.add_string buf " -bs-jsx-module react" - | Some (Generic {moduleName}) -> - Ext_buffer.add_string buf (" -bs-jsx-module " ^ moduleName)); - - Ext_buffer.add_char_string buf ' ' bsc_flags; - Ext_buffer.add_string buf " -absname -bs-ast -o $out $i"; - Ext_buffer.contents buf - in - let build_ast_from_re = define ~command:mk_ast "astj" in - - let copy_resources = - define - ~command: - (if Ext_sys.is_windows_or_cygwin then "cmd.exe /C copy /Y $i $out >NUL" - else "cp $i $out") - "copy_resource" - in - - let build_bin_deps = - define ~restat:() - ~command:(bs_dep ^ " -hash " ^ digest ^ ns_flag ^ " $in") - "deps" - (* - it seems we already have restat = 1 - now it is an implicit dependency, we need avoid write duplicated files - *) - in - let build_bin_deps_dev = - define ~restat:() - ~command:(bs_dep ^ " -g -hash " ^ digest ^ ns_flag ^ " $in") - "deps_dev" - in - let aux ~name ~read_cmi ~postbuild = - ( define - ~command:(mk_ml_cmj_cmd ~read_cmi ~is_dev:false ~postbuild) - ~dyndep:() ~restat:() (* Always restat when having mli *) name, - define - ~command:(mk_ml_cmj_cmd ~read_cmi ~is_dev:true ~postbuild) - ~dyndep:() ~restat:() - (* Always restat when having mli *) (name ^ "_dev") ) - in - - let mj, mj_dev = aux ~name:"mj" ~read_cmi:`yes ~postbuild:has_postbuild in - let mij, mij_dev = aux ~read_cmi:`no ~name:"mij" ~postbuild:has_postbuild in - let mi, mi_dev = aux ~read_cmi:`is_cmi ~postbuild:None ~name:"mi" in - let build_package = - define - ~command: - (bsc ^ " -w -49 -color always -no-alias-deps -runtime-path " - ^ !Runtime_package.path ^ " $i") - ~restat:() "build_package" - in - { - build_ast_from_re; - (* platform dependent, on Win32, - invoking cmd.exe - *) - copy_resources; - (* Rules below all need restat *) - build_bin_deps; - build_bin_deps_dev; - mj; - mj_dev; - mij; - mi; - mij_dev; - mi_dev; - build_package; - 
customs = - Map_string.mapi custom_rules (fun name command -> - define ~command ("custom_" ^ name)); - } diff --git a/compiler/bsb/bsb_ninja_rule.mli b/compiler/bsb/bsb_ninja_rule.mli deleted file mode 100644 index b7678e0c932..00000000000 --- a/compiler/bsb/bsb_ninja_rule.mli +++ /dev/null @@ -1,83 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -type t -(** The complexity comes from the fact that we allow custom rules which could - conflict with our custom built-in rules -*) - -val get_name : t -> out_channel -> string - -(***********************************************************) - -type builtin = { - build_ast_from_re: t; - (* platform dependent, on Win32, - invoking cmd.exe - *) - copy_resources: t; - (* Rules below all need restat *) - build_bin_deps: t; - build_bin_deps_dev: t; - mj: t; - mj_dev: t; - mij: t; - mij_dev: t; - mi: t; - mi_dev: t; - build_package: t; - customs: t Map_string.t; -} -(** A list of existing rules *) - -(***********************************************************) - -(** rules are generally composed of built-in rules and customized rules, there are two design choices: - 1. respect custom rules with the same name, then we need adjust our built-in - rules dynamically in case the conflict. - 2. respect our built-in rules, then we only need re-load custom rules for each rescript.json -*) - -type command = string - -(* Since now we generate ninja files per rescript.json in a single process, - we must make sure it is re-entrant -*) -val make_custom_rules : - gentype_config:Bsb_config_types.gentype_config -> - has_postbuild:string option -> - pp_file:string option -> - jsx:Bsb_jsx.t -> - digest:string -> - package_specs:Bsb_package_specs.t -> - namespace:string option -> - package_name:string -> - warnings:string -> - ppx_files:Bsb_config_types.ppx list -> - bsc_flags:string -> - dpkg_incls:string -> - lib_incls:string -> - dev_incls:string -> - command Map_string.t -> - builtin diff --git a/compiler/bsb/bsb_ninja_targets.ml b/compiler/bsb/bsb_ninja_targets.ml deleted file mode 100644 index 61167b8acc4..00000000000 --- a/compiler/bsb/bsb_ninja_targets.ml +++ /dev/null @@ -1,55 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. 
- * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let oc_list xs oc = - Ext_list.iter xs (fun s -> - output_string oc Ext_string.single_space; - output_string oc s) - -let output_build ~outputs ~inputs ~rule oc = - let rule = Bsb_ninja_rule.get_name rule oc in - (* Trigger building if not used *) - output_string oc "o"; - oc_list outputs oc; - output_string oc " : "; - output_string oc rule; - oc_list inputs oc; - output_string oc "\n" - -let phony ?(order_only_deps = []) ~inputs ~output oc = - output_string oc "o "; - output_string oc output; - output_string oc " : "; - output_string oc "phony"; - oc_list inputs oc; - if order_only_deps <> [] then ( - output_string oc " ||"; - oc_list order_only_deps oc); - output_string oc "\n" - -let output_finger key value oc = - output_string oc key; - output_string oc " := "; - output_string oc value; - output_string oc "\n" diff --git a/compiler/bsb/bsb_ninja_targets.mli b/compiler/bsb/bsb_ninja_targets.mli deleted file mode 100644 index 0a629579ba5..00000000000 --- a/compiler/bsb/bsb_ninja_targets.mli +++ /dev/null @@ -1,43 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val output_build : - outputs:string list -> - inputs:string list -> - rule:Bsb_ninja_rule.t -> - out_channel -> - unit -(** output should always be marked explicitly, - otherwise the build system can not figure out clearly - however, for the command we don't need pass `-o` -*) - -val phony : - ?order_only_deps:string list -> - inputs:string list -> - output:string -> - out_channel -> - unit - -val output_finger : string -> string -> out_channel -> unit diff --git a/compiler/bsb/bsb_package_kind.ml b/compiler/bsb/bsb_package_kind.ml deleted file mode 100644 index f014aa884d5..00000000000 --- a/compiler/bsb/bsb_package_kind.ml +++ /dev/null @@ -1,39 +0,0 @@ -(* Copyright (C) 2020 - Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type dep_payload = {package_specs: Bsb_package_specs.t; jsx: Bsb_jsx.t} - -type t = Toplevel | Dependency of dep_payload -(* This package specs comes from the toplevel to - override the current settings -*) - -let encode_no_nl (x : t) = - match x with - | Toplevel -> "0" - | Dependency x -> - "1" - ^ Bsb_package_specs.package_flag_of_package_specs x.package_specs - ~dirname:"." - ^ Bsb_jsx.encode_no_nl x.jsx diff --git a/compiler/bsb/bsb_package_specs.ml b/compiler/bsb/bsb_package_specs.ml deleted file mode 100644 index ad80b989141..00000000000 --- a/compiler/bsb/bsb_package_specs.ml +++ /dev/null @@ -1,207 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let ( // ) = Ext_path.combine - -(*FIXME: use assoc list instead *) -module Spec_set = Bsb_spec_set - -type t = {modules: Spec_set.t} - -let ( .?() ) = Map_string.find_opt - -let bad_module_format_message_exn ~loc format = - Bsb_exception.errorf ~loc - "package-specs: `%s` isn't a valid output module format. It has to be one \ - of: %s or %s" - format Literals.esmodule Literals.commonjs - -let supported_format (x : string) loc : Ext_module_system.t = - let _ = - if x = Literals.es6 || x = Literals.es6_global then - let loc_end = - {loc with Lexing.pos_cnum = loc.Lexing.pos_cnum + String.length x} - in - let loc = {Warnings.loc_start = loc; loc_end; loc_ghost = false} in - Location.deprecated ~can_be_automigrated:false loc - (Printf.sprintf "Option \"%s\" is deprecated. Use \"%s\" instead." 
x - Literals.esmodule) - in - if x = Literals.es6 || x = Literals.esmodule then Esmodule - else if x = Literals.commonjs then Commonjs - else if x = Literals.es6_global then Es6_global - else bad_module_format_message_exn ~loc x - -let string_of_format (x : Ext_module_system.t) = - match x with - | Commonjs -> Literals.commonjs - | Esmodule -> Literals.esmodule - | Es6_global -> Literals.es6_global - -let js_suffix_regexp = Str.regexp "[A-Za-z0-9-_.]*\\.[cm]?js" - -let validate_js_suffix suffix = Str.string_match js_suffix_regexp suffix 0 - -let rec from_array suffix (arr : Ext_json_types.t array) : Spec_set.t = - let spec = ref Spec_set.empty in - let has_in_source = ref false in - Ext_array.iter arr (fun x -> - let result = from_json_single suffix x in - if result.in_source then - if not !has_in_source then has_in_source := true - else - Bsb_exception.errorf ~loc:(Ext_json.loc_of x) - "package-specs: detected two module formats that are both \ - configured to be in-source."; - spec := Spec_set.add result !spec); - !spec - -(* TODO: FIXME: better API without mutating *) -and from_json_single suffix (x : Ext_json_types.t) : Bsb_spec_set.spec = - match x with - | Str {str = format; loc} -> - {format = supported_format format loc; in_source = false; suffix} - | Obj {map; loc} -> ( - match map.?("module") with - | Some (Str {str = format}) -> - let in_source = - match map.?(Bsb_build_schemas.in_source) with - | Some (True _) -> true - | Some _ | None -> false - in - let suffix = - match map.?(Bsb_build_schemas.suffix) with - | Some (Str {str = suffix; _}) when validate_js_suffix suffix -> suffix - | Some (Str {str; loc}) -> - Bsb_exception.errorf ~loc - "invalid suffix \"%s\". The suffix and may contain letters, \ - digits, \"-\", \"_\" and \".\" and must end with .js, .mjs or \ - .cjs." 
- str - | Some _ -> - Bsb_exception.errorf ~loc:(Ext_json.loc_of x) - "expected a string extension like \".js\"" - | None -> suffix - in - {format = supported_format format loc; in_source; suffix} - | Some _ -> - Bsb_exception.errorf ~loc - "package-specs: when the configuration is an object, `module` field \ - should be a string, not an array. If you want to pass multiple module \ - specs, try turning package-specs into an array of objects (or \ - strings) instead." - | None -> - Bsb_exception.errorf ~loc - "package-specs: when the configuration is an object, the `module` \ - field is mandatory.") - | _ -> - Bsb_exception.errorf ~loc:(Ext_json.loc_of x) - "package-specs: expected either a string or an object." - -let from_json suffix (x : Ext_json_types.t) : Spec_set.t = - match x with - | Arr {content; _} -> from_array suffix content - | _ -> Spec_set.singleton (from_json_single suffix x) - -let bs_package_output = "-bs-package-output" - -[@@@warning "+9"] - -let package_flag ({format; in_source; suffix} : Bsb_spec_set.spec) dir = - Ext_string.inter2 bs_package_output - (Ext_string.concat5 (string_of_format format) Ext_string.single_colon - (if in_source then dir else Bsb_config.top_prefix_of_format format // dir) - Ext_string.single_colon suffix) - -(* FIXME: we should adapt it *) -let package_flag_of_package_specs (package_specs : t) ~(dirname : string) : - string = - let res = - match (package_specs.modules :> Bsb_spec_set.spec list) with - | [] -> Ext_string.empty - | [format] -> - Ext_string.inter2 Ext_string.empty (package_flag format dirname) - | [a; b] -> - Ext_string.inter3 Ext_string.empty (package_flag a dirname) - (package_flag b dirname) - | [a; b; c] -> - Ext_string.inter4 Ext_string.empty (package_flag a dirname) - (package_flag b dirname) (package_flag c dirname) - | _ -> - Spec_set.fold - (fun format acc -> Ext_string.inter2 acc (package_flag format dirname)) - package_specs.modules Ext_string.empty - in - res - -let default_package_specs suffix 
= - (* TODO: swap default to Esmodule in v12 *) - Spec_set.singleton {format = Commonjs; in_source = false; suffix} - -(** - [get_list_of_output_js specs "src/hi/hello"] - -*) -let get_list_of_output_js (package_specs : t) - (output_file_sans_extension : string) = - Spec_set.fold - (fun (spec : Bsb_spec_set.spec) acc -> - let basename = - Ext_namespace.change_ext_ns_suffix output_file_sans_extension - spec.suffix - in - (if spec.in_source then Bsb_config.rev_lib_bs_prefix basename - else Bsb_config.lib_bs_prefix_of_format spec.format // basename) - :: acc) - package_specs.modules [] - -let list_dirs_by (package_specs : t) (f : string -> unit) = - Spec_set.iter - (fun (spec : Bsb_spec_set.spec) -> - if not spec.in_source then f (Bsb_config.top_prefix_of_format spec.format)) - package_specs.modules - -type json_map = Ext_json_types.t Map_string.t - -let extract_js_suffix_exn (map : json_map) : string = - match map.?(Bsb_build_schemas.suffix) with - | None -> Literals.suffix_js - | Some (Str {str = suffix; _}) when validate_js_suffix suffix -> suffix - | Some (Str {str; _} as config) -> - Bsb_exception.config_error config - ("invalid suffix \"" ^ str - ^ "\". 
The suffix and may contain letters, digits, \"-\", \"_\" and \".\" \ - and must end with .js, .mjs or .cjs.") - | Some config -> - Bsb_exception.config_error config "expected a string extension like \".js\"" - -let from_map ~(cwd : string) map = - ignore cwd; - let suffix = extract_js_suffix_exn map in - let modules = - match map.?(Bsb_build_schemas.package_specs) with - | Some x -> from_json suffix x - | None -> default_package_specs suffix - in - {modules} diff --git a/compiler/bsb/bsb_package_specs.mli b/compiler/bsb/bsb_package_specs.mli deleted file mode 100644 index f797dd78788..00000000000 --- a/compiler/bsb/bsb_package_specs.mli +++ /dev/null @@ -1,34 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type t - -val from_map : cwd:string -> Ext_json_types.t Map_string.t -> t - -val get_list_of_output_js : t -> string -> string list - -val package_flag_of_package_specs : t -> dirname:string -> string - -(* used to ensure each dir does exist *) -val list_dirs_by : t -> (string -> unit) -> unit diff --git a/compiler/bsb/bsb_parse_sources.ml b/compiler/bsb/bsb_parse_sources.ml deleted file mode 100644 index b829d616c34..00000000000 --- a/compiler/bsb/bsb_parse_sources.ml +++ /dev/null @@ -1,438 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type build_generator = Bsb_file_groups.build_generator - -let ( .?() ) = Map_string.find_opt - -(* type file_group = Bsb_file_groups.file_group *) - -type t = Bsb_file_groups.t - -let is_input_or_output (xs : build_generator list) (x : string) = - Ext_list.exists xs (fun {input; output} -> - let it_is y = y = x in - Ext_list.exists input it_is || Ext_list.exists output it_is) - -let errorf x fmt = Bsb_exception.errorf ~loc:(Ext_json.loc_of x) fmt - -type cxt = { - package_kind: Bsb_package_kind.t; - is_dev: bool; - cwd: string; - root: string; - cut_generators: bool; - traverse: bool; - (* namespace : string option; *) - ignored_dirs: Set_string.t; -} - -(** [public] has a list of modules, we do a sanity check to see if all the listed - modules are indeed valid module components -*) -let collect_pub_modules (xs : Ext_json_types.t array) (cache : Bsb_db.map) : - Set_string.t = - let set = ref Set_string.empty in - for i = 0 to Array.length xs - 1 do - let v = Array.unsafe_get xs i in - match v with - | Str {str; loc} -> - if Map_string.mem cache str then set := Set_string.add !set str - else - Bsb_exception.errorf ~loc "%S in public is not an existing module" str - | _ -> - Bsb_exception.errorf ~loc:(Ext_json.loc_of v) - "public expects a list of strings" - done; - !set - -let extract_pub (input : Ext_json_types.t Map_string.t) - (cur_sources : Bsb_db.map) : Bsb_file_groups.public = - match input.?(Bsb_build_schemas.public) with - | Some (Str {str = s} as x) -> - if s = Bsb_build_schemas.export_all then Export_all - else if s = Bsb_build_schemas.export_none then Export_none - else errorf x "invalid str for %s " s - | Some (Arr {content}) -> Export_set (collect_pub_modules content cur_sources) - | Some config -> Bsb_exception.config_error 
config "expect array or string" - | None -> Export_all - -let extract_resources (input : Ext_json_types.t Map_string.t) : string list = - match input.?(Bsb_build_schemas.resources) with - | Some (Arr x) -> Bsb_build_util.get_list_string x.content - | Some config -> Bsb_exception.config_error config "expect array " - | None -> [] - -let extract_input_output (edge : Ext_json_types.t) : string list * string list = - let error () = - errorf edge {| invalid edge format, expect ["output" , ":", "input" ]|} - in - match edge with - | Arr {content} -> ( - match - Ext_array.find_and_split content - (fun x () -> - match x with - | Str {str = ":"} -> true - | _ -> false) - () - with - | No_split -> error () - | Split (output, input) -> - ( Ext_array.to_list_map output (fun x -> - match x with - | Str {str = ":"} -> error () - | Str {str} -> Some str - | _ -> None), - Ext_array.to_list_map input (fun x -> - match x with - | Str {str = ":"} -> error () - | Str {str} -> - Some str - (* More rigirous error checking: It would trigger a ninja syntax error *) - | _ -> None) )) - | _ -> error () - -type json_map = Ext_json_types.t Map_string.t - -let extract_generators (input : json_map) : build_generator list = - match input.?(Bsb_build_schemas.generators) with - | Some (Arr {content; loc_start = _}) -> - (* Need check is dev build or not *) - Ext_array.fold_left content [] (fun acc x -> - match x with - | Obj {map} -> ( - match - (map.?(Bsb_build_schemas.name), map.?(Bsb_build_schemas.edge)) - with - | Some (Str command), Some edge -> - let output, input = extract_input_output edge in - {Bsb_file_groups.input; output; command = command.str} :: acc - | _ -> errorf x "Invalid generator format") - | _ -> errorf x "Invalid generator format") - | Some x -> errorf x "Invalid generator format" - | None -> [] - -let extract_predicate (m : json_map) : string -> bool = - let excludes = - match m.?(Bsb_build_schemas.excludes) with - | None -> [] - | Some (Arr {content = arr}) -> 
Bsb_build_util.get_list_string arr - | Some x -> Bsb_exception.config_error x "excludes expect array " - in - let slow_re = m.?(Bsb_build_schemas.slow_re) in - match (slow_re, excludes) with - | Some (Str {str = s}), [] -> - let re = Str.regexp s in - fun name -> Str.string_match re name 0 - | Some (Str {str = s}), _ :: _ -> - let re = Str.regexp s in - fun name -> - Str.string_match re name 0 && not (Ext_list.mem_string excludes name) - | Some config, _ -> - Bsb_exception.config_error config - (Bsb_build_schemas.slow_re ^ " expect a string literal") - | None, _ -> fun name -> not (Ext_list.mem_string excludes name) - -(** [parsing_source_dir_map cxt input] - Major work done in this function, - assume [not toplevel && not (Bsb_dir_index.is_lib_dir dir_index)] - is already checked, so we don't need check it again -*) - -(** This is the only place where we do some removal during scanning, - configurabl -*) - -(********************************************************************) -(* starts parsing *) -let rec parsing_source_dir_map ({cwd = dir} as cxt) - (input : Ext_json_types.t Map_string.t) : Bsb_file_groups.t = - if Set_string.mem cxt.ignored_dirs dir then Bsb_file_groups.empty - else - let cur_globbed_dirs = ref false in - let has_generators = - match cxt with - | {cut_generators = false; package_kind = Toplevel} -> true - | {cut_generators = false; package_kind = Dependency _} - | {cut_generators = true; _} -> - false - in - let scanned_generators = extract_generators input in - let sub_dirs_field = input.?(Bsb_build_schemas.subdirs) in - let base_name_array = - lazy - (cur_globbed_dirs := true; - Sys.readdir (Filename.concat cxt.root dir)) - in - let output_sources = - Ext_list.fold_left - (Ext_list.flat_map scanned_generators (fun x -> x.output)) - Map_string.empty - (fun acc o -> Bsb_db_util.add_basename ~dir acc o) - in - let sources = - match input.?(Bsb_build_schemas.files) with - | None -> - (* We should avoid temporary files *) - Ext_array.fold_left 
(Lazy.force base_name_array) output_sources - (fun acc basename -> - if is_input_or_output scanned_generators basename then acc - else Bsb_db_util.add_basename ~dir acc basename) - | Some (Arr basenames) -> - Ext_array.fold_left basenames.content output_sources - (fun acc basename -> - match basename with - | Str {str = basename; loc} -> - Bsb_db_util.add_basename ~dir acc basename - ~error_on_invalid_suffix:loc - | _ -> acc) - | Some (Obj {map; loc = _}) -> - (* { excludes : [], slow_re : "" }*) - let predicate = extract_predicate map in - Ext_array.fold_left (Lazy.force base_name_array) output_sources - (fun acc basename -> - if - is_input_or_output scanned_generators basename - || not (predicate basename) - then acc - else Bsb_db_util.add_basename ~dir acc basename) - | Some x -> - Bsb_exception.config_error x "files field expect array or object " - in - let resources = extract_resources input in - let public = extract_pub input sources in - (* Doing recursive stuff *) - let children = - match (sub_dirs_field, cxt.traverse) with - | None, true | Some (True _), _ -> - let root = cxt.root in - let parent = Filename.concat root dir in - Ext_array.fold_left (Lazy.force base_name_array) Bsb_file_groups.empty - (fun origin x -> - if - (not (Set_string.mem cxt.ignored_dirs x)) - && Ext_sys.is_directory_no_exn (Filename.concat parent x) - then - Bsb_file_groups.merge - (parsing_source_dir_map - { - cxt with - cwd = - Ext_path.concat cxt.cwd - (Ext_path.simple_convert_node_path_to_os_path x); - traverse = true; - } - Map_string.empty) - origin - else origin) - (* readdir parent avoiding scanning twice *) - | None, false | Some (False _), _ -> Bsb_file_groups.empty - | Some s, _ -> parse_sources cxt s - in - (* Do some clean up *) - (* prune_staled_bs_js_files cxt sources ; *) - Bsb_file_groups.cons - ~file_group: - { - dir; - sources; - resources; - public; - is_dev = cxt.is_dev; - generators = (if has_generators then scanned_generators else []); - } - ?globbed_dir:(if 
!cur_globbed_dirs then Some dir else None) - children - -and parsing_single_source ({package_kind; is_dev; cwd} as cxt) - (x : Ext_json_types.t) : t = - match x with - | Str {str = dir} -> ( - match (package_kind, is_dev) with - | Dependency _, true -> Bsb_file_groups.empty - | Dependency _, false | Toplevel, _ -> - parsing_source_dir_map - { - cxt with - cwd = - Ext_path.concat cwd - (Ext_path.simple_convert_node_path_to_os_path dir); - } - Map_string.empty) - | Obj {map} -> ( - let current_dir_index = - match map.?(Bsb_build_schemas.type_) with - | Some (Str {str = "dev"}) -> true - | Some _ -> - Bsb_exception.config_error x {|type field expect "dev" literal |} - | None -> is_dev - in - match (package_kind, current_dir_index) with - | Dependency _, true -> Bsb_file_groups.empty - | Dependency _, false | Toplevel, _ -> - let dir = - match map.?(Bsb_build_schemas.dir) with - | Some (Str {str}) -> - if str = Literals.library_file then - Bsb_exception.config_error x - (Printf.sprintf "dir field should be different from `%s`" - Literals.library_file) - else Ext_path.simple_convert_node_path_to_os_path str - | Some x -> Bsb_exception.config_error x "dir expected to be a string" - | None -> - Bsb_exception.config_error x - ("required field :" ^ Bsb_build_schemas.dir ^ " missing") - in - - parsing_source_dir_map - {cxt with is_dev = current_dir_index; cwd = Ext_path.concat cwd dir} - map) - | _ -> Bsb_file_groups.empty - -and parsing_arr_sources cxt (file_groups : Ext_json_types.t array) = - Ext_array.fold_left file_groups Bsb_file_groups.empty (fun origin x -> - Bsb_file_groups.merge (parsing_single_source cxt x) origin) - -and parse_sources (cxt : cxt) (sources : Ext_json_types.t) = - match sources with - | Arr file_groups -> parsing_arr_sources cxt file_groups.content - | _ -> parsing_single_source cxt sources - -let scan ~package_kind ~root ~cut_generators - ~(* ~namespace *) - ignored_dirs x : t = - parse_sources - { - ignored_dirs; - package_kind; - is_dev = false; 
- cwd = Filename.current_dir_name; - root; - cut_generators; - (* namespace; *) - traverse = false; - } - x - -(* Walk through to do some work *) -type walk_cxt = { - cwd: string; - root: string; - traverse: bool; - ignored_dirs: Set_string.t; - gentype_language: string; -} - -let rec walk_sources (cxt : walk_cxt) (sources : Ext_json_types.t) = - match sources with - | Arr {content} -> Ext_array.iter content (fun x -> walk_single_source cxt x) - | x -> walk_single_source cxt x - -and walk_single_source cxt (x : Ext_json_types.t) = - match x with - | Str {str = dir} -> - let dir = Ext_path.simple_convert_node_path_to_os_path dir in - walk_source_dir_map {cxt with cwd = Ext_path.concat cxt.cwd dir} None - | Obj {map} -> ( - match map.?(Bsb_build_schemas.dir) with - | Some (Str {str}) -> - let dir = Ext_path.simple_convert_node_path_to_os_path str in - walk_source_dir_map - {cxt with cwd = Ext_path.concat cxt.cwd dir} - map.?(Bsb_build_schemas.subdirs) - | _ -> ()) - | _ -> () - -and walk_source_dir_map (cxt : walk_cxt) sub_dirs_field = - let working_dir = Filename.concat cxt.root cxt.cwd in - if not (Set_string.mem cxt.ignored_dirs cxt.cwd) then ( - let file_array = Sys.readdir working_dir in - (* Remove .gen.js/.gen.tsx during clean up *) - Ext_array.iter file_array (fun file -> - let is_typescript = cxt.gentype_language = "typescript" in - if - (not is_typescript) - && Ext_string.ends_with file Literals.suffix_gen_js - || (is_typescript && Ext_string.ends_with file Literals.suffix_gen_tsx) - then Sys.remove (Filename.concat working_dir file)); - let cxt_traverse = cxt.traverse in - match (sub_dirs_field, cxt_traverse) with - | None, true | Some (True _), _ -> - Ext_array.iter file_array (fun f -> - if - (not (Set_string.mem cxt.ignored_dirs f)) - && Ext_sys.is_directory_no_exn (Filename.concat working_dir f) - then - walk_source_dir_map - { - cxt with - cwd = - Ext_path.concat cxt.cwd - (Ext_path.simple_convert_node_path_to_os_path f); - traverse = true; - } - 
None) - | None, _ | Some (False _), _ -> () - | Some s, _ -> walk_sources cxt s) - -(* It makes use of the side effect when [walk_sources], removing suffix_re_js, - TODO: make it configurable -*) -let clean_re_js root = - match - Ext_json_parse.parse_json_from_file - (Filename.concat root Literals.bsconfig_json) - with - | Obj {map} -> - let ignored_dirs = - match map.?(Bsb_build_schemas.ignored_dirs) with - | Some (Arr {content = x}) -> - Set_string.of_list (Bsb_build_util.get_list_string x) - | Some _ | None -> Set_string.empty - in - let gentype_language = - match map.?(Bsb_build_schemas.gentypeconfig) with - | None -> "" - | Some (Obj {map}) -> ( - match map.?(Bsb_build_schemas.language) with - | None -> "" - | Some (Str {str}) -> str - | Some _ -> "") - | Some _ -> "" - in - Ext_option.iter map.?(Bsb_build_schemas.sources) (fun config -> - try - walk_sources - { - root; - traverse = true; - cwd = Filename.current_dir_name; - ignored_dirs; - gentype_language; - } - config - with _ -> ()) - | _ -> () - | exception _ -> () diff --git a/compiler/bsb/bsb_parse_sources.mli b/compiler/bsb/bsb_parse_sources.mli deleted file mode 100644 index 8e48631eee5..00000000000 --- a/compiler/bsb/bsb_parse_sources.mli +++ /dev/null @@ -1,44 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val scan : - package_kind:Bsb_package_kind.t -> - root:string -> - cut_generators:bool -> - (* namespace : string option -> *) - ignored_dirs:Set_string.t -> - Ext_json_types.t -> - Bsb_file_groups.t -(** [scan .. 
cxt json] - entry is to the [sources] in the schema - given a root, return an object which is - all relative paths, this function will do the IO -*) - -val clean_re_js : string -> unit -(** This function has some duplication - from [scan], - the parsing assuming the format is - already valid -*) diff --git a/compiler/bsb/bsb_pkg.ml b/compiler/bsb/bsb_pkg.ml deleted file mode 100644 index 47a3f22bc04..00000000000 --- a/compiler/bsb/bsb_pkg.ml +++ /dev/null @@ -1,95 +0,0 @@ -(* Copyright (C) 2017- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let ( // ) = Filename.concat - -type t = Bsb_pkg_types.t - -let make_sub_path (x : t) : string = - Literals.node_modules // Bsb_pkg_types.to_string x - -let node_paths : string list Lazy.t = - lazy - (try - Ext_string.split (Sys.getenv "NODE_PATH") - (if Sys.win32 then ';' else ':') - with _ -> []) - -(** It makes sense to have this function raise, when [bsb] could not resolve a package, it used to mean - a failure -*) -let check_dir dir = - match Sys.file_exists dir with - | true -> Some dir - | false -> None - -let resolve_bs_package_aux ~cwd (pkg : t) = - (* First try to resolve recursively from the current working directory *) - let sub_path = make_sub_path pkg in - let rec aux cwd = - let abs_marker = cwd // sub_path in - if Sys.file_exists abs_marker then abs_marker - else - let another_cwd = Filename.dirname cwd in - (* TODO: may non-terminating when see symlinks *) - if String.length another_cwd < String.length cwd then aux another_cwd - else - (* To the end try other possiblilities [NODE_PATH]*) - match - Ext_list.find_opt (Lazy.force node_paths) (fun dir -> - check_dir (dir // Bsb_pkg_types.to_string pkg)) - with - | Some resolved_dir -> resolved_dir - | None -> Bsb_exception.package_not_found ~pkg - in - aux cwd - -module Coll = Hash.Make (struct - type nonrec t = t - - let equal = Bsb_pkg_types.equal - - let hash (x : t) = Hashtbl.hash x -end) - -let cache : string Coll.t = Coll.create 0 - -let to_list cb = Coll.to_list cache cb - -(** TODO: collect all warnings and print later *) -let resolve_bs_package ~cwd (package : t) = - match Coll.find_opt cache package with - | None -> - let result = resolve_bs_package_aux ~cwd package in - Bsb_log.info "@{Package@} %a -> %s@." Bsb_pkg_types.print package - result; - Coll.add cache package result; - result - | Some x -> - let result = resolve_bs_package_aux ~cwd package in - if not (Bsb_real_path.is_same_paths_via_io result x) then - Bsb_log.warn - "@{Duplicated package:@} %a %s (chosen) vs %s in %s @." 
- Bsb_pkg_types.print package x result cwd; - x diff --git a/compiler/bsb/bsb_pkg.mli b/compiler/bsb/bsb_pkg.mli deleted file mode 100644 index 2359b41dadd..00000000000 --- a/compiler/bsb/bsb_pkg.mli +++ /dev/null @@ -1,38 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -(** [resolve cwd module_name], - [cwd] is current working directory, absolute path - Trying to find paths to load [module_name] - it is sepcialized for option [-bs-package-include] which requires - [npm_package_name/lib/ocaml] - - it relies on [npm_config_prefix] env variable for global npm modules -*) - -val resolve_bs_package : cwd:string -> Bsb_pkg_types.t -> string -(** @raise when not found *) - -val to_list : (Bsb_pkg_types.t -> string -> 'a) -> 'a list -(** used by watcher *) diff --git a/compiler/bsb/bsb_pkg_types.ml b/compiler/bsb/bsb_pkg_types.ml deleted file mode 100644 index 2b060fb829d..00000000000 --- a/compiler/bsb/bsb_pkg_types.ml +++ /dev/null @@ -1,88 +0,0 @@ -(* Copyright (C) 2018- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let ( // ) = Filename.concat - -type t = Global of string | Scope of string * scope - -and scope = string - -let to_string (x : t) = - match x with - | Global s -> s - | Scope (s, scope) -> scope // s - -let print fmt (x : t) = - match x with - | Global s -> Format.pp_print_string fmt s - | Scope (name, scope) -> Format.fprintf fmt "%s/%s" scope name - -let equal (x : t) y = - match (x, y) with - | Scope (a0, a1), Scope (b0, b1) -> a0 = b0 && a1 = b1 - | Global a0, Global b0 -> a0 = b0 - | Scope _, Global _ | Global _, Scope _ -> false - -(** - input: {[ - @hello/yy/xx - hello/yy - ]} - FIXME: fix invalid input - {[ - hello//xh//helo - ]} -*) -let extract_pkg_name_and_file (s : string) = - let len = String.length s in - assert (len > 0); - let v = String.unsafe_get s 0 in - if v = '@' then ( - let scope_id = Ext_string.no_slash_idx s in - assert (scope_id > 0); - let pkg_id = Ext_string.no_slash_idx_from s (scope_id + 1) in - let scope = String.sub s 0 scope_id in - - if pkg_id < 0 then - (Scope (String.sub s (scope_id + 1) (len - scope_id - 1), scope), "") - else - ( Scope (String.sub s (scope_id + 1) (pkg_id - scope_id - 1), scope), - String.sub s (pkg_id + 1) (len - pkg_id - 1) )) - else - let pkg_id = Ext_string.no_slash_idx s in - if pkg_id < 0 then (Global s, "") - else - ( Global (String.sub s 0 pkg_id), - String.sub s (pkg_id + 1) (len - pkg_id - 1) ) - -let string_as_package (s : string) : t = - let len = String.length s in - assert (len > 0); - let v = String.unsafe_get s 0 in - if v = '@' then ( - let scope_id = Ext_string.no_slash_idx s in - assert (scope_id > 0); - Scope - (String.sub s (scope_id + 1) (len - scope_id - 1), String.sub s 0 scope_id)) - else Global s diff --git a/compiler/bsb/bsb_pkg_types.mli 
b/compiler/bsb/bsb_pkg_types.mli deleted file mode 100644 index ebb1662e186..00000000000 --- a/compiler/bsb/bsb_pkg_types.mli +++ /dev/null @@ -1,39 +0,0 @@ -(* Copyright (C) 2019- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -type t = Global of string | Scope of string * scope - -and scope = string - -val to_string : t -> string - -val print : Format.formatter -> t -> unit - -val equal : t -> t -> bool - -(* The second element could be empty or dropped -*) -val extract_pkg_name_and_file : string -> t * string - -val string_as_package : string -> t diff --git a/compiler/bsb/bsb_real_path.ml b/compiler/bsb/bsb_real_path.ml deleted file mode 100644 index 47a6f91a496..00000000000 --- a/compiler/bsb/bsb_real_path.ml +++ /dev/null @@ -1,52 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let ( // ) = Filename.concat - -let normalize_exn (s : string) : string = - let old_cwd = Sys.getcwd () in - Unix.chdir s; - let normalized = Sys.getcwd () in - Unix.chdir old_cwd; - normalized - -let real_path p = - match Sys.is_directory p with - | exception _ -> - let rec resolve dir = - if Sys.file_exists dir then normalize_exn dir - else - let parent = Filename.dirname dir in - if dir = parent then dir else resolve parent // Filename.basename dir - in - let p = if Filename.is_relative p then Sys.getcwd () // p else p in - resolve p - | true -> normalize_exn p - | false -> ( - let dir = normalize_exn (Filename.dirname p) in - match Filename.basename p with - | "." -> dir - | base -> dir // base) - -let is_same_paths_via_io a b = if a = b then true else real_path a = real_path b diff --git a/compiler/bsb/bsb_real_path.mli b/compiler/bsb/bsb_real_path.mli deleted file mode 100644 index 65a93ef7f8e..00000000000 --- a/compiler/bsb/bsb_real_path.mli +++ /dev/null @@ -1,25 +0,0 @@ -(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val is_same_paths_via_io : string -> string -> bool diff --git a/compiler/bsb/bsb_regex.ml b/compiler/bsb/bsb_regex.ml deleted file mode 100644 index 603004e0b37..00000000000 --- a/compiler/bsb/bsb_regex.ml +++ /dev/null @@ -1,59 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let string_after s n = String.sub s n (String.length s - n) - -(* There seems to be a bug in {!Str.global_substitute} - {[ - Str.global_substitute (Str.regexp "\\${rescript:\\([-a-zA-Z0-9]+\\)}") (fun x -> (x^":found")) {| ${rescript:hello-world} ${rescript:x} ${x}|} ;; - - : bytes = - " ${rescript:hello-world} ${rescript:x} ${x}:found ${rescript:hello-world} ${rescript:x} ${x}:found ${x}" - ]} -*) -let global_substitute text ~reg:expr repl_fun = - let text_len = String.length text in - let expr = Str.regexp expr in - let rec replace accu start last_was_empty = - let startpos = if last_was_empty then start + 1 else start in - if startpos > text_len then string_after text start :: accu - else - match Str.search_forward expr text startpos with - | exception Not_found -> string_after text start :: accu - | pos -> - let end_pos = Str.match_end () in - let matched = Str.matched_string text in - let groups = - let rec aux n acc = - match Str.matched_group n text with - | exception (Not_found | Invalid_argument _) -> acc - | v -> aux (succ n) (v :: acc) - in - aux 1 [] - in - let repl_text = repl_fun matched groups in - replace - (repl_text :: String.sub text start (pos - start) :: accu) - end_pos (end_pos = pos) - in - String.concat "" (List.rev (replace [] 0 false)) diff --git a/compiler/bsb/bsb_regex.mli b/compiler/bsb/bsb_regex.mli deleted file mode 100644 index 747b9fdb2d8..00000000000 --- a/compiler/bsb/bsb_regex.mli +++ /dev/null @@ -1,27 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your 
option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val global_substitute : - string -> reg:string -> (string -> string list -> string) -> string -(** Used in `bsb -init` command *) diff --git a/compiler/bsb/bsb_spec_set.ml b/compiler/bsb/bsb_spec_set.ml deleted file mode 100644 index d60bc4374af..00000000000 --- a/compiler/bsb/bsb_spec_set.ml +++ /dev/null @@ -1,81 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -[@@@warning "+9"] - -(* TODO: sync up with {!Js_packages_info.module_system} *) -type format = Ext_module_system.t - -type spec = {format: format; in_source: bool; suffix: string} - -type t = spec list - -let cmp (s1 : spec) ({format; in_source; suffix} : spec) = - let v = compare s1.format format in - if v <> 0 then v - else - let v = compare s1.in_source in_source in - if v <> 0 then v else compare s1.suffix suffix - -let empty = [] - -let rec insert lst piviot = - match lst with - | [] -> [piviot] - | x :: xs -> - let v = cmp piviot x in - if v = 0 then lst - else if v < 0 then piviot :: lst - else x :: insert xs piviot - -let add spec specs = - match specs with - | [] -> [spec] - | [a] -> - let v = cmp spec a in - if v < 0 then spec :: specs else if v = 0 then specs else [a; spec] - | [a; b] -> - let v = cmp spec a in - if v < 0 then spec :: specs - else if v = 0 then specs - else - let v1 = cmp spec b in - if v < 0 then [a; spec; b] else if v1 = 0 then specs else [a; b; spec] - | _ :: _ :: _ :: _ -> - (* unlikely to happen *) - insert specs spec - -let singleton x = [x] - -let rec fold f t acc = - match t with - | [] -> acc - | x :: xs -> fold f xs (f x acc) - -let rec iter f t = - match t with - | [] -> () - | x :: xs -> - f x; - iter f xs diff --git a/compiler/bsb/bsb_spec_set.mli b/compiler/bsb/bsb_spec_set.mli deleted file mode 100644 index 96312be026c..00000000000 --- a/compiler/bsb/bsb_spec_set.mli +++ /dev/null @@ -1,38 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) -type format = Ext_module_system.t - -type spec = {format: format; in_source: bool; suffix: string} - -type t = private spec list - -val empty : t - -val add : spec -> t -> t - -val singleton : spec -> t - -val fold : (spec -> 'a -> 'a) -> t -> 'a -> 'a - -val iter : (spec -> unit) -> t -> unit diff --git a/compiler/bsb/bsb_unix.ml b/compiler/bsb/bsb_unix.ml deleted file mode 100644 index 47979ed7504..00000000000 --- a/compiler/bsb/bsb_unix.ml +++ /dev/null @@ -1,84 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type command = {cmd: string; cwd: string; args: string array} - -let log cmd = - Bsb_log.info "@{Entering@} %s @." cmd.cwd; - Bsb_log.info "@{Cmd:@} "; - Bsb_log.info_args cmd.args - -let command_fatal_error cmd eid = - Bsb_log.error "@{Failure:@} %s \nLocation: %s@." cmd.cmd cmd.cwd; - exit eid - -let run_command_execv_unix cmd : int = - match Unix.fork () with - | 0 -> - log cmd; - Unix.chdir cmd.cwd; - Unix.execv cmd.cmd cmd.args - | pid -> ( - match Unix.waitpid [] pid with - | _, process_status -> ( - match process_status with - | Unix.WEXITED eid -> eid - | Unix.WSIGNALED _ | Unix.WSTOPPED _ -> - Bsb_log.error "@{Interrupted:@} %s@." 
cmd.cmd; - 2)) - -(** TODO: the args are not quoted, here - we are calling a very limited set of `bsb` commands, so that - we are safe -*) -let run_command_execv_win (cmd : command) = - let old_cwd = Unix.getcwd () in - log cmd; - Unix.chdir cmd.cwd; - let eid = - Sys.command - (String.concat Ext_string.single_space - (Filename.quote cmd.cmd :: (List.tl @@ Array.to_list cmd.args))) - in - Bsb_log.info "@{Leaving@} %s => %s @." cmd.cwd old_cwd; - Unix.chdir old_cwd; - eid - -(** it assume you have permissions, so always catch it to fail - gracefully -*) -let run_command_execv = - if Ext_sys.is_windows_or_cygwin then run_command_execv_win - else run_command_execv_unix - -let rec remove_dir_recursive dir = - match Sys.is_directory dir with - | true -> - let files = Sys.readdir dir in - for i = 0 to Array.length files - 1 do - remove_dir_recursive (Filename.concat dir (Array.unsafe_get files i)) - done; - Unix.rmdir dir - | false -> Sys.remove dir - | exception _ -> () diff --git a/compiler/bsb/bsb_unix.mli b/compiler/bsb/bsb_unix.mli deleted file mode 100644 index e5c6720adcf..00000000000 --- a/compiler/bsb/bsb_unix.mli +++ /dev/null @@ -1,31 +0,0 @@ -(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P. - * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type command = {cmd: string; cwd: string; args: string array} - -val command_fatal_error : command -> int -> unit - -val run_command_execv : command -> int - -val remove_dir_recursive : string -> unit diff --git a/compiler/bsb/bsb_warning.ml b/compiler/bsb/bsb_warning.ml deleted file mode 100644 index f3927090acf..00000000000 --- a/compiler/bsb/bsb_warning.ml +++ /dev/null @@ -1,98 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type warning_error = - | Warn_error_false - (* default [false] to make our changes non-intrusive *) - | Warn_error_true - | Warn_error_number of string - -type t0 = {number: string option; error: warning_error} - -type nonrec t = t0 option - -let use_default = None - -let prepare_warning_concat ~(beg : bool) s = - let s = Ext_string.trim s in - if s = "" then s - else - match s.[0] with - | '0' .. '9' -> if beg then "-w +" ^ s else "+" ^ s - | 'a' .. 'z' -> if beg then "-w " ^ s else "-" ^ s - | 'A' .. 
'Z' -> if beg then "-w " ^ s else "+" ^ s - | _ -> if beg then "-w " ^ s else s - -let to_merlin_string x = - "-w " ^ Bsc_warnings.defaults_w - ^ - let customize = - match x with - | Some {number = None} | None -> Ext_string.empty - | Some {number = Some x} -> prepare_warning_concat ~beg:false x - in - if customize = "" then customize else customize ^ "-40-42-61" -(* see #4406 to avoid user pass A - Sync up with {!Warnings.report} -*) - -let from_map (m : Ext_json_types.t Map_string.t) = - let number_opt = Map_string.find_opt m Bsb_build_schemas.number in - let error_opt = Map_string.find_opt m Bsb_build_schemas.error in - match (number_opt, error_opt) with - | None, None -> None - | _, _ -> - let error = - match error_opt with - | Some (True _) -> Warn_error_true - | Some (False _) -> Warn_error_false - | Some (Str {str}) -> Warn_error_number str - | Some x -> Bsb_exception.config_error x "expect true/false or string" - | None -> Warn_error_false - (* To make it less intrusive : warning error has to be enabled*) - in - let number = - match number_opt with - | Some (Str {str = number}) -> Some number - | None -> None - | Some x -> Bsb_exception.config_error x "expect a string" - in - Some {number; error} - -let to_bsb_string ~(package_kind : Bsb_package_kind.t) warning = - match package_kind with - | Toplevel -> ( - match warning with - | None -> Ext_string.empty - | Some warning -> ( - (match warning.number with - | None -> Ext_string.empty - | Some x -> prepare_warning_concat ~beg:true x) - ^ - match warning.error with - | Warn_error_true -> " -warn-error A" - | Warn_error_number y -> " -warn-error " ^ y - | Warn_error_false -> Ext_string.empty)) - | Dependency _ -> " -w a" -(* TODO: this is the current default behavior *) diff --git a/compiler/bsb/bsb_warning.mli b/compiler/bsb/bsb_warning.mli deleted file mode 100644 index d83ec889073..00000000000 --- a/compiler/bsb/bsb_warning.mli +++ /dev/null @@ -1,44 +0,0 @@ -(* Copyright (C) 2017 Hongbo Zhang, Authors of 
ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -type warning_error = - | Warn_error_false - (* default [false] to make our changes non-intrusive *) - | Warn_error_true - | Warn_error_number of string - -type t0 = {number: string option; error: warning_error} - -type nonrec t = t0 option - -val to_merlin_string : t -> string -(** Extra work is need to make merlin happy *) - -val from_map : Ext_json_types.t Map_string.t -> t - -val to_bsb_string : package_kind:Bsb_package_kind.t -> t -> string -(** [to_bsb_string not_dev warning] -*) - -val use_default : t diff --git a/compiler/bsb/bsb_watcher_gen.ml b/compiler/bsb/bsb_watcher_gen.ml deleted file mode 100644 index 341b2b0d8bc..00000000000 --- a/compiler/bsb/bsb_watcher_gen.ml +++ /dev/null @@ -1,49 +0,0 @@ -(* Copyright (C) 2017- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let kvs = Ext_json_noloc.kvs - -let arr = Ext_json_noloc.arr - -let str = Ext_json_noloc.str - -let generate_sourcedirs_meta ~name (res : Bsb_file_groups.t) = - let v = - kvs - [ - ("dirs", arr (Ext_array.of_list_map res.files (fun x -> str x.dir))); - ( "generated", - arr - (Array.of_list - @@ Ext_list.fold_left res.files [] (fun acc x -> - Ext_list.flat_map_append x.generators acc (fun x -> - Ext_list.map x.output str))) ); - ( "pkgs", - arr - (Array.of_list - (Bsb_pkg.to_list (fun pkg path -> - arr [|str (Bsb_pkg_types.to_string pkg); str path|]))) ); - ] - in - Ext_json_noloc.to_file name v diff --git a/compiler/bsb/bsb_watcher_gen.mli b/compiler/bsb/bsb_watcher_gen.mli deleted file mode 100644 index 1d13f3cb7fb..00000000000 --- a/compiler/bsb/bsb_watcher_gen.mli +++ /dev/null @@ -1,33 +0,0 @@ -(* Copyright (C) 2017- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val generate_sourcedirs_meta : name:string -> Bsb_file_groups.t -> unit -(** This module try to generate some meta data so that - everytime [rescript.json] is reloaded, we can re-read - such meta data changes in the watcher. - - Another way of doing it is processing [rescript.json] - directly in [watcher] but that would - mean the duplication of logic in [bsb] and [bsb_watcher] -*) diff --git a/compiler/bsb/bsb_world.ml b/compiler/bsb/bsb_world.ml deleted file mode 100644 index 73c47f0fa69..00000000000 --- a/compiler/bsb/bsb_world.ml +++ /dev/null @@ -1,94 +0,0 @@ -(* Copyright (C) 2017- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let ( // ) = Ext_path.combine -let vendor_ninja = Bsb_global_paths.vendor_ninja - -let make_world_deps cwd (config : Bsb_config_types.t option) - (ninja_args : string array) = - let package_specs, jsx = - match config with - | None -> - (* When this running bsb does not read rescript.json, - we will read such json file to know which [package-specs] - it wants - *) - Bsb_config_parse.deps_from_bsconfig () - | Some config -> (config.package_specs, config.jsx) - in - let args = - if Ext_array.is_empty ninja_args then [|vendor_ninja|] - else Array.append [|vendor_ninja|] ninja_args - in - let lib_artifacts_dir = Bsb_config.lib_bs in - let queue = Bsb_build_util.walk_all_deps cwd in - (* let oc = open_out_bin ".deps.log" in - queue |> Queue.iter (fun ({top; proj_dir} : Bsb_build_util.package_context) -> - match top with - | Expect_none -> () - | Expect_name s -> - output_string oc s ; - output_string oc " : "; - output_string oc proj_dir; - output_string oc "\n" - ); - close_out oc ; *) - queue - |> Queue.iter (fun ({top; proj_dir} : Bsb_build_util.package_context) -> - match top with - | Expect_none -> () - | Expect_name s -> - print_endline ("Dependency on " ^ s); - let lib_bs_dir = proj_dir // lib_artifacts_dir in - Bsb_build_util.mkp lib_bs_dir; - let _config : _ option = - Bsb_ninja_regen.regenerate_ninja - ~package_kind:(Dependency {package_specs; jsx}) - ~per_proj_dir:proj_dir ~forced:false ~warn_legacy_config:false - ~warn_as_error:None - in - let command = - {Bsb_unix.cmd = 
vendor_ninja; cwd = lib_bs_dir; args} - in - let eid = Bsb_unix.run_command_execv command in - if eid <> 0 then Bsb_unix.command_fatal_error command eid; - (* When ninja is not regenerated, ninja will still do the build, - still need reinstall check - Note that we can check if ninja print "no work to do", - then don't need reinstall more - *) - Bsb_log.info "@{Installation started@}@."; - let install_dir = proj_dir // "lib" // "ocaml" in - Bsb_build_util.mkp install_dir; - let install_command = - { - Bsb_unix.cmd = vendor_ninja; - cwd = install_dir; - args = [|vendor_ninja; "-f"; ".." // "bs" // "install.ninja"|]; - } - in - let eid = Bsb_unix.run_command_execv install_command in - if eid <> 0 then Bsb_unix.command_fatal_error install_command eid; - Bsb_log.info "@{Installation finished@}@."); - print_endline "Dependency Finished" diff --git a/compiler/bsb/bsb_world.mli b/compiler/bsb/bsb_world.mli deleted file mode 100644 index e705a220998..00000000000 --- a/compiler/bsb/bsb_world.mli +++ /dev/null @@ -1,26 +0,0 @@ -(* Copyright (C) 2017- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -val make_world_deps : - string -> Bsb_config_types.t option -> string array -> unit diff --git a/compiler/bsb/data_format.md b/compiler/bsb/data_format.md deleted file mode 100644 index a47acf0f79c..00000000000 --- a/compiler/bsb/data_format.md +++ /dev/null @@ -1,24 +0,0 @@ - - -# format about lib/bs/.bsbuild - -This file (in binary) contains all file info needed in build. - -It is encoded in bsb_db_encode.ml and bsb_db_decode.ml, the format is optimized for performance in decoding where it is used most. - - -- The first 16 chars is digest of the following content - -The rest is encoding for each group (source code and test), in most cases, you have one group or two groups (one for lib one for test). - -Begining with a new line, the number of total groups (encoded in text format) are encoded. - - -For each group, it starts with a newline and the -number of modules (text format). - -The following are list of modules in sorted order (Ext_string.compare) separated by newline. - -The following are list of directories separated by tab character. 
- -The next is a fixed length for module description, its encoding is hard coded in Bsb_db_encode.make_encoding \ No newline at end of file diff --git a/compiler/bsb/dune b/compiler/bsb/dune deleted file mode 100644 index bdf0b9f031d..00000000000 --- a/compiler/bsb/dune +++ /dev/null @@ -1,6 +0,0 @@ -(library - (name bsb) - (wrapped false) - (flags - (:standard -w +a-4-9-40-41-42-70)) - (libraries common ext str unix)) diff --git a/compiler/bsb_exe/dune b/compiler/bsb_exe/dune deleted file mode 100644 index 0c22a4930cb..00000000000 --- a/compiler/bsb_exe/dune +++ /dev/null @@ -1,14 +0,0 @@ -(env - (static - (flags - (:standard -ccopt -static)))) - -(executable - (name rescript_main) - (public_name rescript-legacy) - (package rescript) - (enabled_if - (<> %{profile} browser)) - (flags - (:standard -w +a-4-9-40-41-42-70)) - (libraries bsb common ext str unix)) diff --git a/compiler/bsb_exe/rescript_main.ml b/compiler/bsb_exe/rescript_main.ml deleted file mode 100644 index b28527a01d2..00000000000 --- a/compiler/bsb_exe/rescript_main.ml +++ /dev/null @@ -1,260 +0,0 @@ -(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let () = Bsb_log.setup () - -let separator = "--" - -let no_deps_mode = ref false - -let do_install = ref false - -let warning_as_error = ref None - -let force_regenerate = ref false - -type spec = Bsb_arg.spec - -let call_spec f : spec = Unit (Unit_call f) - -let unit_set_spec b : spec = Unit (Unit_set b) - -let string_set_spec s : spec = String (String_set s) - -let string_call f : spec = String (String_call f) - -let failed_annon ~rev_args = - match rev_args with - | x :: _ -> Bsb_arg.bad_arg ("Don't know what to do with " ^ x) - | _ -> () - -(*Note that [keepdepfile] only makes sense when combined with [deps] for optimization*) - -(** Invariant: it has to be the last command of [bsb] *) -let exec_command_then_exit (type t) (command : string) : t = - Bsb_log.info "@{CMD:@} %s@." 
command; - exit (Sys.command command) - -(* Execute the underlying ninja build call, then exit (as opposed to keep watching) *) -let ninja_command_exit (type t) (ninja_args : string array) : t = - let ninja_args_len = Array.length ninja_args in - let lib_artifacts_dir = Bsb_config.lib_bs in - if Ext_sys.is_windows_or_cygwin then - let path_ninja = Filename.quote Bsb_global_paths.vendor_ninja in - exec_command_then_exit - (if ninja_args_len = 0 then - Ext_string.inter3 path_ninja "-C" lib_artifacts_dir - else - let args = - Array.append [|path_ninja; "-C"; lib_artifacts_dir|] ninja_args - in - Ext_string.concat_array Ext_string.single_space args) - else - let ninja_common_args = [|"ninja.exe"; "-C"; lib_artifacts_dir|] in - let args = - if ninja_args_len = 0 then ninja_common_args - else Array.append ninja_common_args ninja_args - in - Bsb_log.info_args args; - Unix.execvp Bsb_global_paths.vendor_ninja args - -(** - Cache files generated: - - .bsdircache in project root dir - - .bsdeps in builddir - - What will happen, some flags are really not good - ninja -C _build -*) -let clean_usage = - "Usage: rescript clean \n\n`rescript clean` cleans build artifacts\n" - -let build_usage = - "Usage: rescript build -- \n\n\ - `rescript build` builds the project with dependencies\n\n\ - `rescript build -- -h` for Ninja options (internal usage only; unstable)\n" - -let install_target () = - let ( // ) = Filename.concat in - let vendor_ninja = Bsb_global_paths.vendor_ninja in - let install_dir = "lib" // "ocaml" in - Bsb_build_util.mkp install_dir; - let install_command = - { - Bsb_unix.cmd = vendor_ninja; - cwd = install_dir; - args = [|vendor_ninja; "-f"; ".." 
// "bs" // "install.ninja"|]; - } - in - let eid = Bsb_unix.run_command_execv install_command in - if eid <> 0 then Bsb_unix.command_fatal_error install_command eid - -let setup_runtime_path path = Runtime_package.path := path - -let build_subcommand ~start argv argv_len = - let i = Ext_array.rfind_with_index argv Ext_string.equal separator in - - Bsb_arg.parse_exn ~usage:build_usage ~start - ?finish:(if i < 0 then None else Some i) - ~argv - [| - ("-w", unit_set_spec (ref false), "Watch mode"); - ( "-ws", - string_set_spec (ref ""), - "[host]:port set up host & port for WebSocket build notifications" ); - ("-verbose", call_spec Bsb_log.verbose, "Set the output to be verbose"); - ( "-with-deps", - unit_set_spec (ref true), - "*deprecated* This is the default behavior now. This option will be \ - removed in a future release" ); - ( "-install", - unit_set_spec do_install, - "*internal* Install public interface files for dependencies" ); - (* This should be put in a subcommand - previously it works with the implication `bsb && bsb -install` - *) - ( "-regen", - unit_set_spec force_regenerate, - "*internal* \n\ - Always regenerate build.ninja no matter if rescript.json is changed \ - or not" ); - ( "-no-deps", - unit_set_spec no_deps_mode, - "*internal* Needed for watcher to build without dependencies on file \ - change" ); - ( "-runtime-path", - string_call setup_runtime_path, - "*internal* Set the path of the runtime package (@rescript/runtime)" ); - ( "-warn-error", - string_call (fun s -> warning_as_error := Some s), - "Warning numbers and whether to turn them into errors, e.g., \ - \"+8+32-102\"" ); - |] - failed_annon; - - let ninja_args = - if i < 0 then [||] else Array.sub argv (i + 1) (argv_len - i - 1) - in - match ninja_args with - | [|"-h"|] -> ninja_command_exit ninja_args - | _ -> - let warn_as_error = - match !warning_as_error with - | Some s -> - let () = - try Warnings.parse_options true s - with Arg.Bad msg -> Bsb_arg.bad_arg (msg ^ "\n") - in - Some 
s - | None -> None - in - let config_opt = - Bsb_ninja_regen.regenerate_ninja ~package_kind:Toplevel - ~per_proj_dir:Bsb_global_paths.cwd ~forced:!force_regenerate - ~warn_legacy_config:true ~warn_as_error - in - if not !no_deps_mode then - Bsb_world.make_world_deps Bsb_global_paths.cwd config_opt ninja_args; - if !do_install then install_target (); - ninja_command_exit ninja_args - -let clean_subcommand ~start argv = - Bsb_arg.parse_exn ~usage:clean_usage ~start ~argv - [| - ("-verbose", call_spec Bsb_log.verbose, "Set the output to be verbose"); - ( "-with-deps", - unit_set_spec (ref true), - "*deprecated* This is the default behavior now. This option will be \ - removed in a future release" ); - |] - failed_annon; - Bsb_clean.clean_bs_deps Bsb_global_paths.cwd; - Bsb_clean.clean_self Bsb_global_paths.cwd - -let list_files = ref false - -let info_subcommand ~start argv = - Bsb_arg.parse_exn ~usage:"query the project" ~start ~argv - [|("-list-files", unit_set_spec list_files, "list source files")|] - (fun ~rev_args -> - (match rev_args with - | x :: _ -> raise (Bsb_arg.Bad ("Don't know what to do with " ^ x)) - | [] -> ()); - if !list_files then - match - Bsb_ninja_regen.regenerate_ninja ~package_kind:Toplevel - ~per_proj_dir:Bsb_global_paths.cwd ~forced:true - ~warn_legacy_config:true ~warn_as_error:None - with - | None -> assert false - | Some {file_groups = {files}} -> - Ext_list.iter files (fun {sources} -> - Map_string.iter sources (fun _ {info; name_sans_extension} -> - let extensions = - match info with - | Intf -> assert false - | Impl -> [".res"] - | Impl_intf -> [".res"; ".resi"] - in - Ext_list.iter extensions (fun x -> - print_endline (name_sans_extension ^ x))))) - -(* see discussion #929, if we catch the exception, we don't have stacktrace... 
*) -let () = - let argv = Sys.argv in - let argv_len = Array.length argv in - try - if argv_len = 1 then ( - (* specialize this path which is used in watcher *) - let config_opt = - Bsb_ninja_regen.regenerate_ninja ~package_kind:Toplevel - ~per_proj_dir:Bsb_global_paths.cwd ~forced:false - ~warn_legacy_config:true ~warn_as_error:None - in - Bsb_world.make_world_deps Bsb_global_paths.cwd config_opt [||]; - ninja_command_exit [||]) - else - match argv.(1) with - | "build" -> build_subcommand ~start:2 argv argv_len - | "clean" -> clean_subcommand ~start:2 argv - | "info" -> - (* internal *) - info_subcommand ~start:2 argv - | first_arg -> - prerr_endline @@ "Unknown subcommand or flags: " ^ first_arg; - exit 1 - with - | Bsb_exception.Error e -> - Bsb_exception.print Format.err_formatter e; - Format.pp_print_newline Format.err_formatter (); - exit 2 - | Ext_json_parse.Error (start, _, e) -> - Format.fprintf Format.err_formatter - "File %S, line %d\n@{Error:@} %a@." start.pos_fname start.pos_lnum - Ext_json_parse.report_error e; - exit 2 - | Bsb_arg.Bad s | Sys_error s -> - Format.fprintf Format.err_formatter "@{Error:@} %s" s; - exit 2 - | e -> Ext_pervasives.reraise e diff --git a/compiler/bsb_exe/rescript_main.mli b/compiler/bsb_exe/rescript_main.mli deleted file mode 100644 index aa4fd337794..00000000000 --- a/compiler/bsb_exe/rescript_main.mli +++ /dev/null @@ -1,23 +0,0 @@ -(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) diff --git a/compiler/bsb_helper/bsb_db_decode.ml b/compiler/bsb_helper/bsb_db_decode.ml deleted file mode 100644 index dd6e5e4af1d..00000000000 --- a/compiler/bsb_helper/bsb_db_decode.ml +++ /dev/null @@ -1,119 +0,0 @@ -(* Copyright (C) 2019 - Present Hongbo Zhang, Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let bsbuild_cache = Literals.bsbuild_cache - -type group = - | Dummy - | Group of { - modules: string array; - dir_length: int; - dir_info_offset: int; - module_info_offset: int; - } - -type t = {lib: group; dev: group; content: string (* string is whole content*)} - -type cursor = int ref - -(*TODO: special case when module_count is zero *) -let rec decode (x : string) : t = - let (offset : cursor) = ref 0 in - let lib = decode_single x offset in - let dev = decode_single x offset in - {lib; dev; content = x} - -and decode_single (x : string) (offset : cursor) : group = - let module_number = Ext_pervasives.parse_nat_of_string x offset in - incr offset; - if module_number <> 0 then ( - let modules = decode_modules x offset module_number in - let dir_info_offset = !offset in - let module_info_offset = String.index_from x dir_info_offset '\n' + 1 in - let dir_length = - Char.code x.[module_info_offset] - 48 - (* Char.code '0'*) - in - offset := module_info_offset + 1 + (dir_length * module_number) + 1; - Group {modules; dir_info_offset; module_info_offset; dir_length}) - else Dummy - -and decode_modules (x : string) (offset : cursor) module_number : string array = - let result = Array.make module_number "" in - let last = ref !offset in - let cur = ref !offset in - let tasks = ref 0 in - while !tasks <> module_number do - if String.unsafe_get x !cur = '\n' then ( - let offs = !last in - let len = !cur - !last in - Array.unsafe_set result !tasks (Ext_string.unsafe_sub x offs len); - incr tasks; - last := !cur + 1); - incr cur - done; - offset := !cur; - result - -(* TODO: shall we check the consistency of digest *) -let read_build_cache ~dir : t = - let all_content = Ext_io.load_file (Filename.concat dir bsbuild_cache) in - decode all_content - -type module_info = {case: bool; (* which is Bsb_db.case*) dir_name: string} - -let find_opt ({content = whole} as db : t) lib (key : string) : - module_info option = - match if lib then db.lib else db.dev with - | Dummy 
-> None - | Group ({modules} as group) -> ( - let i = Ext_string_array.find_sorted modules key in - match i with - | None -> None - | Some count -> - let encode_len = group.dir_length in - let index = - Ext_string.get_1_2_3_4 whole - ~off:(group.module_info_offset + 1 + (count * encode_len)) - encode_len - in - let case = not (index mod 2 = 0) in - let ith = index lsr 1 in - let dir_name_start = - if ith = 0 then group.dir_info_offset - else Ext_string.index_count whole group.dir_info_offset '\t' ith + 1 - in - let dir_name_finish = String.index_from whole dir_name_start '\t' in - Some - { - case; - dir_name = - String.sub whole dir_name_start (dir_name_finish - dir_name_start); - }) - -let find db dependent_module is_not_lib_dir = - let opt = find_opt db true dependent_module in - match opt with - | Some _ -> opt - | None -> if is_not_lib_dir then find_opt db false dependent_module else None diff --git a/compiler/bsb_helper/bsb_db_decode.mli b/compiler/bsb_helper/bsb_db_decode.mli deleted file mode 100644 index 2aa097bb7d3..00000000000 --- a/compiler/bsb_helper/bsb_db_decode.mli +++ /dev/null @@ -1,49 +0,0 @@ -(* Copyright (C) 2019 - Present Authors of ReScript - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type group = private - | Dummy - | Group of { - modules: string array; - dir_length: int; - dir_info_offset: int; - module_info_offset: int; - } - -type t = {lib: group; dev: group; content: string (* string is whole content*)} - -val read_build_cache : dir:string -> t - -type module_info = {case: bool; (* Bsb_db.case*) dir_name: string} - -val find : - t -> - (* contains global info *) - string -> - (* module name *) - bool -> - (* more likely to be zero *) - module_info option - -val decode : string -> t diff --git a/compiler/bsb_helper/bsb_helper_depfile_gen.ml b/compiler/bsb_helper/bsb_helper_depfile_gen.ml deleted file mode 100644 index e4f9edb7505..00000000000 --- a/compiler/bsb_helper/bsb_helper_depfile_gen.ml +++ /dev/null @@ -1,172 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -let dep_lit = " :" - -let write_buf name buf = - let oc = open_out_bin name in - Ext_buffer.output_buffer oc buf; - close_out oc - -(* should be good for small file *) -let load_file name (buf : Ext_buffer.t) : unit = - let len = Ext_buffer.length buf in - let ic = open_in_bin name in - let n = in_channel_length ic in - if n <> len then ( - close_in ic; - write_buf name buf) - else - let holder = really_input_string ic n in - close_in ic; - if Ext_buffer.not_equal buf holder then write_buf name buf - -let write_file name (buf : Ext_buffer.t) = - if Sys.file_exists name then load_file name buf else write_buf name buf - -(* return an non-decoded string *) -let extract_dep_raw_string (fn : string) : string = - let ic = open_in_bin fn in - let size = input_binary_int ic in - let s = really_input_string ic size in - close_in ic; - s - -(* Make sure it is the same as {!Binary_ast.magic_sep_char}*) -let magic_sep_char = '\n' - -let deps_of_channel (ic : in_channel) : string list = - let size = input_binary_int ic in - let s = really_input_string ic size in - let rec aux (s : string) acc (offset : int) size : string list = - if offset < size then - let next_tab = String.index_from s offset magic_sep_char in - aux s (String.sub s offset (next_tab - offset) :: acc) (next_tab + 1) size - else acc - in - aux s [] 1 size - -(** Please refer to {!Binary_ast} for encoding format, we move it here - mostly for cutting the dependency so that [bsb_helper.exe] does - not depend on compler-libs -*) -(* let read_deps (fn : string) : string list = - let ic = open_in_bin fn in - let v = deps_of_channel ic in - close_in ic; - v -*) - -let output_file (buf : Ext_buffer.t) source namespace = - Ext_buffer.add_string buf (Ext_namespace_encode.make ?ns:namespace source) - -(** for rescript artifacts - [lhs_suffix] is [.cmj] - [rhs_suffix] - is [.cmj] if it has [ml] (in this case does not care about mli or not) - is [.cmi] if it has [mli] -*) -let oc_cmi buf namespace source = - 
Ext_buffer.add_char buf ' '; - output_file buf source namespace; - Ext_buffer.add_string buf Literals.suffix_cmi - -(* For cases with self cycle - e.g, in b.ml - {[ - include B - ]} - When ns is not turned on, it makes sense that b may come from third party package. - Hoever, this case is wont supported. - It complicates when it has interface file or not. - - if it has interface file, the current interface will have priority, failed to build? - - if it does not have interface file, the build will not open this module at all(-bs-read-cmi) - - When ns is turned on, `B` is interprted as `Ns-B` which is a cyclic dependency, - it can be errored out earlier - - #5368: It turns out there are many false positives on detecting self-cycles (see: `tests/build_tests/zerocycle`) - To properly solve this, we would need to `compiler/ml/depend.ml` because - cmi and cmj is broken in the first place (same problem as in ocaml/ocaml#4618). - So we will just ignore the self-cycles. Even if there is indeed a self-cycle, it should fail to compile anyway. 
-*) -let oc_deps (ast_file : string) (is_dev : bool) (db : Bsb_db_decode.t) - (namespace : string option) (buf : Ext_buffer.t) (kind : [`impl | `intf]) : - unit = - (* TODO: move namespace upper, it is better to resolve ealier *) - let cur_module_name = Ext_filename.module_name ast_file in - let at_most_once : unit lazy_t = - lazy - (output_file buf (Ext_filename.chop_extension_maybe ast_file) namespace; - Ext_buffer.add_string buf - (if kind = `impl then Literals.suffix_cmj else Literals.suffix_cmi); - (* print the source *) - Ext_buffer.add_string buf dep_lit) - in - (match namespace with - | None -> () - | Some ns -> - Lazy.force at_most_once; - Ext_buffer.add_char buf ' '; - Ext_buffer.add_string buf ns; - Ext_buffer.add_string buf Literals.suffix_cmi (* always cmi *)); - (* TODO: moved into static files*) - let s = extract_dep_raw_string ast_file in - let offset = ref 1 in - let size = String.length s in - while !offset < size do - let next_tab = String.index_from s !offset magic_sep_char in - let dependent_module = String.sub s !offset (next_tab - !offset) in - (if dependent_module = cur_module_name then - (*prerr_endline ("FAILED: " ^ cur_module_name ^ " has a self cycle"); - exit 2*) - (* #5368 ignore self dependencies *) () - else - match Bsb_db_decode.find db dependent_module is_dev with - | None -> () - | Some {dir_name; case} -> - Lazy.force at_most_once; - let source = - Filename.concat dir_name - (if case then dependent_module - else Ext_string.uncapitalize_ascii dependent_module) - in - Ext_buffer.add_char buf ' '; - if kind = `impl then ( - output_file buf source namespace; - Ext_buffer.add_string buf Literals.suffix_cmj); - (* #3260 cmj changes does not imply cmi change anymore *) - oc_cmi buf namespace source); - offset := next_tab + 1 - done; - if Lazy.is_val at_most_once then Ext_buffer.add_char buf '\n' - -let emit_d (is_dev : bool) (namespace : string option) (mlast : string) - (mliast : string) = - let data = Bsb_db_decode.read_build_cache 
~dir:Filename.current_dir_name in - let buf = Ext_buffer.create 2048 in - let filename = Ext_filename.new_extension mlast Literals.suffix_d in - oc_deps mlast is_dev data namespace buf `impl; - if mliast <> "" then oc_deps mliast is_dev data namespace buf `intf; - write_file filename buf diff --git a/compiler/bsb_helper/bsb_helper_depfile_gen.mli b/compiler/bsb_helper/bsb_helper_depfile_gen.mli deleted file mode 100644 index 840ccb73274..00000000000 --- a/compiler/bsb_helper/bsb_helper_depfile_gen.mli +++ /dev/null @@ -1,36 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*) - -val deps_of_channel : in_channel -> string list -(** [deps_of_channel ic] - given an input_channel dumps all modules it depend on, only used for debugging -*) - -val emit_d : - bool -> - string option -> - string -> - string -> - (* empty string means no mliast *) - unit diff --git a/compiler/bsb_helper/dune b/compiler/bsb_helper/dune deleted file mode 100644 index d0ecc7a71e5..00000000000 --- a/compiler/bsb_helper/dune +++ /dev/null @@ -1,6 +0,0 @@ -(library - (name bsb_helper) - (wrapped false) - (flags - (:standard -w +a-9-40-42)) - (libraries ext)) diff --git a/compiler/bsb_helper_exe/bsb_helper_main.ml b/compiler/bsb_helper_exe/bsb_helper_main.ml deleted file mode 100644 index 1564ea7215f..00000000000 --- a/compiler/bsb_helper_exe/bsb_helper_main.ml +++ /dev/null @@ -1,53 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -let () = - let namespace = ref None in - let dev_group = ref false in - let argv = Sys.argv in - let l = Array.length argv in - let current = ref 1 in - let rev_list = ref [] in - while !current < l do - let s = argv.(!current) in - incr current; - if s <> "" && s.[0] = '-' then ( - match s with - | "-hash" -> incr current - | "-bs-ns" -> - let ns = argv.(!current) in - namespace := Some ns; - incr current - | "-g" -> dev_group := true - | s -> - prerr_endline ("unknown options: " ^ s); - prerr_endline "available options: -hash [hash]; -bs-ns [ns]; -g"; - exit 2) - else rev_list := s :: !rev_list - done; - match !rev_list with - | [x] -> Bsb_helper_depfile_gen.emit_d !dev_group !namespace x "" - | [y; x] (* reverse order *) -> - Bsb_helper_depfile_gen.emit_d !dev_group !namespace x y - | _ -> () diff --git a/compiler/bsb_helper_exe/bsb_helper_main.mli b/compiler/bsb_helper_exe/bsb_helper_main.mli deleted file mode 100644 index f30aaad4f99..00000000000 --- a/compiler/bsb_helper_exe/bsb_helper_main.mli +++ /dev/null @@ -1,34 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -(** Used to generate .d file, for example - {[ - bsb_helper.exe -g 0 -MD src/hi/hello.ml - ]} - It will read the cache file and generate the corresponding - [.d] file. 
This [.d] file will be used as attribute [depfile] - whether we use namespace or not, the filename of [.mlast], [.d] - should be kept the same, we only need change the name of [.cm*] - and the contents of filename in [.d] -*) diff --git a/compiler/bsb_helper_exe/dune b/compiler/bsb_helper_exe/dune deleted file mode 100644 index 362ee51ea05..00000000000 --- a/compiler/bsb_helper_exe/dune +++ /dev/null @@ -1,14 +0,0 @@ -(env - (static - (flags - (:standard -ccopt -static)))) - -(executable - (name bsb_helper_main) - (public_name bsb_helper) - (package rescript) - (enabled_if - (<> %{profile} browser)) - (flags - (:standard -w +a-9-40-42)) - (libraries bsb_helper)) diff --git a/compiler/dune b/compiler/dune index 2edab16a9ec..436e4159679 100644 --- a/compiler/dune +++ b/compiler/dune @@ -1,8 +1,4 @@ (dirs - bsb - bsb_exe - bsb_helper - bsb_helper_exe bsc common core diff --git a/compiler/ext/bsb_db.ml b/compiler/ext/bsb_db.ml deleted file mode 100644 index b08e6943b50..00000000000 --- a/compiler/ext/bsb_db.ml +++ /dev/null @@ -1,46 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -type case = bool -(** true means upper case*) - -type info = - | Intf - (* intemediate state *) - | Impl - | Impl_intf - -type module_info = { - mutable info: info; - dir: string; - case: bool; - name_sans_extension: string; -} - -type map = module_info Map_string.t - -type 'a cat = {mutable lib: 'a; mutable dev: 'a} - -type t = map cat -(** indexed by the group *) diff --git a/compiler/ext/bsb_db.mli b/compiler/ext/bsb_db.mli deleted file mode 100644 index 3b54f9f1c6c..00000000000 --- a/compiler/ext/bsb_db.mli +++ /dev/null @@ -1,60 +0,0 @@ -(* Copyright (C) 2015-2016 Bloomberg Finance L.P. - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * In addition to the permissions granted to you by the LGPL, you may combine - * or link a "work that uses the Library" with a publicly distributed version - * of this file to produce a combined library or application, then distribute - * that combined work under the terms of your choosing, with no requirement - * to comply with the obligations normally placed on you by section 4 of the - * LGPL version 3 (or the corresponding section of a later version of the LGPL - * should you choose to use a later version). 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with this program; if not, write to the Free Software - * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) - -(** Store a file called [.bsbuild] that can be communicated - between [bsb.exe] and [bsb_helper.exe]. - [bsb.exe] stores such data which would be retrieved by - [bsb_helper.exe]. It is currently used to combine with - ocamldep to figure out which module->file it depends on -*) - -type case = bool - -type info = - | Intf - (* intemediate state *) - | Impl - | Impl_intf - -type module_info = { - mutable info: info; - dir: string; - case: bool; - name_sans_extension: string; -} - -type map = module_info Map_string.t - -type 'a cat = {mutable lib: 'a; mutable dev: 'a} - -type t = map cat - -(** store the meta data indexed by {!Bsb_dir_index} - {[ - 0 --> lib group - 1 --> dev 1 group - . 
- - ]} -*) diff --git a/compiler/ext/ext_buffer.ml b/compiler/ext/ext_buffer.ml index 5dbb8396df7..b88b3b8b8c6 100644 --- a/compiler/ext/ext_buffer.ml +++ b/compiler/ext/ext_buffer.ml @@ -107,19 +107,6 @@ let add_char_string b c s = Ext_bytes.unsafe_blit_string s 0 b_buffer (b_position + 1) s_len; b.position <- new_position -(* equivalent to add_char " "; add_char "$"; add_string s *) -let add_ninja_prefix_var b s = - let s_len = String.length s in - let len = s_len + 2 in - let new_position = b.position + len in - if new_position > b.length then resize b len; - let b_buffer = b.buffer in - let b_position = b.position in - Bytes.unsafe_set b_buffer b_position ' '; - Bytes.unsafe_set b_buffer (b_position + 1) '$'; - Ext_bytes.unsafe_blit_string s 0 b_buffer (b_position + 2) s_len; - b.position <- new_position - (* let add_bytes b s = add_string b (Bytes.unsafe_to_string s) let add_buffer b bs = diff --git a/compiler/ext/ext_buffer.mli b/compiler/ext/ext_buffer.mli index a00115b95f1..e42861df887 100644 --- a/compiler/ext/ext_buffer.mli +++ b/compiler/ext/ext_buffer.mli @@ -97,6 +97,4 @@ val add_int_4 : t -> int -> unit val add_string_char : t -> string -> char -> unit -val add_ninja_prefix_var : t -> string -> unit - val add_char_string : t -> char -> string -> unit diff --git a/compiler/ext/ext_filename.ml b/compiler/ext/ext_filename.ml index cb3302bac4d..fd7c64c4267 100644 --- a/compiler/ext/ext_filename.ml +++ b/compiler/ext/ext_filename.ml @@ -28,17 +28,6 @@ let is_dir_sep_win_cygwin c = c = '/' || c = '\\' || c = ':' let is_dir_sep = if Sys.unix then is_dir_sep_unix else is_dir_sep_win_cygwin -(* reference ninja.cc IsKnownShellSafeCharacter *) -let maybe_quote (s : string) = - let noneed_quote = - Ext_string.for_all s (function - | '0' .. '9' | 'a' .. 'z' | 'A' .. 'Z' | '_' | '+' | '-' | '.' 
| '/' | '@' - -> - true - | _ -> false) - in - if noneed_quote then s else Filename.quote s - let chop_extension_maybe name = let rec search_dot i = if i < 0 || is_dir_sep (String.unsafe_get name i) then name diff --git a/compiler/ext/ext_filename.mli b/compiler/ext/ext_filename.mli index e111ee2002f..5c678310b9e 100644 --- a/compiler/ext/ext_filename.mli +++ b/compiler/ext/ext_filename.mli @@ -33,8 +33,6 @@ val is_dir_sep : char -> bool -val maybe_quote : string -> string - val chop_extension_maybe : string -> string (* return an empty string if no extension found *) diff --git a/compiler/ext/ext_json_noloc.ml b/compiler/ext/ext_json_noloc.ml index 4977770cf06..3697a02226f 100644 --- a/compiler/ext/ext_json_noloc.ml +++ b/compiler/ext/ext_json_noloc.ml @@ -22,7 +22,6 @@ * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *) -(* This file is only used in bsb watcher searlization *) type t = | True | False diff --git a/compiler/ext/literals.ml b/compiler/ext/literals.ml index 2b469652ada..718110205df 100644 --- a/compiler/ext/literals.ml +++ b/compiler/ext/literals.ml @@ -76,8 +76,6 @@ let bsconfig_json = "bsconfig.json" let rescript_json = "rescript.json" -let build_ninja = "build.ninja" - (* Name of the library file created for each external dependency. *) let library_file = "lib" @@ -141,8 +139,6 @@ let node_current = "." 
let gentype_import1 = "genType.import" let gentype_import2 = "gentype.import" -let bsbuild_cache = ".bsbuild" - let sourcedirs_meta = ".sourcedirs.json" (* Note the build system should check the validity of filenames diff --git a/compiler/gentype/GenTypeConfig.ml b/compiler/gentype/GenTypeConfig.ml index e6c05c2d43a..4dcfffa4588 100644 --- a/compiler/gentype/GenTypeConfig.ml +++ b/compiler/gentype/GenTypeConfig.ml @@ -179,7 +179,7 @@ let read_config ~get_config_file ~namespace = let generated_file_extension = generated_file_extension_string_option in let platform_lib = "rescript" in if !Debug.config then ( - Log_.item "Project roLiterals.bsconfig_jsonot: %s\n" project_root; + Log_.item "Project root: %s\n" project_root; if bsb_project_root <> project_root then Log_.item "bsb project root: %s\n" bsb_project_root; Log_.item "Config module:%s shims:%d entries \n" diff --git a/compiler/syntax/src/res_scanner.ml b/compiler/syntax/src/res_scanner.ml index 5615b32add8..0c4ae58bce2 100644 --- a/compiler/syntax/src/res_scanner.ml +++ b/compiler/syntax/src/res_scanner.ml @@ -54,7 +54,6 @@ let position scanner = } (* Small debugging util - ❯ echo 'let msg = "hello"' | ./lib/rescript-legacy.exe let msg = "hello" ^-^ let 0-3 let msg = "hello" diff --git a/docs/docson/build-schema.json b/docs/docson/build-schema.json index 03204c0cd54..9a33b46b774 100644 --- a/docs/docson/build-schema.json +++ b/docs/docson/build-schema.json @@ -2,7 +2,12 @@ "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { "module-format": { - "enum": ["esmodule", "commonjs", "es6", "es6-global"], + "enum": [ + "esmodule", + "commonjs", + "es6", + "es6-global" + ], "description": "es6 and es6-global are deprecated. Default: commonjs." 
}, "suffix-spec": { @@ -23,7 +28,9 @@ "$ref": "#/definitions/suffix-spec" } }, - "required": ["module"] + "required": [ + "module" + ] }, "package-spec": { "oneOf": [ @@ -66,7 +73,11 @@ "analysis": { "type": "array", "items": { - "enum": ["dce", "exception", "termination"] + "enum": [ + "dce", + "exception", + "termination" + ] }, "description": "The types of analysis to activate. `dce` means dead code analysis, `exception` means exception analysis, and `termination` is to check for infinite loops." }, @@ -165,9 +176,10 @@ "type": "string", "description": "name of the directory" }, - "type": { - "enum": ["dev"] + "enum": [ + "dev" + ] }, "files": { "oneOf": [ @@ -213,7 +225,9 @@ "description": "Selected modules, for example, [Module_a, Module_b] " }, { - "enum": ["all"] + "enum": [ + "all" + ] } ], "description": "Default: export all modules. It is recommended for library developers to hide some files/interfaces" @@ -266,7 +280,9 @@ } } }, - "required": ["dir"] + "required": [ + "dir" + ] }, { "title": "Single non-nested directory", @@ -304,7 +320,9 @@ "properties": { "version": { "type": "number", - "enum": [4], + "enum": [ + 4 + ], "description": "Whether to apply the specific version of JSX PPX transformation" }, "module": { @@ -373,7 +391,7 @@ "items": { "type": "string" }, - "description": "a list of directories that bsb will not look into" + "description": "a list of directories that the build system will not look into" }, "bs-dependencies": { "$ref": "#/definitions/dependencies", @@ -454,7 +472,7 @@ }, "js-post-build": { "$ref": "#/definitions/js-post-build", - "description": "(Experimental) post-processing hook. bsb will invoke `cmd ${file}` whenever a `${file}` is changed" + "description": "(Experimental) post-processing hook. 
The build system will invoke `cmd ${file}` whenever a `${file}` is changed" }, "package-specs": { "$ref": "#/definitions/package-specs", @@ -491,5 +509,8 @@ } }, "additionalProperties": false, - "required": ["name", "sources"] -} + "required": [ + "name", + "sources" + ] +} \ No newline at end of file diff --git a/docs/reactive_reanalyze_design.md b/docs/reactive_reanalyze_design.md index 2a208266bba..cf828df65f4 100644 --- a/docs/reactive_reanalyze_design.md +++ b/docs/reactive_reanalyze_design.md @@ -297,7 +297,7 @@ let runAnalysisIncremental ~config ~events = ## Open Questions -1. **Build system integration**: How to get file events from rewatch/ninja? +1. **Build system integration**: How to get file events from rewatch? 2. **CMT staleness**: What if build system is still writing CMT files? 3. **Multi-project**: How to handle monorepos with multiple rescript.json? 4. **Memory limits**: When to evict file_data from cache? diff --git a/lib_dev/paths.js b/lib_dev/paths.js index 38f73664e51..6f01c7f7c4d 100644 --- a/lib_dev/paths.js +++ b/lib_dev/paths.js @@ -32,11 +32,6 @@ export const runtimeEsmOutputDir = path.resolve(projectDir, "lib", "es6"); */ export const rewatchDir = path.resolve(projectDir, "rewatch"); -/** - * path: `/ninja/` - */ -export const ninjaDir = path.resolve(projectDir, "ninja"); - /** * path: `/tests/` */ diff --git a/ninja/.clang-format b/ninja/.clang-format deleted file mode 100644 index 1841c036f9c..00000000000 --- a/ninja/.clang-format +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2014 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This isn't meant to be authoritative, but it's good enough to be useful. -# Still use your best judgement for formatting decisions: clang-format -# sometimes makes strange choices. - -BasedOnStyle: Google -AllowShortFunctionsOnASingleLine: Inline -AllowShortIfStatementsOnASingleLine: false -AllowShortLoopsOnASingleLine: false -ConstructorInitializerAllOnOneLineOrOnePerLine: false -Cpp11BracedListStyle: false -IndentCaseLabels: false diff --git a/ninja/.gitignore b/ninja/.gitignore deleted file mode 100644 index 83f122f70dc..00000000000 --- a/ninja/.gitignore +++ /dev/null @@ -1,40 +0,0 @@ -*.pyc -*.obj -*.exe -*.pdb -*.ilk -TAGS -/build -/build.ninja -/ninja -/ninja.bootstrap -/build_log_perftest -/canon_perftest -/clparser_perftest -/depfile_parser_perftest -/hash_collision_bench -/ninja_test -/manifest_parser_perftest -/graph.png -/doc/manual.html -/doc/doxygen -/gtest-1.6.0 -*.patch - -# Eclipse project files -.project -.cproject - -# SublimeText project files -*.sublime-project -*.sublime-workspace - -# Ninja output -.ninja_deps -.ninja_log - -# Visual Studio Code project files -/.vscode/ -/.ccls-cache/ -*.tar.gz -.vs/ \ No newline at end of file diff --git a/ninja/COPYING b/ninja/COPYING deleted file mode 100644 index 131cb1da46d..00000000000 --- a/ninja/COPYING +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2010 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/ninja/HACKING.md b/ninja/HACKING.md deleted file mode 100644 index bd6fec7d188..00000000000 --- a/ninja/HACKING.md +++ /dev/null @@ -1,252 +0,0 @@ -## Basic overview - -`./configure.py` generates the `build.ninja` files used to build -ninja. It accepts various flags to adjust build parameters. -Run './configure.py --help' for more configuration options. - -The primary build target of interest is `ninja`, but when hacking on -Ninja your changes should be testable so it's more useful to build and -run `ninja_test` when developing. - -### Bootstrapping - -Ninja is built using itself. To bootstrap the first binary, run the -configure script as `./configure.py --bootstrap`. This first compiles -all non-test source files together, then re-builds Ninja using itself. 
-You should end up with a `ninja` binary (or `ninja.exe`) in the project root. - -#### Windows - -On Windows, you'll need to install Python to run `configure.py`, and -run everything under a Visual Studio Tools Command Prompt (or after -running `vcvarsall` in a normal command prompt). - -For other combinations such as gcc/clang you will need the compiler -(gcc/cl) in your PATH and you will have to set the appropriate -platform configuration script. - -See below if you want to use mingw or some other compiler instead of -Visual Studio. - -##### Using Visual Studio -Assuming that you now have Python installed, then the steps for building under -Windows using Visual Studio are: - -Clone and checkout the latest release (or whatever branch you want). You -can do this in either a command prompt or by opening a git bash prompt: - -``` - $ git clone git://github.com/ninja-build/ninja.git && cd ninja - $ git checkout release -``` - -Then: - -1. Open a Windows command prompt in the folder where you checked out ninja. -2. Select the Microsoft build environment by running -`vcvarsall.bat` with the appropriate environment. -3. Build ninja and test it. - -The steps for a Visual Studio 2015 64-bit build are outlined here: - -``` - > "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64 - > python configure.py --bootstrap - > ninja --help -``` -Copy the ninja executable to another location, if desired, e.g. C:\local\Ninja. - -Finally add the path where ninja.exe is to the PATH variable. - -### Adjusting build flags - -Build in "debug" mode while developing (disables optimizations and builds -way faster on Windows): - - ./configure.py --debug - -To use clang, set `CXX`: - - CXX=clang++ ./configure.py - -## How to successfully make changes to Ninja - -Github pull requests are convenient for me to merge (I can just click -a button and it's all handled server-side), but I'm also comfortable -accepting pre-github git patches (via `send-email` etc.). 
- -Good pull requests have all of these attributes: - -* Are scoped to one specific issue -* Include a test to demonstrate their correctness -* Update the docs where relevant -* Match the Ninja coding style (see below) -* Don't include a mess of "oops, fix typo" commits - -These are typically merged without hesitation. If a change is lacking -any of the above I usually will ask you to fix it, though there are -obvious exceptions (fixing typos in comments don't need tests). - -I am very wary of changes that increase the complexity of Ninja (in -particular, new build file syntax or command-line flags) or increase -the maintenance burden of Ninja. Ninja is already successfully used -by hundreds of developers for large projects and it already achieves -(most of) the goals I set out for it to do. It's probably best to -discuss new feature ideas on the [mailing list](https://groups.google.com/forum/#!forum/ninja-build) -before I shoot down your patch. - -## Testing - -### Test-driven development - -Set your build command to - - ./ninja ninja_test && ./ninja_test --gtest_filter=MyTest.Name - -now you can repeatedly run that while developing until the tests pass -(I frequently set it as my compilation command in Emacs). Remember to -build "all" before committing to verify the other source still works! - -## Testing performance impact of changes - -If you have a Chrome build handy, it's a good test case. There's a -script at `misc/measure.py` that repeatedly runs a command (to address -variance) and summarizes its runtime. E.g. - - path/to/misc/measure.py path/to/my/ninja chrome - -For changing the depfile parser, you can also build `parser_perftest` -and run that directly on some representative input files. - -## Coding guidelines - -Generally it's the [Google C++ coding style][], but in brief: - -* Function name are camelcase. -* Member methods are camelcase, except for trivial getters which are - underscore separated. -* Local variables are underscore separated. 
-* Member variables are underscore separated and suffixed by an extra - underscore. -* Two spaces indentation. -* Opening braces is at the end of line. -* Lines are 80 columns maximum. -* All source files should have the Google Inc. license header. - -[Google C++ coding style]: https://google.github.io/styleguide/cppguide.html - -## Documentation - -### Style guidelines - -* Use `///` for doxygen. -* Use `\a` to refer to arguments. -* It's not necessary to document each argument, especially when they're - relatively self-evident (e.g. in `CanonicalizePath(string* path, string* err)`, - the arguments are hopefully obvious) - -### Building the manual - - sudo apt-get install asciidoc --no-install-recommends - ./ninja manual - -### Building the code documentation - - sudo apt-get install doxygen - ./ninja doxygen - -## Building for Windows - -While developing, it's helpful to copy `ninja.exe` to another name like -`n.exe`; otherwise, rebuilds will be unable to write `ninja.exe` because -it's locked while in use. 
- -### Via Visual Studio - -* Install Visual Studio (Express is fine), [Python for Windows][], - and (if making changes) googletest (see above instructions) -* In a Visual Studio command prompt: `python configure.py --bootstrap` - -[Python for Windows]: http://www.python.org/getit/windows/ - -### Via mingw on Windows (not well supported) - -* Install mingw, msys, and python -* In the mingw shell, put Python in your path, and - `python configure.py --bootstrap` -* To reconfigure, run `python configure.py` -* Remember to strip the resulting executable if size matters to you - -### Via mingw on Linux (not well supported) - -Setup on Ubuntu Lucid: -* `sudo apt-get install gcc-mingw32 wine` -* `export CC=i586-mingw32msvc-cc CXX=i586-mingw32msvc-c++ AR=i586-mingw32msvc-ar` - -Setup on Ubuntu Precise: -* `sudo apt-get install gcc-mingw-w64-i686 g++-mingw-w64-i686 wine` -* `export CC=i686-w64-mingw32-gcc CXX=i686-w64-mingw32-g++ AR=i686-w64-mingw32-ar` - -Setup on Arch: -* Uncomment the `[multilib]` section of `/etc/pacman.conf` and `sudo pacman -Sy`. -* `sudo pacman -S mingw-w64-gcc wine` -* `export CC=x86_64-w64-mingw32-cc CXX=x86_64-w64-mingw32-c++ AR=x86_64-w64-mingw32-ar` -* `export CFLAGS=-I/usr/x86_64-w64-mingw32/include` - -Then run: -* `./configure.py --platform=mingw --host=linux` -* Build `ninja.exe` using a Linux ninja binary: `/path/to/linux/ninja` -* Run: `./ninja.exe` (implicitly runs through wine(!)) - -### Using Microsoft compilers on Linux (extremely flaky) - -The trick is to install just the compilers, and not all of Visual Studio, -by following [these instructions][win7sdk]. 
- -[win7sdk]: http://www.kegel.com/wine/cl-howto-win7sdk.html - -### Using gcov - -Do a clean debug build with the right flags: - - CFLAGS=-coverage LDFLAGS=-coverage ./configure.py --debug - ninja -t clean ninja_test && ninja ninja_test - -Run the test binary to generate `.gcda` and `.gcno` files in the build -directory, then run gcov on the .o files to generate `.gcov` files in the -root directory: - - ./ninja_test - gcov build/*.o - -Look at the generated `.gcov` files directly, or use your favorite gcov viewer. - -### Using afl-fuzz - -Build with afl-clang++: - - CXX=path/to/afl-1.20b/afl-clang++ ./configure.py - ninja - -Then run afl-fuzz like so: - - afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@ - -You can pass `-x misc/afl-fuzz-tokens` to use the token dictionary. In my -testing, that did not seem more effective though. - -#### Using afl-fuzz with asan - -If you want to use asan (the `isysroot` bit is only needed on OS X; if clang -can't find C++ standard headers make sure your LLVM checkout includes a libc++ -checkout and has libc++ installed in the build directory): - - CFLAGS="-fsanitize=address -isysroot $(xcrun -show-sdk-path)" \ - LDFLAGS=-fsanitize=address CXX=path/to/afl-1.20b/afl-clang++ \ - ./configure.py - AFL_CXX=path/to/clang++ ninja - -Make sure ninja can find the asan runtime: - - DYLD_LIBRARY_PATH=path/to//lib/clang/3.7.0/lib/darwin/ \ - afl-fuzz -i misc/afl-fuzz -o /tmp/afl-fuzz-out ./ninja -n -f @@ diff --git a/ninja/README b/ninja/README deleted file mode 100644 index a1535ffac83..00000000000 --- a/ninja/README +++ /dev/null @@ -1,21 +0,0 @@ -Ninja is a small build system with a focus on speed. -https://ninja-build.org/ - -See the manual -- https://ninja-build.org/manual.html or -doc/manual.asciidoc included in the distribution -- for background -and more details. - -Binaries for Linux, Mac, and Windows are available at - https://github.com/ninja-build/ninja/releases -Run './ninja -h' for Ninja help. 
- -To build your own binary, on many platforms it should be sufficient to -just run `./configure.py --bootstrap`; for more details see HACKING.md. -(Also read that before making changes to Ninja, as it has advice.) - -Installation is not necessary because the only required file is the -resulting ninja binary. However, to enable features like Bash -completion and Emacs and Vim editing modes, some files in misc/ must be -copied to appropriate locations. - -If you're interested in making changes to Ninja, read HACKING.md first. diff --git a/ninja/bootstrap.py b/ninja/bootstrap.py deleted file mode 100755 index 56eab64d18d..00000000000 --- a/ninja/bootstrap.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python -# Copyright 2011 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import subprocess -import sys - -print('DEPRECATED: this script will be deleted.') -print('use "configure.py --bootstrap" instead.') -subprocess.check_call([sys.executable, 'configure.py', '--bootstrap']) diff --git a/ninja/configure.py b/ninja/configure.py deleted file mode 100755 index b0fb9989881..00000000000 --- a/ninja/configure.py +++ /dev/null @@ -1,703 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2001 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Script that generates the build.ninja for ninja itself. - -Projects that use ninja themselves should either write a similar script -or use a meta-build system that supports Ninja output.""" - -from __future__ import print_function - -from optparse import OptionParser -import os -import shlex -import string -import subprocess -import sys - -sourcedir = os.path.dirname(os.path.realpath(__file__)) -sys.path.insert(0, os.path.join(sourcedir, 'misc')) -import ninja_syntax - - -class Platform(object): - """Represents a host/target platform and its specific build attributes.""" - def __init__(self, platform): - self._platform = platform - if self._platform is not None: - return - self._platform = sys.platform - if self._platform.startswith('linux'): - self._platform = 'linux' - elif self._platform.startswith('freebsd'): - self._platform = 'freebsd' - elif self._platform.startswith('gnukfreebsd'): - self._platform = 'freebsd' - elif self._platform.startswith('openbsd'): - self._platform = 'openbsd' - elif self._platform.startswith('solaris') or self._platform == 'sunos5': - self._platform = 'solaris' - elif self._platform.startswith('mingw'): - self._platform = 'mingw' - elif self._platform.startswith('win'): - self._platform = 'msvc' - elif self._platform.startswith('bitrig'): - self._platform = 'bitrig' - elif self._platform.startswith('netbsd'): - self._platform = 'netbsd' - elif self._platform.startswith('aix'): - self._platform = 'aix' - elif self._platform.startswith('dragonfly'): - self._platform = 'dragonfly' - - @staticmethod - def known_platforms(): 
- return ['linux', 'darwin', 'freebsd', 'openbsd', 'solaris', 'sunos5', - 'mingw', 'msvc', 'gnukfreebsd', 'bitrig', 'netbsd', 'aix', - 'dragonfly'] - - def platform(self): - return self._platform - - def is_linux(self): - return self._platform == 'linux' - - def is_mingw(self): - return self._platform == 'mingw' - - def is_msvc(self): - return self._platform == 'msvc' - - def msvc_needs_fs(self): - popen = subprocess.Popen(['cl', '/nologo', '/?'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - out, err = popen.communicate() - return b'/FS' in out - - def is_windows(self): - return self.is_mingw() or self.is_msvc() - - def is_solaris(self): - return self._platform == 'solaris' - - def is_aix(self): - return self._platform == 'aix' - - def uses_usr_local(self): - return self._platform in ('freebsd', 'openbsd', 'bitrig', 'dragonfly', 'netbsd') - - def supports_ppoll(self): - return self._platform in ('freebsd', 'linux', 'openbsd', 'bitrig', - 'dragonfly') - - def supports_ninja_browse(self): - return False - - def can_rebuild_in_place(self): - return not (self.is_windows() or self.is_aix()) - -class Bootstrap: - """API shim for ninja_syntax.Writer that instead runs the commands. - - Used to bootstrap Ninja from scratch. In --bootstrap mode this - class is used to execute all the commands to build an executable. - It also proxies all calls to an underlying ninja_syntax.Writer, to - behave like non-bootstrap mode. - """ - def __init__(self, writer, verbose=False): - self.writer = writer - self.verbose = verbose - # Map of variable name => expanded variable value. - self.vars = {} - # Map of rule name => dict of rule attributes. - self.rules = { - 'phony': {} - } - - def comment(self, text): - return self.writer.comment(text) - - def newline(self): - return self.writer.newline() - - def variable(self, key, val): - # In bootstrap mode, we have no ninja process to catch /showIncludes - # output. 
- self.vars[key] = self._expand(val).replace('/showIncludes', '') - return self.writer.variable(key, val) - - def rule(self, name, **kwargs): - self.rules[name] = kwargs - return self.writer.rule(name, **kwargs) - - def build(self, outputs, rule, inputs=None, **kwargs): - ruleattr = self.rules[rule] - cmd = ruleattr.get('command') - if cmd is None: # A phony rule, for example. - return - - # Implement just enough of Ninja variable expansion etc. to - # make the bootstrap build work. - local_vars = { - 'in': self._expand_paths(inputs), - 'out': self._expand_paths(outputs) - } - for key, val in kwargs.get('variables', []): - local_vars[key] = ' '.join(ninja_syntax.as_list(val)) - - self._run_command(self._expand(cmd, local_vars)) - - return self.writer.build(outputs, rule, inputs, **kwargs) - - def default(self, paths): - return self.writer.default(paths) - - def _expand_paths(self, paths): - """Expand $vars in an array of paths, e.g. from a 'build' block.""" - paths = ninja_syntax.as_list(paths) - return ' '.join(map(self._shell_escape, (map(self._expand, paths)))) - - def _expand(self, str, local_vars={}): - """Expand $vars in a string.""" - return ninja_syntax.expand(str, self.vars, local_vars) - - def _shell_escape(self, path): - """Quote paths containing spaces.""" - return '"%s"' % path if ' ' in path else path - - def _run_command(self, cmdline): - """Run a subcommand, quietly. 
Prints the full command on error.""" - try: - if self.verbose: - print(cmdline) - subprocess.check_call(cmdline, shell=True) - except subprocess.CalledProcessError: - print('when running: ', cmdline) - raise - - -parser = OptionParser() -profilers = ['gmon', 'pprof'] -parser.add_option('--bootstrap', action='store_true', - help='bootstrap a ninja binary from nothing') -parser.add_option('--verbose', action='store_true', - help='enable verbose build') -parser.add_option('--platform', - help='target platform (' + - '/'.join(Platform.known_platforms()) + ')', - choices=Platform.known_platforms()) -parser.add_option('--host', - help='host platform (' + - '/'.join(Platform.known_platforms()) + ')', - choices=Platform.known_platforms()) -parser.add_option('--debug', action='store_true', - help='enable debugging extras',) -parser.add_option('--profile', metavar='TYPE', - choices=profilers, - help='enable profiling (' + '/'.join(profilers) + ')',) -parser.add_option('--with-gtest', metavar='PATH', help='ignored') -parser.add_option('--with-python', metavar='EXE', - help='use EXE as the Python interpreter', - default=os.path.basename(sys.executable)) -parser.add_option('--force-pselect', action='store_true', - help='ppoll() is used by default where available, ' - 'but some platforms may need to use pselect instead',) -(options, args) = parser.parse_args() -if args: - print('ERROR: extra unparsed command-line arguments:', args) - sys.exit(1) - -platform = Platform(options.platform) -if options.host: - host = Platform(options.host) -else: - host = platform - -BUILD_FILENAME = 'build.ninja' -ninja_writer = ninja_syntax.Writer(open(BUILD_FILENAME, 'w')) -n = ninja_writer - -if options.bootstrap: - # Make the build directory. - try: - os.mkdir('build') - except OSError: - pass - # Wrap ninja_writer with the Bootstrapper, which also executes the - # commands. 
- print('bootstrapping ninja...') - n = Bootstrap(n, verbose=options.verbose) - -n.comment('This file is used to build ninja itself.') -n.comment('It is generated by ' + os.path.basename(__file__) + '.') -n.newline() - -n.variable('ninja_required_version', '1.3') -n.newline() - -n.comment('The arguments passed to configure.py, for rerunning it.') -configure_args = sys.argv[1:] -if '--bootstrap' in configure_args: - configure_args.remove('--bootstrap') -n.variable('configure_args', ' '.join(configure_args)) -env_keys = set(['CXX', 'AR', 'CFLAGS', 'CXXFLAGS', 'LDFLAGS']) -configure_env = dict((k, os.environ[k]) for k in os.environ if k in env_keys) -if configure_env: - config_str = ' '.join([k + '=' + shlex.quote(configure_env[k]) - for k in configure_env]) - n.variable('configure_env', config_str + '$ ') -n.newline() - -CXX = configure_env.get('CXX', 'g++') -objext = '.o' -if platform.is_msvc(): - CXX = 'cl' - objext = '.obj' - -def src(filename): - return os.path.join('$root', 'src', filename) -def built(filename): - return os.path.join('$builddir', filename) -def doc(filename): - return os.path.join('$root', 'doc', filename) -def cc(name, **kwargs): - return n.build(built(name + objext), 'cxx', src(name + '.c'), **kwargs) -def cxx(name, **kwargs): - return n.build(built(name + objext), 'cxx', src(name + '.cc'), **kwargs) -def binary(name): - if platform.is_windows(): - exe = name + '.exe' - n.build(name, 'phony', exe) - return exe - return name - -root = sourcedir -if root == os.getcwd(): - # In the common case where we're building directly in the source - # tree, simplify all the paths to just be cwd-relative. - root = '.' -n.variable('root', root) -n.variable('builddir', 'build') -n.variable('cxx', CXX) -if platform.is_msvc(): - n.variable('ar', 'link') -else: - n.variable('ar', configure_env.get('AR', 'ar')) - -if platform.is_msvc(): - cflags = ['/showIncludes', - '/nologo', # Don't print startup banner. - '/Zi', # Create pdb with debug info. 
- '/W4', # Highest warning level. - '/WX', # Warnings as errors. - '/wd4530', '/wd4100', '/wd4706', '/wd4244', - '/wd4512', '/wd4800', '/wd4702', '/wd4819', - # Disable warnings about constant conditional expressions. - '/wd4127', - # Disable warnings about passing "this" during initialization. - '/wd4355', - # Disable warnings about ignored typedef in DbgHelp.h - '/wd4091', - '/GR-', # Disable RTTI. - # Disable size_t -> int truncation warning. - # We never have strings or arrays larger than 2**31. - '/wd4267', - '/DNOMINMAX', '/D_CRT_SECURE_NO_WARNINGS', - '/D_HAS_EXCEPTIONS=0', - '/DNINJA_PYTHON="%s"' % options.with_python] - if platform.msvc_needs_fs(): - cflags.append('/FS') - ldflags = ['/DEBUG', '/libpath:$builddir'] - if not options.debug: - cflags += ['/Ox', '/DNDEBUG', '/GL'] - ldflags += ['/LTCG', '/OPT:REF', '/OPT:ICF'] -else: - cflags = ['-g', '-Wall', '-Wextra', - '-Wno-deprecated', - '-Wno-missing-field-initializers', - '-Wno-unused-parameter', - '-fno-rtti', - '-fno-exceptions', - '-fvisibility=hidden', '-pipe', - '-DNINJA_PYTHON="%s"' % options.with_python] - if options.debug: - cflags += ['-D_GLIBCXX_DEBUG', '-D_GLIBCXX_DEBUG_PEDANTIC'] - cflags.remove('-fno-rtti') # Needed for above pedanticness. 
- else: - cflags += ['-O2', '-DNDEBUG'] - try: - proc = subprocess.Popen( - [CXX, '-fdiagnostics-color', '-c', '-x', 'c++', '/dev/null', - '-o', '/dev/null'], - stdout=open(os.devnull, 'wb'), stderr=subprocess.STDOUT) - if proc.wait() == 0: - cflags += ['-fdiagnostics-color'] - except: - pass - if platform.is_mingw(): - cflags += ['-D_WIN32_WINNT=0x0501'] - ldflags = ['-L$builddir'] - if platform.uses_usr_local(): - cflags.append('-I/usr/local/include') - ldflags.append('-L/usr/local/lib') - if platform.is_aix(): - # printf formats for int64_t, uint64_t; large file support - cflags.append('-D__STDC_FORMAT_MACROS') - cflags.append('-D_LARGE_FILES') - - -libs = [] - -if platform.is_mingw(): - cflags.remove('-fvisibility=hidden'); - ldflags.append('-static') -elif platform.is_solaris(): - cflags.remove('-fvisibility=hidden') -elif platform.is_aix(): - cflags.remove('-fvisibility=hidden') -elif platform.is_msvc(): - pass -else: - if options.profile == 'gmon': - cflags.append('-pg') - ldflags.append('-pg') - elif options.profile == 'pprof': - cflags.append('-fno-omit-frame-pointer') - libs.extend(['-Wl,--no-as-needed', '-lprofiler']) - -if platform.supports_ppoll() and not options.force_pselect: - cflags.append('-DUSE_PPOLL') -if platform.supports_ninja_browse(): - cflags.append('-DNINJA_HAVE_BROWSE') - -# Search for generated headers relative to build dir. -cflags.append('-I.') - -def shell_escape(str): - """Escape str such that it's interpreted as a single argument by - the shell.""" - - # This isn't complete, but it's just enough to make NINJA_PYTHON work. 
- if platform.is_windows(): - return str - if '"' in str: - return "'%s'" % str.replace("'", "\\'") - return str - -if 'CFLAGS' in configure_env: - cflags.append(configure_env['CFLAGS']) - ldflags.append(configure_env['CFLAGS']) -if 'CXXFLAGS' in configure_env: - cflags.append(configure_env['CXXFLAGS']) - ldflags.append(configure_env['CXXFLAGS']) -n.variable('cflags', ' '.join(shell_escape(flag) for flag in cflags)) -if 'LDFLAGS' in configure_env: - ldflags.append(configure_env['LDFLAGS']) -n.variable('ldflags', ' '.join(shell_escape(flag) for flag in ldflags)) -n.newline() - -if platform.is_msvc(): - n.rule('cxx', - command='$cxx $cflags -c $in /Fo$out /Fd' + built('$pdb'), - description='CXX $out', - # deps='msvc' # /showIncludes is included in $cflags. - ) -else: - n.rule('cxx', - command='$cxx -MMD -MT $out -MF $out.d $cflags -c $in -o $out', - depfile='$out.d', - # deps='gcc', - description='CXX $out') -n.newline() - -if host.is_msvc(): - n.rule('ar', - command='lib /nologo /ltcg /out:$out $in', - description='LIB $out') -elif host.is_mingw(): - n.rule('ar', - command='cmd /c $ar cqs $out.tmp $in && move /Y $out.tmp $out', - description='AR $out') -else: - n.rule('ar', - command='rm -f $out && $ar crs $out $in', - description='AR $out') -n.newline() - -if platform.is_msvc(): - n.rule('link', - command='$cxx $in $libs /nologo /link $ldflags /out:$out', - description='LINK $out') -else: - n.rule('link', - command='$cxx $ldflags -o $out $in $libs', - description='LINK $out') -n.newline() - -objs = [] - -if platform.supports_ninja_browse(): - n.comment('browse_py.h is used to inline browse.py.') - n.rule('inline', - command='"%s"' % src('inline.sh') + ' $varname < $in > $out', - description='INLINE $out') - n.build(built('browse_py.h'), 'inline', src('browse.py'), - implicit=src('inline.sh'), - variables=[('varname', 'kBrowsePy')]) - n.newline() - - objs += cxx('browse', order_only=built('browse_py.h')) - n.newline() - -n.comment('the depfile parser and ninja 
lexers are generated using re2c.') -def has_re2c(): - try: - proc = subprocess.Popen(['re2c', '-V'], stdout=subprocess.PIPE) - return int(proc.communicate()[0], 10) >= 1103 - except OSError: - return False -if has_re2c(): - n.rule('re2c', - command='re2c -b -i --no-generation-date -o $out $in', - description='RE2C $out') - # Generate the .cc files in the source directory so we can check them in. - n.build(src('depfile_parser.cc'), 're2c', src('depfile_parser.in.cc')) - n.build(src('lexer.cc'), 're2c', src('lexer.in.cc')) -else: - print("warning: A compatible version of re2c (>= 0.11.3) was not found; " - "changes to src/*.in.cc will not affect your build.") -n.newline() - -n.comment('Core source files all build into ninja library.') -cxxvariables = [] -if platform.is_msvc(): - cxxvariables = [('pdb', 'ninja.pdb')] -for name in ['build', - 'build_log', - 'clean', - 'clparser', - 'debug_flags', - 'depfile_parser', - 'deps_log', - 'disk_interface', - 'dyndep', - 'dyndep_parser', - 'edit_distance', - 'eval_env', - 'graph', - 'graphviz', - 'lexer', - 'line_printer', - 'manifest_parser', - 'metrics', - 'parser', - 'state', - 'string_piece_util', - 'util', - 'version']: - objs += cxx(name, variables=cxxvariables) -if platform.is_windows(): - for name in ['subprocess-win32', - 'includes_normalize-win32', - 'msvc_helper-win32', - 'msvc_helper_main-win32']: - objs += cxx(name, variables=cxxvariables) - if platform.is_msvc(): - objs += cxx('minidump-win32', variables=cxxvariables) - objs += cc('getopt') -else: - objs += cxx('subprocess-posix') -if platform.is_aix(): - objs += cc('getopt') -if platform.is_msvc(): - ninja_lib = n.build(built('ninja.lib'), 'ar', objs) -else: - ninja_lib = n.build(built('libninja.a'), 'ar', objs) -n.newline() - -if platform.is_msvc(): - libs.append('ninja.lib') -else: - libs.append('-lninja') - -if platform.is_aix(): - libs.append('-lperfstat') - -all_targets = [] - -n.comment('Main executable is library plus main() function.') -objs = 
cxx('ninja', variables=cxxvariables) -ninja = n.build(binary('ninja'), 'link', objs, implicit=ninja_lib, - variables=[('libs', libs)]) -n.newline() -all_targets += ninja - -if options.bootstrap: - # We've built the ninja binary. Don't run any more commands - # through the bootstrap executor, but continue writing the - # build.ninja file. - n = ninja_writer - -n.comment('Tests all build into ninja_test executable.') - -objs = [] -if platform.is_msvc(): - cxxvariables = [('pdb', 'ninja_test.pdb')] - -for name in ['build_log_test', - 'build_test', - 'clean_test', - 'clparser_test', - 'depfile_parser_test', - 'deps_log_test', - 'dyndep_parser_test', - 'disk_interface_test', - 'edit_distance_test', - 'graph_test', - 'lexer_test', - 'manifest_parser_test', - 'ninja_test', - 'state_test', - 'string_piece_util_test', - 'subprocess_test', - 'test', - 'util_test']: - objs += cxx(name, variables=cxxvariables) -if platform.is_windows(): - for name in ['includes_normalize_test', 'msvc_helper_test']: - objs += cxx(name, variables=cxxvariables) - -ninja_test = n.build(binary('ninja_test'), 'link', objs, implicit=ninja_lib, - variables=[('libs', libs)]) -n.newline() -all_targets += ninja_test - - -n.comment('Ancillary executables.') - -for name in ['build_log_perftest', - 'canon_perftest', - 'depfile_parser_perftest', - 'hash_collision_bench', - 'manifest_parser_perftest', - 'clparser_perftest']: - if platform.is_msvc(): - cxxvariables = [('pdb', name + '.pdb')] - objs = cxx(name, variables=cxxvariables) - all_targets += n.build(binary(name), 'link', objs, - implicit=ninja_lib, variables=[('libs', libs)]) - -n.newline() - -n.comment('Generate a graph using the "graph" tool.') -n.rule('gendot', - command='./ninja -t graph all > $out') -n.rule('gengraph', - command='dot -Tpng $in > $out') -dot = n.build(built('graph.dot'), 'gendot', ['ninja', 'build.ninja']) -n.build('graph.png', 'gengraph', dot) -n.newline() - -n.comment('Generate the manual using asciidoc.') -n.rule('asciidoc', - 
command='asciidoc -b docbook -d book -o $out $in', - description='ASCIIDOC $out') -n.rule('xsltproc', - command='xsltproc --nonet doc/docbook.xsl $in > $out', - description='XSLTPROC $out') -docbookxml = n.build(built('manual.xml'), 'asciidoc', doc('manual.asciidoc')) -manual = n.build(doc('manual.html'), 'xsltproc', docbookxml, - implicit=[doc('style.css'), doc('docbook.xsl')]) -n.build('manual', 'phony', - order_only=manual) -n.newline() - -n.rule('dblatex', - command='dblatex -q -o $out -p doc/dblatex.xsl $in', - description='DBLATEX $out') -n.build(doc('manual.pdf'), 'dblatex', docbookxml, - implicit=[doc('dblatex.xsl')]) - -n.comment('Generate Doxygen.') -n.rule('doxygen', - command='doxygen $in', - description='DOXYGEN $in') -n.variable('doxygen_mainpage_generator', - src('gen_doxygen_mainpage.sh')) -n.rule('doxygen_mainpage', - command='$doxygen_mainpage_generator $in > $out', - description='DOXYGEN_MAINPAGE $out') -mainpage = n.build(built('doxygen_mainpage'), 'doxygen_mainpage', - ['README', 'COPYING'], - implicit=['$doxygen_mainpage_generator']) -n.build('doxygen', 'doxygen', doc('doxygen.config'), - implicit=mainpage) -n.newline() - -if not host.is_mingw(): - n.comment('Regenerate build files if build script changes.') - n.rule('configure', - command='${configure_env}%s $root/configure.py $configure_args' % - options.with_python, - generator=True) - n.build('build.ninja', 'configure', - implicit=['$root/configure.py', - os.path.normpath('$root/misc/ninja_syntax.py')]) - n.newline() - -n.default(ninja) -n.newline() - -if host.is_linux(): - n.comment('Packaging') - n.rule('rpmbuild', - command="misc/packaging/rpmbuild.sh", - description='Building rpms..') - n.build('rpm', 'rpmbuild') - n.newline() - -n.build('all', 'phony', all_targets) - -n.close() -print('wrote %s.' % BUILD_FILENAME) - -if options.bootstrap: - print('bootstrap complete. 
rebuilding...') - - rebuild_args = [] - - if platform.can_rebuild_in_place(): - rebuild_args.append('./ninja') - else: - if platform.is_windows(): - bootstrap_exe = 'ninja.bootstrap.exe' - final_exe = 'ninja.exe' - else: - bootstrap_exe = './ninja.bootstrap' - final_exe = './ninja' - - if os.path.exists(bootstrap_exe): - os.unlink(bootstrap_exe) - os.rename(final_exe, bootstrap_exe) - - rebuild_args.append(bootstrap_exe) - rebuild_args.append('-d') - rebuild_args.append('explain') - if options.verbose: - rebuild_args.append('-v') - - subprocess.check_call(rebuild_args) diff --git a/ninja/doc/README.md b/ninja/doc/README.md deleted file mode 100644 index 6afe5d4672e..00000000000 --- a/ninja/doc/README.md +++ /dev/null @@ -1,11 +0,0 @@ -This directory contains the Ninja manual and support files used in -building it. Here's a brief overview of how it works. - -The source text, `manual.asciidoc`, is written in the AsciiDoc format. -AsciiDoc can generate HTML but it doesn't look great; instead, we use -AsciiDoc to generate the Docbook XML format and then provide our own -Docbook XSL tweaks to produce HTML from that. - -In theory using AsciiDoc and DocBook allows us to produce nice PDF -documentation etc. In reality it's not clear anyone wants that, but the -build rules are in place to generate it if you install dblatex. 
diff --git a/ninja/doc/dblatex.xsl b/ninja/doc/dblatex.xsl deleted file mode 100644 index c0da212708b..00000000000 --- a/ninja/doc/dblatex.xsl +++ /dev/null @@ -1,7 +0,0 @@ - - - - 0 - 0 - diff --git a/ninja/doc/docbook.xsl b/ninja/doc/docbook.xsl deleted file mode 100644 index 2235be2e37a..00000000000 --- a/ninja/doc/docbook.xsl +++ /dev/null @@ -1,34 +0,0 @@ - - -]> - - - - - - - - - - - book toc - - - 0 - - - 1 - - - ul - - - diff --git a/ninja/doc/doxygen.config b/ninja/doc/doxygen.config deleted file mode 100644 index d933021e2ba..00000000000 --- a/ninja/doc/doxygen.config +++ /dev/null @@ -1,1250 +0,0 @@ -# Doxyfile 1.4.5 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "Ninja" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -# PROJECT_NUMBER = "0" - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. 
- -OUTPUT_DIRECTORY = "doc/doxygen/" - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Brazilian, Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish, -# Dutch, Finnish, French, German, Greek, Hungarian, Italian, Japanese, -# Japanese-en (Japanese with English messages), Korean, Korean-en, Norwegian, -# Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovene, Spanish, -# Swedish, and Ukrainian. - -OUTPUT_LANGUAGE = English - -# This tag can be used to specify the encoding used in the generated output. -# The encoding is not always determined by the language that is chosen, -# but also whether or not the output is meant for Windows or non-Windows users. -# In case there is a difference, setting the USE_WINDOWS_ENCODING tag to YES -# forces the Windows encoding (this is the default for the Windows binary), -# whereas setting the tag to NO uses a Unix-style encoding (the default for -# all platforms other than Windows). - -# Obsolet option. -#USE_WINDOWS_ENCODING = YES - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. 
- -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = YES - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. 
The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = src - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = src/ - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like the Qt-style comments (thus requiring an -# explicit @brief command for a brief description. - -JAVADOC_AUTOBRIEF = YES - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the DETAILS_AT_TOP tag is set to YES then Doxygen -# will output the detailed description near the top, like JavaDoc. -# If set to NO, the detailed description appears after the member -# documentation. - -# Has become obsolete. 
-#DETAILS_AT_TOP = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 2 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for Java. -# For instance, namespaces will be presented as packages, qualified scopes -# will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want to -# include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. 
-# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -# BUILTIN_STL_SUPPORT = NO - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = YES - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = YES - -# This flag is only useful for Objective-C code. 
When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = NO - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. 
This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = YES - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = YES - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = NO - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. 
- -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = YES - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = YES - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is YES. 
- -SHOW_DIRECTORIES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from the -# version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. 
- -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text " - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = src \ - build/doxygen_mainpage - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py - -FILE_PATTERNS = *.cc \ - *.h - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. 
This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = src - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = *.cpp \ - *.cc \ - *.h \ - *.hh \ - INSTALL DEPENDENCIES CHANGELOG LICENSE LGPL - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = YES - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = src - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. 
- -STRIP_CODE_COMMENTS = NO - -# If the REFERENCED_BY_RELATION tag is set to YES (the default) -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES (the default) -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = YES - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 2 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. 
- -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = html - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. -HTML_HEADER = - - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. 
- -HTML_ALIGN_MEMBERS = YES - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compressed HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = NO - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = NO - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = NO - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. 
- -ENUM_VALUES_PER_LINE = 4 - -# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be -# generated containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. - -GENERATE_TREEVIEW = YES - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. 
If left blank a4wide will be used. - -PAPER_TYPE = a4 - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = YES - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = YES - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = YES - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. 
- -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. 
- -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. 
- -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. This is useful -# if you want to understand what is going on. On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. 
- -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = YES - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. 
To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. 
- -GENERATE_TAGFILE = doc/doxygen/html/Ninja.TAGFILE - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = YES - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = YES - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = YES - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = YES - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. 
- -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = NO - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = NO -# UML_LOOK = YES - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT tags are set to YES then doxygen will -# generate a call dependency graph for every global function or class method. -# Note that enabling this option will significantly increase the time of a run. -# So in most cases it will be better to enable call graphs for selected -# functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. 
- -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width -# (in pixels) of the graphs generated by dot. If a graph becomes larger than -# this value, doxygen will try to truncate the graph, so that it fits within -# the specified constraint. Beware that most browsers cannot cope with very -# large images. - -# Obsolet option. -#MAX_DOT_GRAPH_WIDTH = 1280 - -# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allows height -# (in pixels) of the graphs generated by dot. If a graph becomes larger than -# this value, doxygen will try to truncate the graph, so that it fits within -# the specified constraint. Beware that most browsers cannot cope with very -# large images. - -# Obsolet option. -#MAX_DOT_GRAPH_HEIGHT = 1024 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. 
Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that a graph may be further truncated if the graph's -# image dimensions are not sufficient to fit the graph (see MAX_DOT_GRAPH_WIDTH -# and MAX_DOT_GRAPH_HEIGHT). If 0 is used for the depth value (the default), -# the graph is not depth-constrained. - -MAX_DOT_GRAPH_DEPTH = 0 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, which results in a white background. -# Warning: Depending on the platform used, enabling this option may lead to -# badly anti-aliased labels on the edges of a graph (i.e. they become hard to -# read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. -# JW -# DOT_MULTI_TARGETS = NO -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. 
- -# JW SEARCHENGINE = NO -SEARCHENGINE = YES diff --git a/ninja/doc/manual.asciidoc b/ninja/doc/manual.asciidoc deleted file mode 100644 index 7f3ab8afd18..00000000000 --- a/ninja/doc/manual.asciidoc +++ /dev/null @@ -1,1158 +0,0 @@ -The Ninja build system -====================== - - -Introduction ------------- - -Ninja is yet another build system. It takes as input the -interdependencies of files (typically source code and output -executables) and orchestrates building them, _quickly_. - -Ninja joins a sea of other build systems. Its distinguishing goal is -to be fast. It is born from -http://neugierig.org/software/chromium/notes/2011/02/ninja.html[my -work on the Chromium browser project], which has over 30,000 source -files and whose other build systems (including one built from custom -non-recursive Makefiles) would take ten seconds to start building -after changing one file. Ninja is under a second. - -Philosophical overview -~~~~~~~~~~~~~~~~~~~~~~ - -Where other build systems are high-level languages, Ninja aims to be -an assembler. - -Build systems get slow when they need to make decisions. When you are -in a edit-compile cycle you want it to be as fast as possible -- you -want the build system to do the minimum work necessary to figure out -what needs to be built immediately. - -Ninja contains the barest functionality necessary to describe -arbitrary dependency graphs. Its lack of syntax makes it impossible -to express complex decisions. - -Instead, Ninja is intended to be used with a separate program -generating its input files. The generator program (like the -`./configure` found in autotools projects) can analyze system -dependencies and make as many decisions as possible up front so that -incremental builds stay fast. Going beyond autotools, even build-time -decisions like "which compiler flags should I use?" or "should I -build a debug or release-mode binary?" belong in the `.ninja` file -generator. 
- -Design goals -~~~~~~~~~~~~ - -Here are the design goals of Ninja: - -* very fast (i.e., instant) incremental builds, even for very large - projects. - -* very little policy about how code is built. Different projects and - higher-level build systems have different opinions about how code - should be built; for example, should built objects live alongside - the sources or should all build output go into a separate directory? - Is there a "package" rule that builds a distributable package of - the project? Sidestep these decisions by trying to allow either to - be implemented, rather than choosing, even if that results in - more verbosity. - -* get dependencies correct, and in particular situations that are - difficult to get right with Makefiles (e.g. outputs need an implicit - dependency on the command line used to generate them; to build C - source code you need to use gcc's `-M` flags for header - dependencies). - -* when convenience and speed are in conflict, prefer speed. - -Some explicit _non-goals_: - -* convenient syntax for writing build files by hand. _You should - generate your ninja files using another program_. This is how we - can sidestep many policy decisions. - -* built-in rules. _Out of the box, Ninja has no rules for - e.g. compiling C code._ - -* build-time customization of the build. _Options belong in - the program that generates the ninja files_. - -* build-time decision-making ability such as conditionals or search - paths. _Making decisions is slow._ - -To restate, Ninja is faster than other build systems because it is -painfully simple. You must tell Ninja exactly what to do when you -create your project's `.ninja` files. - -Comparison to Make -~~~~~~~~~~~~~~~~~~ - -Ninja is closest in spirit and functionality to Make, relying on -simple dependencies between file timestamps. - -But fundamentally, make has a lot of _features_: suffix rules, -functions, built-in rules that e.g. search for RCS files when building -source. 
Make's language was designed to be written by humans. Many -projects find make alone adequate for their build problems. - -In contrast, Ninja has almost no features; just those necessary to get -builds correct while punting most complexity to generation of the -ninja input files. Ninja by itself is unlikely to be useful for most -projects. - -Here are some of the features Ninja adds to Make. (These sorts of -features can often be implemented using more complicated Makefiles, -but they are not part of make itself.) - -* Ninja has special support for discovering extra dependencies at build - time, making it easy to get <> - correct for C/C++ code. - -* A build edge may have multiple outputs. - -* Outputs implicitly depend on the command line that was used to generate - them, which means that changing e.g. compilation flags will cause - the outputs to rebuild. - -* Output directories are always implicitly created before running the - command that relies on them. - -* Rules can provide shorter descriptions of the command being run, so - you can print e.g. `CC foo.o` instead of a long command line while - building. - -* Builds are always run in parallel, based by default on the number of - CPUs your system has. Underspecified build dependencies will result - in incorrect builds. - -* Command output is always buffered. This means commands running in - parallel don't interleave their output, and when a command fails we - can print its failure output next to the full command line that - produced the failure. - - -Using Ninja for your project ----------------------------- - -Ninja currently works on Unix-like systems and Windows. It's seen the -most testing on Linux (and has the best performance there) but it runs -fine on Mac OS X and FreeBSD. - -If your project is small, Ninja's speed impact is likely unnoticeable. -(However, even for small projects it sometimes turns out that Ninja's -limited syntax forces simpler build rules that result in faster -builds.) 
Another way to say this is that if you're happy with the -edit-compile cycle time of your project already then Ninja won't help. - -There are many other build systems that are more user-friendly or -featureful than Ninja itself. For some recommendations: the Ninja -author found http://gittup.org/tup/[the tup build system] influential -in Ninja's design, and thinks https://github.com/apenwarr/redo[redo]'s -design is quite clever. - -Ninja's benefit comes from using it in conjunction with a smarter -meta-build system. - -https://gn.googlesource.com/gn/[gn]:: The meta-build system used to -generate build files for Google Chrome and related projects (v8, -node.js), as well as Google Fuchsia. gn can generate Ninja files for -all platforms supported by Chrome. - -https://cmake.org/[CMake]:: A widely used meta-build system that -can generate Ninja files on Linux as of CMake version 2.8.8. Newer versions -of CMake support generating Ninja files on Windows and Mac OS X too. - -https://github.com/ninja-build/ninja/wiki/List-of-generators-producing-ninja-build-files[others]:: Ninja ought to fit perfectly into other meta-build software -like https://premake.github.io/[premake]. If you do this work, -please let us know! - -Running Ninja -~~~~~~~~~~~~~ - -Run `ninja`. By default, it looks for a file named `build.ninja` in -the current directory and builds all out-of-date targets. You can -specify which targets (files) to build as command line arguments. - -There is also a special syntax `target^` for specifying a target -as the first output of some rule containing the source you put in -the command line, if one exists. For example, if you specify target as -`foo.c^` then `foo.o` will get built (assuming you have those targets -in your build files). - -`ninja -h` prints help output. Many of Ninja's flags intentionally -match those of Make; e.g `ninja -C build -j 20` changes into the -`build` directory and runs 20 build commands in parallel. 
(Note that -Ninja defaults to running commands in parallel anyway, so typically -you don't need to pass `-j`.) - - -Environment variables -~~~~~~~~~~~~~~~~~~~~~ - -Ninja supports one environment variable to control its behavior: -`NINJA_STATUS`, the progress status printed before the rule being run. - -Several placeholders are available: - -`%s`:: The number of started edges. -`%t`:: The total number of edges that must be run to complete the build. -`%p`:: The percentage of started edges. -`%r`:: The number of currently running edges. -`%u`:: The number of remaining edges to start. -`%f`:: The number of finished edges. -`%o`:: Overall rate of finished edges per second -`%c`:: Current rate of finished edges per second (average over builds -specified by `-j` or its default) -`%e`:: Elapsed time in seconds. _(Available since Ninja 1.2.)_ -`%%`:: A plain `%` character. - -The default progress status is `"[%f/%t] "` (note the trailing space -to separate from the build rule). Another example of possible progress status -could be `"[%u/%r/%f] "`. - -Extra tools -~~~~~~~~~~~ - -The `-t` flag on the Ninja command line runs some tools that we have -found useful during Ninja's development. The current tools are: - -[horizontal] -`query`:: dump the inputs and outputs of a given target. - -`browse`:: browse the dependency graph in a web browser. Clicking a -file focuses the view on that file, showing inputs and outputs. This -feature requires a Python installation. By default port 8000 is used -and a web browser will be opened. This can be changed as follows: -+ ----- -ninja -t browse --port=8000 --no-browser mytarget ----- -+ -`graph`:: output a file in the syntax used by `graphviz`, an automatic -graph layout tool. Use it like: -+ ----- -ninja -t graph mytarget | dot -Tpng -ograph.png ----- -+ -In the Ninja source tree, `ninja graph.png` -generates an image for Ninja itself. If no target is given, generate a -graph for all root targets. 
- -`targets`:: output a list of targets either by rule or by depth. If used -like +ninja -t targets rule _name_+ it prints the list of targets -using the given rule to be built. If no rule is given, it prints the source -files (the leaves of the graph). If used like -+ninja -t targets depth _digit_+ it -prints the list of targets in a depth-first manner starting from the root -targets (the ones with no outputs). Indentation is used to mark dependencies. -If the depth is zero it prints all targets. If no arguments are provided -+ninja -t targets depth 1+ is assumed. In this mode targets may be listed -several times. If used like this +ninja -t targets all+ it -prints all the targets available without indentation and it is faster -than the _depth_ mode. - -`commands`:: given a list of targets, print a list of commands which, if -executed in order, may be used to rebuild those targets, assuming that all -output files are out of date. - -`clean`:: remove built files. By default it removes all built files -except for those created by the generator. Adding the `-g` flag also -removes built files created by the generator (see <>). Additional arguments are -targets, which removes the given targets and recursively all files -built for them. -+ -If used like +ninja -t clean -r _rules_+ it removes all files built using -the given rules. -+ -Files created but not referenced in the graph are not removed. This -tool takes into account the +-v+ and the +-n+ options (note that +-n+ -implies +-v+). - -`compdb`:: given a list of rules, each of which is expected to be a -C family language compiler rule whose first input is the name of the -source file, prints on standard output a compilation database in the -http://clang.llvm.org/docs/JSONCompilationDatabase.html[JSON format] expected -by the Clang tooling interface. -_Available since Ninja 1.2._ - -`deps`:: show all dependencies stored in the `.ninja_deps` file. When given a -target, show just the target's dependencies. 
_Available since Ninja 1.4._ - -`recompact`:: recompact the `.ninja_deps` file. _Available since Ninja 1.4._ - -`rules`:: output the list of all rules (eventually with their description -if they have one). It can be used to know which rule name to pass to -+ninja -t targets rule _name_+ or +ninja -t compdb+. - -Writing your own Ninja files ----------------------------- - -The remainder of this manual is only useful if you are constructing -Ninja files yourself: for example, if you're writing a meta-build -system or supporting a new language. - -Conceptual overview -~~~~~~~~~~~~~~~~~~~ - -Ninja evaluates a graph of dependencies between files, and runs -whichever commands are necessary to make your build target up to date -as determined by file modification times. If you are familiar with -Make, Ninja is very similar. - -A build file (default name: `build.ninja`) provides a list of _rules_ --- short names for longer commands, like how to run the compiler -- -along with a list of _build_ statements saying how to build files -using the rules -- which rule to apply to which inputs to produce -which outputs. - -Conceptually, `build` statements describe the dependency graph of your -project, while `rule` statements describe how to generate the files -along a given edge of the graph. - -Syntax example -~~~~~~~~~~~~~~ - -Here's a basic `.ninja` file that demonstrates most of the syntax. -It will be used as an example for the following sections. - ---------------------------------- -cflags = -Wall - -rule cc - command = gcc $cflags -c $in -o $out - -build foo.o: cc foo.c ---------------------------------- - -Variables -~~~~~~~~~ -Despite the non-goal of being convenient to write by hand, to keep -build files readable (debuggable), Ninja supports declaring shorter -reusable names for strings. 
A declaration like the following - ----------------- -cflags = -g ----------------- - -can be used on the right side of an equals sign, dereferencing it with -a dollar sign, like this: - ----------------- -rule cc - command = gcc $cflags -c $in -o $out ----------------- - -Variables can also be referenced using curly braces like `${in}`. - -Variables might better be called "bindings", in that a given variable -cannot be changed, only shadowed. There is more on how shadowing works -later in this document. - -Rules -~~~~~ - -Rules declare a short name for a command line. They begin with a line -consisting of the `rule` keyword and a name for the rule. Then -follows an indented set of `variable = value` lines. - -The basic example above declares a new rule named `cc`, along with the -command to run. In the context of a rule, the `command` variable -defines the command to run, `$in` expands to the list of -input files (`foo.c`), and `$out` to the output files (`foo.o`) for the -command. A full list of special variables is provided in -<>. - -Build statements -~~~~~~~~~~~~~~~~ - -Build statements declare a relationship between input and output -files. They begin with the `build` keyword, and have the format -+build _outputs_: _rulename_ _inputs_+. Such a declaration says that -all of the output files are derived from the input files. When the -output files are missing or when the inputs change, Ninja will run the -rule to regenerate the outputs. - -The basic example above describes how to build `foo.o`, using the `cc` -rule. - -In the scope of a `build` block (including in the evaluation of its -associated `rule`), the variable `$in` is the list of inputs and the -variable `$out` is the list of outputs. - -A build statement may be followed by an indented set of `key = value` -pairs, much like a rule. These variables will shadow any variables -when evaluating the variables in the command. 
For example: - ----------------- -cflags = -Wall -Werror -rule cc - command = gcc $cflags -c $in -o $out - -# If left unspecified, builds get the outer $cflags. -build foo.o: cc foo.c - -# But you can shadow variables like cflags for a particular build. -build special.o: cc special.c - cflags = -Wall - -# The variable was only shadowed for the scope of special.o; -# Subsequent build lines get the outer (original) cflags. -build bar.o: cc bar.c - ----------------- - -For more discussion of how scoping works, consult <>. - -If you need more complicated information passed from the build -statement to the rule (for example, if the rule needs "the file -extension of the first input"), pass that through as an extra -variable, like how `cflags` is passed above. - -If the top-level Ninja file is specified as an output of any build -statement and it is out of date, Ninja will rebuild and reload it -before building the targets requested by the user. - -Generating Ninja files from code -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -`misc/ninja_syntax.py` in the Ninja distribution is a tiny Python -module to facilitate generating Ninja files. It allows you to make -Python calls like `ninja.rule(name='foo', command='bar', -depfile='$out.d')` and it will generate the appropriate syntax. Feel -free to just inline it into your project's build system if it's -useful. - - -More details ------------- - -The `phony` rule -~~~~~~~~~~~~~~~~ - -The special rule name `phony` can be used to create aliases for other -targets. For example: - ----------------- -build foo: phony some/file/in/a/faraway/subdir/foo ----------------- - -This makes `ninja foo` build the longer path. Semantically, the -`phony` rule is equivalent to a plain rule where the `command` does -nothing, but phony rules are handled specially in that they aren't -printed when run, logged (see below), nor do they contribute to the -command count printed as part of the build process. 
- -`phony` can also be used to create dummy targets for files which -may not exist at build time. If a phony build statement is written -without any dependencies, the target will be considered out of date if -it does not exist. Without a phony build statement, Ninja will report -an error if the file does not exist and is required by the build. - - -Default target statements -~~~~~~~~~~~~~~~~~~~~~~~~~ - -By default, if no targets are specified on the command line, Ninja -will build every output that is not named as an input elsewhere. -You can override this behavior using a default target statement. -A default target statement causes Ninja to build only a given subset -of output files if none are specified on the command line. - -Default target statements begin with the `default` keyword, and have -the format +default _targets_+. A default target statement must appear -after the build statement that declares the target as an output file. -They are cumulative, so multiple statements may be used to extend -the list of default targets. For example: - ----------------- -default foo bar -default baz ----------------- - -This causes Ninja to build the `foo`, `bar` and `baz` targets by -default. - - -[[ref_log]] -The Ninja log -~~~~~~~~~~~~~ - -For each built file, Ninja keeps a log of the command used to build -it. Using this log Ninja can know when an existing output was built -with a different command line than the build files specify (i.e., the -command line changed) and knows to rebuild the file. - -The log file is kept in the build root in a file called `.ninja_log`. -If you provide a variable named `builddir` in the outermost scope, -`.ninja_log` will be kept in that directory instead. 
- - -[[ref_versioning]] -Version compatibility -~~~~~~~~~~~~~~~~~~~~~ - -_Available since Ninja 1.2._ - -Ninja version labels follow the standard major.minor.patch format, -where the major version is increased on backwards-incompatible -syntax/behavioral changes and the minor version is increased on new -behaviors. Your `build.ninja` may declare a variable named -`ninja_required_version` that asserts the minimum Ninja version -required to use the generated file. For example, - ------ -ninja_required_version = 1.1 ------ - -declares that the build file relies on some feature that was -introduced in Ninja 1.1 (perhaps the `pool` syntax), and that -Ninja 1.1 or greater must be used to build. Unlike other Ninja -variables, this version requirement is checked immediately when -the variable is encountered in parsing, so it's best to put it -at the top of the build file. - -Ninja always warns if the major versions of Ninja and the -`ninja_required_version` don't match; a major version change hasn't -come up yet so it's difficult to predict what behavior might be -required. - -[[ref_headers]] -C/C++ header dependencies -~~~~~~~~~~~~~~~~~~~~~~~~~ - -To get C/C++ header dependencies (or any other build dependency that -works in a similar way) correct Ninja has some extra functionality. - -The problem with headers is that the full list of files that a given -source file depends on can only be discovered by the compiler: -different preprocessor defines and include paths cause different files -to be used. Some compilers can emit this information while building, -and Ninja can use that to get its dependencies perfect. - -Consider: if the file has never been compiled, it must be built anyway, -generating the header dependencies as a side effect. If any file is -later modified (even in a way that changes which headers it depends -on) the modification will cause a rebuild as well, keeping the -dependencies up to date. 
- -When loading these special dependencies, Ninja implicitly adds extra -build edges such that it is not an error if the listed dependency is -missing. This allows you to delete a header file and rebuild without -the build aborting due to a missing input. - -depfile -^^^^^^^ - -`gcc` (and other compilers like `clang`) support emitting dependency -information in the syntax of a Makefile. (Any command that can write -dependencies in this form can be used, not just `gcc`.) - -To bring this information into Ninja requires cooperation. On the -Ninja side, the `depfile` attribute on the `build` must point to a -path where this data is written. (Ninja only supports the limited -subset of the Makefile syntax emitted by compilers.) Then the command -must know to write dependencies into the `depfile` path. -Use it like in the following example: - ----- -rule cc - depfile = $out.d - command = gcc -MMD -MF $out.d [other gcc flags here] ----- - -The `-MMD` flag to `gcc` tells it to output header dependencies, and -the `-MF` flag tells it where to write them. - -deps -^^^^ - -_(Available since Ninja 1.3.)_ - -It turns out that for large projects (and particularly on Windows, -where the file system is slow) loading these dependency files on -startup is slow. - -Ninja 1.3 can instead process dependencies just after they're generated -and save a compacted form of the same information in a Ninja-internal -database. - -Ninja supports this processing in two forms. - -1. `deps = gcc` specifies that the tool outputs `gcc`-style dependencies - in the form of Makefiles. Adding this to the above example will - cause Ninja to process the `depfile` immediately after the - compilation finishes, then delete the `.d` file (which is only used - as a temporary). - -2. `deps = msvc` specifies that the tool outputs header dependencies - in the form produced by Visual Studio's compiler's - http://msdn.microsoft.com/en-us/library/hdkef6tk(v=vs.90).aspx[`/showIncludes` - flag]. 
Briefly, this means the tool outputs specially-formatted lines - to its stdout. Ninja then filters these lines from the displayed - output. No `depfile` attribute is necessary, but the localized string - that appears in front of each header file path must be set. For instance - `msvc_deps_prefix = Note: including file:` - for an English Visual Studio (the default). It should be globally defined. -+ ----- -msvc_deps_prefix = Note: including file: -rule cc - deps = msvc - command = cl /showIncludes -c $in /Fo$out ----- - -If the include directory directives are using absolute paths, your depfile -may result in a mixture of relative and absolute paths. Paths used by other -build rules need to match exactly. Therefore, it is recommended to use -relative paths in these cases. - -[[ref_pool]] -Pools -~~~~~ - -_Available since Ninja 1.1._ - -Pools allow you to allocate one or more rules or edges a finite number -of concurrent jobs which is more tightly restricted than the default -parallelism. - -This can be useful, for example, to restrict a particular expensive rule -(like link steps for huge executables), or to restrict particular build -statements which you know perform poorly when run concurrently. - -Each pool has a `depth` variable which is specified in the build file. -The pool is then referred to with the `pool` variable on either a rule -or a build statement. - -No matter what pools you specify, ninja will never run more concurrent jobs -than the default parallelism, or the number of jobs specified on the command -line (with `-j`). - ----------------- -# No more than 4 links at a time. -pool link_pool - depth = 4 - -# No more than 1 heavy object at a time. -pool heavy_object_pool - depth = 1 - -rule link - ... - pool = link_pool - -rule cc - ... - -# The link_pool is used here. Only 4 links will run concurrently. -build foo.exe: link input.obj - -# A build statement can be exempted from its rule's pool by setting an -# empty pool. 
This effectively puts the build statement back into the default -# pool, which has infinite depth. -build other.exe: link input.obj - pool = - -# A build statement can specify a pool directly. -# Only one of these builds will run at a time. -build heavy_object1.obj: cc heavy_obj1.cc - pool = heavy_object_pool -build heavy_object2.obj: cc heavy_obj2.cc - pool = heavy_object_pool - ----------------- - -The `console` pool -^^^^^^^^^^^^^^^^^^ - -_Available since Ninja 1.5._ - -There exists a pre-defined pool named `console` with a depth of 1. It has -the special property that any task in the pool has direct access to the -standard input, output and error streams provided to Ninja, which are -normally connected to the user's console (hence the name) but could be -redirected. This can be useful for interactive tasks or long-running tasks -which produce status updates on the console (such as test suites). - -While a task in the `console` pool is running, Ninja's regular output (such -as progress status and output from concurrent tasks) is buffered until -it completes. - -[[ref_ninja_file]] -Ninja file reference --------------------- - -A file is a series of declarations. A declaration can be one of: - -1. A rule declaration, which begins with +rule _rulename_+, and - then has a series of indented lines defining variables. - -2. A build edge, which looks like +build _output1_ _output2_: - _rulename_ _input1_ _input2_+. + - Implicit dependencies may be tacked on the end with +| - _dependency1_ _dependency2_+. + - Order-only dependencies may be tacked on the end with +|| - _dependency1_ _dependency2_+. (See <>.) -+ -Implicit outputs _(available since Ninja 1.7)_ may be added before -the `:` with +| _output1_ _output2_+ and do not appear in `$out`. -(See <>.) - -3. Variable declarations, which look like +_variable_ = _value_+. - -4. Default target statements, which look like +default _target1_ _target2_+. - -5. 
References to more files, which look like +subninja _path_+ or - +include _path_+. The difference between these is explained below - <>. - -6. A pool declaration, which looks like +pool _poolname_+. Pools are explained - <>. - -[[ref_lexer]] -Lexical syntax -~~~~~~~~~~~~~~ - -Ninja is mostly encoding agnostic, as long as the bytes Ninja cares -about (like slashes in paths) are ASCII. This means e.g. UTF-8 or -ISO-8859-1 input files ought to work. - -Comments begin with `#` and extend to the end of the line. - -Newlines are significant. Statements like `build foo bar` are a set -of space-separated tokens that end at the newline. Newlines and -spaces within a token must be escaped. - -There is only one escape character, `$`, and it has the following -behaviors: - -`$` followed by a newline:: escape the newline (continue the current line -across a line break). - -`$` followed by text:: a variable reference. - -`${varname}`:: alternate syntax for `$varname`. - -`$` followed by space:: a space. (This is only necessary in lists of -paths, where a space would otherwise separate filenames. See below.) - -`$:` :: a colon. (This is only necessary in `build` lines, where a colon -would otherwise terminate the list of outputs.) - -`$$`:: a literal `$`. - -A `build` or `default` statement is first parsed as a space-separated -list of filenames and then each name is expanded. This means that -spaces within a variable will result in spaces in the expanded -filename. - ----- -spaced = foo bar -build $spaced/baz other$ file: ... -# The above build line has two outputs: "foo bar/baz" and "other file". ----- - -In a `name = value` statement, whitespace at the beginning of a value -is always stripped. Whitespace at the beginning of a line after a -line continuation is also stripped. - ----- -two_words_with_one_space = foo $ - bar -one_word_with_no_space = foo$ - bar ----- - -Other whitespace is only significant if it's at the beginning of a -line. 
If a line is indented more than the previous one, it's -considered part of its parent's scope; if it is indented less than the -previous one, it closes the previous scope. - -[[ref_toplevel]] -Top-level variables -~~~~~~~~~~~~~~~~~~~ - -Two variables are significant when declared in the outermost file scope. - -`builddir`:: a directory for some Ninja output files. See <>. (You can also store other build output - in this directory.) - -`ninja_required_version`:: the minimum version of Ninja required to process - the build correctly. See <>. - - -[[ref_rule]] -Rule variables -~~~~~~~~~~~~~~ - -A `rule` block contains a list of `key = value` declarations that -affect the processing of the rule. Here is a full list of special -keys. - -`command` (_required_):: the command line to run. Each `rule` may - have only one `command` declaration. See <> for more details on quoting and executing multiple commands. - -`depfile`:: path to an optional `Makefile` that contains extra - _implicit dependencies_ (see <>). This is explicitly to support C/C++ header - dependencies; see <>. - -`deps`:: _(Available since Ninja 1.3.)_ if present, must be one of - `gcc` or `msvc` to specify special dependency processing. See - <>. The generated database is - stored as `.ninja_deps` in the `builddir`, see <>. - -`msvc_deps_prefix`:: _(Available since Ninja 1.5.)_ defines the string - which should be stripped from msvc's /showIncludes output. Only - needed when `deps = msvc` and no English Visual Studio version is used. - -`description`:: a short description of the command, used to pretty-print - the command as it's running. The `-v` flag controls whether to print - the full command or its description; if a command fails, the full command - line will always be printed before the command's output. - -`dyndep`:: _(Available since Ninja 1.10.)_ Used only on build statements. - If present, must name one of the build statement inputs. 
Dynamically - discovered dependency information will be loaded from the file. - See the <> section for details. - -`generator`:: if present, specifies that this rule is used to - re-invoke the generator program. Files built using `generator` - rules are treated specially in two ways: firstly, they will not be - rebuilt if the command line changes; and secondly, they are not - cleaned by default. - -`in`:: the space-separated list of files provided as inputs to the build line - referencing this `rule`, shell-quoted if it appears in commands. (`$in` is - provided solely for convenience; if you need some subset or variant of this - list of files, just construct a new variable with that list and use - that instead.) - -`in_newline`:: the same as `$in` except that multiple inputs are - separated by newlines rather than spaces. (For use with - `$rspfile_content`; this works around a bug in the MSVC linker where - it uses a fixed-size buffer for processing input.) - -`out`:: the space-separated list of files provided as outputs to the build line - referencing this `rule`, shell-quoted if it appears in commands. - -`restat`:: if present, causes Ninja to re-stat the command's outputs - after execution of the command. Each output whose modification time - the command did not change will be treated as though it had never - needed to be built. This may cause the output's reverse - dependencies to be removed from the list of pending build actions. - -`rspfile`, `rspfile_content`:: if present (both), Ninja will use a - response file for the given command, i.e. write the selected string - (`rspfile_content`) to the given file (`rspfile`) before calling the - command and delete the file after successful execution of the - command. -+ -This is particularly useful on Windows OS, where the maximal length of -a command line is limited and response files must be used instead. 
-+ -Use it like in the following example: -+ ----- -rule link - command = link.exe /OUT$out [usual link flags here] @$out.rsp - rspfile = $out.rsp - rspfile_content = $in - -build myapp.exe: link a.obj b.obj [possibly many other .obj files] ----- - -[[ref_rule_command]] -Interpretation of the `command` variable -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Fundamentally, command lines behave differently on Unixes and Windows. - -On Unixes, commands are arrays of arguments. The Ninja `command` -variable is passed directly to `sh -c`, which is then responsible for -interpreting that string into an argv array. Therefore the quoting -rules are those of the shell, and you can use all the normal shell -operators, like `&&` to chain multiple commands, or `VAR=value cmd` to -set environment variables. - -On Windows, commands are strings, so Ninja passes the `command` string -directly to `CreateProcess`. (In the common case of simply executing -a compiler this means there is less overhead.) Consequently the -quoting rules are determined by the called program, which on Windows -are usually provided by the C library. If you need shell -interpretation of the command (such as the use of `&&` to chain -multiple commands), make the command execute the Windows shell by -prefixing the command with `cmd /c`. Ninja may error with "invalid parameter" -which usually indicates that the command line length has been exceeded. - -[[ref_outputs]] -Build outputs -~~~~~~~~~~~~~ - -There are two types of build outputs which are subtly different. - -1. _Explicit outputs_, as listed in a build line. These are - available as the `$out` variable in the rule. -+ -This is the standard form of output to be used for e.g. the -object file of a compile command. - -2. _Implicit outputs_, as listed in a build line with the syntax +| - _out1_ _out2_+ + before the `:` of a build line _(available since - Ninja 1.7)_. 
The semantics are identical to explicit outputs, - the only difference is that implicit outputs don't show up in the - `$out` variable. -+ -This is for expressing outputs that don't show up on the -command line of the command. - -[[ref_dependencies]] -Build dependencies -~~~~~~~~~~~~~~~~~~ - -There are three types of build dependencies which are subtly different. - -1. _Explicit dependencies_, as listed in a build line. These are - available as the `$in` variable in the rule. Changes in these files - cause the output to be rebuilt; if these files are missing and - Ninja doesn't know how to build them, the build is aborted. -+ -This is the standard form of dependency to be used e.g. for the -source file of a compile command. - -2. _Implicit dependencies_, either as picked up from - a `depfile` attribute on a rule or from the syntax +| _dep1_ - _dep2_+ on the end of a build line. The semantics are identical to - explicit dependencies, the only difference is that implicit dependencies - don't show up in the `$in` variable. -+ -This is for expressing dependencies that don't show up on the -command line of the command; for example, for a rule that runs a -script, the script itself should be an implicit dependency, as -changes to the script should cause the output to rebuild. -+ -Note that dependencies as loaded through depfiles have slightly different -semantics, as described in the <>. - -3. _Order-only dependencies_, expressed with the syntax +|| _dep1_ - _dep2_+ on the end of a build line. When these are out of date, the - output is not rebuilt until they are built, but changes in order-only - dependencies alone do not cause the output to be rebuilt. -+ -Order-only dependencies can be useful for bootstrapping dependencies -that are only discovered during build time: for example, to generate a -header file before starting a subsequent compilation step. (Once the -header is used in compilation, a generated dependency file will then -express the implicit dependency.) 
- -File paths are compared as is, which means that an absolute path and a -relative path, pointing to the same file, are considered different by Ninja. - -Variable expansion -~~~~~~~~~~~~~~~~~~ - -Variables are expanded in paths (in a `build` or `default` statement) -and on the right side of a `name = value` statement. - -When a `name = value` statement is evaluated, its right-hand side is -expanded immediately (according to the below scoping rules), and -from then on `$name` expands to the static string as the result of the -expansion. It is never the case that you'll need to "double-escape" a -value to prevent it from getting expanded twice. - -All variables are expanded immediately as they're encountered in parsing, -with one important exception: variables in `rule` blocks are expanded -when the rule is _used_, not when it is declared. In the following -example, the `demo` rule prints "this is a demo of bar". - ----- -rule demo - command = echo "this is a demo of $foo" - -build out: demo - foo = bar ----- - -[[ref_scope]] -Evaluation and scoping -~~~~~~~~~~~~~~~~~~~~~~ - -Top-level variable declarations are scoped to the file they occur in. - -Rule declarations are also scoped to the file they occur in. -_(Available since Ninja 1.6)_ - -The `subninja` keyword, used to include another `.ninja` file, -introduces a new scope. The included `subninja` file may use the -variables and rules from the parent file, and shadow their values for the file's -scope, but it won't affect values of the variables in the parent. - -To include another `.ninja` file in the current scope, much like a C -`#include` statement, use `include` instead of `subninja`. - -Variable declarations indented in a `build` block are scoped to the -`build` block. The full lookup order for a variable expanded in a -`build` block (or the `rule` it uses) is: - -1. Special built-in variables (`$in`, `$out`). - -2. Build-level variables from the `build` block. - -3. 
Rule-level variables from the `rule` block (i.e. `$command`). - (Note from the above discussion on expansion that these are - expanded "late", and may make use of in-scope bindings like `$in`.) - -4. File-level variables from the file that the `build` line was in. - -5. Variables from the file that included that file using the - `subninja` keyword. - -[[ref_dyndep]] -Dynamic Dependencies --------------------- - -_Available since Ninja 1.10._ - -Some use cases require implicit dependency information to be dynamically -discovered from source file content _during the build_ in order to build -correctly on the first run (e.g. Fortran module dependencies). This is -unlike <> which are only needed on the -second run and later to rebuild correctly. A build statement may have a -`dyndep` binding naming one of its inputs to specify that dynamic -dependency information must be loaded from the file. For example: - ----- -build out: ... || foo - dyndep = foo -build foo: ... ----- - -This specifies that file `foo` is a dyndep file. Since it is an input, -the build statement for `out` can never be executed before `foo` is built. -As soon as `foo` is finished Ninja will read it to load dynamically -discovered dependency information for `out`. This may include additional -implicit inputs and/or outputs. Ninja will update the build graph -accordingly and the build will proceed as if the information was known -originally. - -Dyndep file reference -~~~~~~~~~~~~~~~~~~~~~ - -Files specified by `dyndep` bindings use the same <> -as <> and have the following layout. - -1. A version number in the form `[.][]`: -+ ----- -ninja_dyndep_version = 1 ----- -+ -Currently the version number must always be `1` or `1.0` but may have -an arbitrary suffix. - -2. One or more build statements of the form: -+ ----- -build out | imp-outs... : dyndep | imp-ins... ----- -+ -Every statement must specify exactly one explicit output and must use -the rule name `dyndep`. 
The `| imp-outs...` and `| imp-ins...` portions -are optional. - -3. An optional `restat` <> on each build statement. - -The build statements in a dyndep file must have a one-to-one correspondence -to build statements in the <> that name the -dyndep file in a `dyndep` binding. No dyndep build statement may be omitted -and no extra build statements may be specified. - -Dyndep Examples -~~~~~~~~~~~~~~~ - -Fortran Modules -^^^^^^^^^^^^^^^ - -Consider a Fortran source file `foo.f90` that provides a module -`foo.mod` (an implicit output of compilation) and another source file -`bar.f90` that uses the module (an implicit input of compilation). This -implicit dependency must be discovered before we compile either source -in order to ensure that `bar.f90` never compiles before `foo.f90`, and -that `bar.f90` recompiles when `foo.mod` changes. We can achieve this -as follows: - ----- -rule f95 - command = f95 -o $out -c $in -rule fscan - command = fscan -o $out $in - -build foobar.dd: fscan foo.f90 bar.f90 - -build foo.o: f95 foo.f90 || foobar.dd - dyndep = foobar.dd -build bar.o: f95 bar.f90 || foobar.dd - dyndep = foobar.dd ----- - -In this example the order-only dependencies ensure that `foobar.dd` is -generated before either source compiles. The hypothetical `fscan` tool -scans the source files, assumes each will be compiled to a `.o` of the -same name, and writes `foobar.dd` with content such as: - ----- -ninja_dyndep_version = 1 -build foo.o | foo.mod: dyndep -build bar.o: dyndep | foo.mod ----- - -Ninja will load this file to add `foo.mod` as an implicit output of -`foo.o` and implicit input of `bar.o`. This ensures that the Fortran -sources are always compiled in the proper order and recompiled when -needed. - -Tarball Extraction -^^^^^^^^^^^^^^^^^^ - -Consider a tarball `foo.tar` that we want to extract. 
The extraction time -can be recorded with a `foo.tar.stamp` file so that extraction repeats if -the tarball changes, but we also would like to re-extract if any of the -outputs is missing. However, the list of outputs depends on the content -of the tarball and cannot be spelled out explicitly in the ninja build file. -We can achieve this as follows: - ----- -rule untar - command = tar xf $in && touch $out -rule scantar - command = scantar --stamp=$stamp --dd=$out $in -build foo.tar.dd: scantar foo.tar - stamp = foo.tar.stamp -build foo.tar.stamp: untar foo.tar || foo.tar.dd - dyndep = foo.tar.dd ----- - -In this example the order-only dependency ensures that `foo.tar.dd` is -built before the tarball extracts. The hypothetical `scantar` tool -will read the tarball (e.g. via `tar tf`) and write `foo.tar.dd` with -content such as: - ----- -ninja_dyndep_version = 1 -build foo.tar.stamp | file1.txt file2.txt : dyndep - restat = 1 ----- - -Ninja will load this file to add `file1.txt` and `file2.txt` as implicit -outputs of `foo.tar.stamp`, and to mark the build statement for `restat`. -On future builds, if any implicit output is missing the tarball will be -extracted again. The `restat` binding tells Ninja to tolerate the fact -that the implicit outputs may not have modification times newer than -the tarball itself (avoiding re-extraction on every build). 
diff --git a/ninja/doc/style.css b/ninja/doc/style.css deleted file mode 100644 index 9976c03ac36..00000000000 --- a/ninja/doc/style.css +++ /dev/null @@ -1,29 +0,0 @@ -body { - margin: 5ex 10ex; - max-width: 80ex; - line-height: 1.5; - font-family: sans-serif; -} -h1, h2, h3 { - font-weight: normal; -} -pre, code { - font-family: x, monospace; -} -pre { - padding: 1ex; - background: #eee; - border: solid 1px #ddd; - min-width: 0; - font-size: 90%; -} -code { - color: #007; -} -div.chapter { - margin-top: 4em; - border-top: solid 2px black; -} -p { - margin-top: 0; -} diff --git a/ninja/misc/afl-fuzz-tokens/kw_build b/ninja/misc/afl-fuzz-tokens/kw_build deleted file mode 100644 index c795b054e5a..00000000000 --- a/ninja/misc/afl-fuzz-tokens/kw_build +++ /dev/null @@ -1 +0,0 @@ -build \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/kw_default b/ninja/misc/afl-fuzz-tokens/kw_default deleted file mode 100644 index 331d858ce9b..00000000000 --- a/ninja/misc/afl-fuzz-tokens/kw_default +++ /dev/null @@ -1 +0,0 @@ -default \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/kw_include b/ninja/misc/afl-fuzz-tokens/kw_include deleted file mode 100644 index 2996fba3563..00000000000 --- a/ninja/misc/afl-fuzz-tokens/kw_include +++ /dev/null @@ -1 +0,0 @@ -include \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/kw_pool b/ninja/misc/afl-fuzz-tokens/kw_pool deleted file mode 100644 index e783591ae77..00000000000 --- a/ninja/misc/afl-fuzz-tokens/kw_pool +++ /dev/null @@ -1 +0,0 @@ -pool \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/kw_rule b/ninja/misc/afl-fuzz-tokens/kw_rule deleted file mode 100644 index 841e840f873..00000000000 --- a/ninja/misc/afl-fuzz-tokens/kw_rule +++ /dev/null @@ -1 +0,0 @@ -rule \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/kw_subninja b/ninja/misc/afl-fuzz-tokens/kw_subninja deleted file mode 100644 index c4fe0c78f16..00000000000 --- 
a/ninja/misc/afl-fuzz-tokens/kw_subninja +++ /dev/null @@ -1 +0,0 @@ -subninja \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_a b/ninja/misc/afl-fuzz-tokens/misc_a deleted file mode 100644 index 2e65efe2a14..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_a +++ /dev/null @@ -1 +0,0 @@ -a \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_b b/ninja/misc/afl-fuzz-tokens/misc_b deleted file mode 100644 index 63d8dbd40c2..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_b +++ /dev/null @@ -1 +0,0 @@ -b \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_colon b/ninja/misc/afl-fuzz-tokens/misc_colon deleted file mode 100644 index 22ded55aa2c..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_colon +++ /dev/null @@ -1 +0,0 @@ -: \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_cont b/ninja/misc/afl-fuzz-tokens/misc_cont deleted file mode 100644 index 857f13ad1bc..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_cont +++ /dev/null @@ -1 +0,0 @@ -$ diff --git a/ninja/misc/afl-fuzz-tokens/misc_dollar b/ninja/misc/afl-fuzz-tokens/misc_dollar deleted file mode 100644 index 6f4f765ed69..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_dollar +++ /dev/null @@ -1 +0,0 @@ -$ \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_eq b/ninja/misc/afl-fuzz-tokens/misc_eq deleted file mode 100644 index 851c75cc5e7..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_eq +++ /dev/null @@ -1 +0,0 @@ -= \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_indent b/ninja/misc/afl-fuzz-tokens/misc_indent deleted file mode 100644 index 136d06384a4..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_indent +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_pipe b/ninja/misc/afl-fuzz-tokens/misc_pipe deleted file mode 100644 index a3871d45082..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_pipe +++ /dev/null 
@@ -1 +0,0 @@ -| \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_pipepipe b/ninja/misc/afl-fuzz-tokens/misc_pipepipe deleted file mode 100644 index 27cc728d691..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_pipepipe +++ /dev/null @@ -1 +0,0 @@ -|| \ No newline at end of file diff --git a/ninja/misc/afl-fuzz-tokens/misc_space b/ninja/misc/afl-fuzz-tokens/misc_space deleted file mode 100644 index 0519ecba6ea..00000000000 --- a/ninja/misc/afl-fuzz-tokens/misc_space +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/ninja/misc/afl-fuzz/build.ninja b/ninja/misc/afl-fuzz/build.ninja deleted file mode 100644 index 52cd2f151bb..00000000000 --- a/ninja/misc/afl-fuzz/build.ninja +++ /dev/null @@ -1,5 +0,0 @@ -rule b - command = clang -MMD -MF $out.d -o $out -c $in - description = building $out - -build a.o: b a.c diff --git a/ninja/misc/bash-completion b/ninja/misc/bash-completion deleted file mode 100644 index e604cd438c0..00000000000 --- a/ninja/misc/bash-completion +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2011 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Add the following to your .bashrc to tab-complete ninja targets -# . 
path/to/ninja/misc/bash-completion - -_ninja_target() { - local cur prev targets dir line targets_command OPTIND - - # When available, use bash_completion to: - # 1) Complete words when the cursor is in the middle of the word - # 2) Complete paths with files or directories, as appropriate - if _get_comp_words_by_ref cur prev &>/dev/null ; then - case $prev in - -f) - _filedir - return 0 - ;; - -C) - _filedir -d - return 0 - ;; - esac - else - cur="${COMP_WORDS[COMP_CWORD]}" - fi - - if [[ "$cur" == "--"* ]]; then - # there is currently only one argument that takes -- - COMPREPLY=($(compgen -P '--' -W 'version' -- "${cur:2}")) - else - dir="." - line=$(echo ${COMP_LINE} | cut -d" " -f 2-) - # filter out all non relevant arguments but keep C for dirs - while getopts :C:f:j:l:k:nvd:t: opt $line; do - case $opt in - # eval for tilde expansion - C) eval dir="$OPTARG" ;; - esac - done; - targets_command="eval ninja -C \"${dir}\" -t targets all 2>/dev/null | cut -d: -f1" - COMPREPLY=($(compgen -W '`${targets_command}`' -- "$cur")) - fi - return -} -complete -F _ninja_target ninja diff --git a/ninja/misc/ci.py b/ninja/misc/ci.py deleted file mode 100755 index 0c69d898c61..00000000000 --- a/ninja/misc/ci.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -import os - -ignores = [ - '.git/', - 'misc/afl-fuzz-tokens/', - 'ninja_deps', - 'src/depfile_parser.cc', - 'src/lexer.cc', -] - -error_count = 0 - -def error(path, msg): - global error_count - error_count += 1 - print('\x1b[1;31m{}\x1b[0;31m{}\x1b[0m'.format(path, msg)) - -for root, directory, filenames in os.walk('.'): - for filename in filenames: - path = os.path.join(root, filename)[2:] - if any([path.startswith(x) for x in ignores]): - continue - with open(path, 'rb') as file: - line_nr = 1 - try: - for line in [x.decode() for x in file.readlines()]: - if len(line) == 0 or line[-1] != '\n': - error(path, ' missing newline at end of file.') - if len(line) > 1: - if line[-2] == '\r': - error(path, ' has Windows 
line endings.') - break - if line[-2] == ' ' or line[-2] == '\t': - error(path, ':{} has trailing whitespace.'.format(line_nr)) - line_nr += 1 - except UnicodeError: - pass # binary file - -exit(error_count) diff --git a/ninja/misc/inherited-fds.ninja b/ninja/misc/inherited-fds.ninja deleted file mode 100644 index 671155eb0b3..00000000000 --- a/ninja/misc/inherited-fds.ninja +++ /dev/null @@ -1,23 +0,0 @@ -# This build file prints out a list of open file descriptors in -# Ninja subprocesses, to help verify we don't accidentally leak -# any. - -# Because one fd leak was in the code managing multiple subprocesses, -# this test brings up multiple subprocesses and then dumps the fd -# table of the last one. - -# Use like: ./ninja -f misc/inherited-fds.ninja - -rule sleep - command = sleep 10000 - -rule dump - command = sleep 1; ls -l /proc/self/fd; exit 1 - -build all: phony a b c d e - -build a: sleep -build b: sleep -build c: sleep -build d: sleep -build e: dump diff --git a/ninja/misc/long-slow-build.ninja b/ninja/misc/long-slow-build.ninja deleted file mode 100644 index 46af6bafbe7..00000000000 --- a/ninja/misc/long-slow-build.ninja +++ /dev/null @@ -1,38 +0,0 @@ -# An input file for running a "slow" build. 
-# Use like: ninja -f misc/long-slow-build.ninja all - -rule sleep - command = sleep 1 - description = SLEEP $out - -build 0: sleep README -build 1: sleep README -build 2: sleep README -build 3: sleep README -build 4: sleep README -build 5: sleep README -build 6: sleep README -build 7: sleep README -build 8: sleep README -build 9: sleep README -build 10: sleep 0 -build 11: sleep 1 -build 12: sleep 2 -build 13: sleep 3 -build 14: sleep 4 -build 15: sleep 5 -build 16: sleep 6 -build 17: sleep 7 -build 18: sleep 8 -build 19: sleep 9 -build 20: sleep 10 -build 21: sleep 11 -build 22: sleep 12 -build 23: sleep 13 -build 24: sleep 14 -build 25: sleep 15 -build 26: sleep 16 -build 27: sleep 17 -build 28: sleep 18 -build 29: sleep 19 -build all: phony 20 21 22 23 24 25 26 27 28 29 diff --git a/ninja/misc/measure.py b/ninja/misc/measure.py deleted file mode 100755 index 8ce95e696b2..00000000000 --- a/ninja/misc/measure.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2011 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""measure the runtime of a command by repeatedly running it. 
-""" - -from __future__ import print_function - -import time -import subprocess -import sys - -devnull = open('/dev/null', 'w') - -def run(cmd, repeat=10): - print('sampling:', end=' ') - sys.stdout.flush() - - samples = [] - for _ in range(repeat): - start = time.time() - subprocess.call(cmd, stdout=devnull, stderr=devnull) - end = time.time() - dt = (end - start) * 1000 - print('%dms' % int(dt), end=' ') - sys.stdout.flush() - samples.append(dt) - print() - - # We're interested in the 'pure' runtime of the code, which is - # conceptually the smallest time we'd see if we ran it enough times - # such that it got the perfect time slices / disk cache hits. - best = min(samples) - # Also print how varied the outputs were in an attempt to make it - # more obvious if something has gone terribly wrong. - err = sum(s - best for s in samples) / float(len(samples)) - print('estimate: %dms (mean err %.1fms)' % (best, err)) - -if __name__ == '__main__': - if len(sys.argv) < 2: - print('usage: measure.py command args...') - sys.exit(1) - run(cmd=sys.argv[1:]) diff --git a/ninja/misc/ninja-mode.el b/ninja/misc/ninja-mode.el deleted file mode 100644 index 8b975d5156f..00000000000 --- a/ninja/misc/ninja-mode.el +++ /dev/null @@ -1,85 +0,0 @@ -;;; ninja-mode.el --- Major mode for editing .ninja files -*- lexical-binding: t -*- - -;; Package-Requires: ((emacs "24")) - -;; Copyright 2011 Google Inc. All Rights Reserved. -;; -;; Licensed under the Apache License, Version 2.0 (the "License"); -;; you may not use this file except in compliance with the License. -;; You may obtain a copy of the License at -;; -;; http://www.apache.org/licenses/LICENSE-2.0 -;; -;; Unless required by applicable law or agreed to in writing, software -;; distributed under the License is distributed on an "AS IS" BASIS, -;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -;; See the License for the specific language governing permissions and -;; limitations under the License. 
- -;;; Commentary: - -;; Simple emacs mode for editing .ninja files. -;; Just some syntax highlighting for now. - -;;; Code: - -(defvar ninja-keywords - `((,(concat "^" (regexp-opt '("rule" "build" "subninja" "include" - "pool" "default") - 'words)) - . font-lock-keyword-face) - ("\\([[:alnum:]_]+\\) =" 1 font-lock-variable-name-face) - ;; Variable expansion. - ("$[[:alnum:]_]+" . font-lock-variable-name-face) - ("${[[:alnum:]._]+}" . font-lock-variable-name-face) - ;; Rule names - ("rule +\\([[:alnum:]_.-]+\\)" 1 font-lock-function-name-face) - ;; Build Statement - highlight the rule used, - ;; allow for escaped $,: in outputs. - ("build +\\(?:[^:$\n]\\|$[:$]\\)+ *: *\\([[:alnum:]_.-]+\\)" - 1 font-lock-function-name-face))) - -(defvar ninja-mode-syntax-table - (let ((table (make-syntax-table))) - (modify-syntax-entry ?\" "." table) - table) - "Syntax table used in `ninja-mode'.") - -(defun ninja-syntax-propertize (start end) - (save-match-data - (goto-char start) - (while (search-forward "#" end t) - (let ((match-pos (match-beginning 0))) - (when (and - ;; Is it the first non-white character on the line? - (eq match-pos (save-excursion (back-to-indentation) (point))) - (save-excursion - (goto-char (line-end-position 0)) - (or - ;; If we're continuing the previous line, it's not a - ;; comment. - (not (eq ?$ (char-before))) - ;; Except if the previous line is a comment as well, as the - ;; continuation dollar is ignored then. - (nth 4 (syntax-ppss))))) - (put-text-property match-pos (1+ match-pos) 'syntax-table '(11)) - (let ((line-end (line-end-position))) - ;; Avoid putting properties past the end of the buffer. - ;; Otherwise we get an `args-out-of-range' error. 
- (unless (= line-end (1+ (buffer-size))) - (put-text-property line-end (1+ line-end) 'syntax-table '(12))))))))) - -;;;###autoload -(define-derived-mode ninja-mode prog-mode "ninja" - (set (make-local-variable 'comment-start) "#") - (set (make-local-variable 'parse-sexp-lookup-properties) t) - (set (make-local-variable 'syntax-propertize-function) #'ninja-syntax-propertize) - (setq font-lock-defaults '(ninja-keywords))) - -;; Run ninja-mode for files ending in .ninja. -;;;###autoload -(add-to-list 'auto-mode-alist '("\\.ninja$" . ninja-mode)) - -(provide 'ninja-mode) - -;;; ninja-mode.el ends here diff --git a/ninja/misc/ninja.vim b/ninja/misc/ninja.vim deleted file mode 100644 index c1ffd50b1c4..00000000000 --- a/ninja/misc/ninja.vim +++ /dev/null @@ -1,87 +0,0 @@ -" ninja build file syntax. -" Language: ninja build file as described at -" http://ninja-build.org/manual.html -" Version: 1.5 -" Last Change: 2018/04/05 -" Maintainer: Nicolas Weber -" Version 1.4 of this script is in the upstream vim repository and will be -" included in the next vim release. If you change this, please send your change -" upstream. - -" ninja lexer and parser are at -" https://github.com/ninja-build/ninja/blob/master/src/lexer.in.cc -" https://github.com/ninja-build/ninja/blob/master/src/manifest_parser.cc - -if exists("b:current_syntax") - finish -endif - -let s:cpo_save = &cpo -set cpo&vim - -syn case match - -" Comments are only matched when the # is at the beginning of the line (with -" optional whitespace), as long as the prior line didn't end with a $ -" continuation. -syn match ninjaComment /\(\$\n\)\@" -syn match ninjaKeyword "^rule\>" -syn match ninjaKeyword "^pool\>" -syn match ninjaKeyword "^default\>" -syn match ninjaKeyword "^include\>" -syn match ninjaKeyword "^subninja\>" - -" Both 'build' and 'rule' begin a variable scope that ends -" on the first line without indent. 'rule' allows only a -" limited set of magic variables, 'build' allows general -" let assignments. 
-" manifest_parser.cc, ParseRule() -syn region ninjaRule start="^rule" end="^\ze\S" contains=TOP transparent -syn keyword ninjaRuleCommand contained containedin=ninjaRule command - \ deps depfile description generator - \ pool restat rspfile rspfile_content - -syn region ninjaPool start="^pool" end="^\ze\S" contains=TOP transparent -syn keyword ninjaPoolCommand contained containedin=ninjaPool depth - -" Strings are parsed as follows: -" lexer.in.cc, ReadEvalString() -" simple_varname = [a-zA-Z0-9_-]+; -" varname = [a-zA-Z0-9_.-]+; -" $$ -> $ -" $\n -> line continuation -" '$ ' -> escaped space -" $simple_varname -> variable -" ${varname} -> variable - -syn match ninjaDollar "\$\$" -syn match ninjaWrapLineOperator "\$$" -syn match ninjaSimpleVar "\$[a-zA-Z0-9_-]\+" -syn match ninjaVar "\${[a-zA-Z0-9_.-]\+}" - -" operators are: -" variable assignment = -" rule definition : -" implicit dependency | -" order-only dependency || -syn match ninjaOperator "\(=\|:\||\|||\)\ze\s" - -hi def link ninjaComment Comment -hi def link ninjaKeyword Keyword -hi def link ninjaRuleCommand Statement -hi def link ninjaPoolCommand Statement -hi def link ninjaDollar ninjaOperator -hi def link ninjaWrapLineOperator ninjaOperator -hi def link ninjaOperator Operator -hi def link ninjaSimpleVar ninjaVar -hi def link ninjaVar Identifier - -let b:current_syntax = "ninja" - -let &cpo = s:cpo_save -unlet s:cpo_save diff --git a/ninja/misc/ninja_syntax.py b/ninja/misc/ninja_syntax.py deleted file mode 100644 index 366dbfddc28..00000000000 --- a/ninja/misc/ninja_syntax.py +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/python - -"""Python module for generating .ninja files. - -Note that this is emphatically not a required piece of Ninja; it's -just a helpful utility for build-file-generation systems that already -use Python. 
-""" - -import re -import textwrap - -def escape_path(word): - return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:') - -class Writer(object): - def __init__(self, output, width=78): - self.output = output - self.width = width - - def newline(self): - self.output.write('\n') - - def comment(self, text): - for line in textwrap.wrap(text, self.width - 2, break_long_words=False, - break_on_hyphens=False): - self.output.write('# ' + line + '\n') - - def variable(self, key, value, indent=0): - if value is None: - return - if isinstance(value, list): - value = ' '.join(filter(None, value)) # Filter out empty strings. - self._line('%s = %s' % (key, value), indent) - - def pool(self, name, depth): - self._line('pool %s' % name) - self.variable('depth', depth, indent=1) - - def rule(self, name, command, description=None, depfile=None, - generator=False, pool=None, restat=False, rspfile=None, - rspfile_content=None, deps=None): - self._line('rule %s' % name) - self.variable('command', command, indent=1) - if description: - self.variable('description', description, indent=1) - if depfile: - self.variable('depfile', depfile, indent=1) - if generator: - self.variable('generator', '1', indent=1) - if pool: - self.variable('pool', pool, indent=1) - if restat: - self.variable('restat', '1', indent=1) - if rspfile: - self.variable('rspfile', rspfile, indent=1) - if rspfile_content: - self.variable('rspfile_content', rspfile_content, indent=1) - if deps: - self.variable('deps', deps, indent=1) - - def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, - variables=None, implicit_outputs=None, pool=None): - outputs = as_list(outputs) - out_outputs = [escape_path(x) for x in outputs] - all_inputs = [escape_path(x) for x in as_list(inputs)] - - if implicit: - implicit = [escape_path(x) for x in as_list(implicit)] - all_inputs.append('|') - all_inputs.extend(implicit) - if order_only: - order_only = [escape_path(x) for x in as_list(order_only)] - 
all_inputs.append('||') - all_inputs.extend(order_only) - if implicit_outputs: - implicit_outputs = [escape_path(x) - for x in as_list(implicit_outputs)] - out_outputs.append('|') - out_outputs.extend(implicit_outputs) - - self._line('o %s: %s' % (' '.join(out_outputs), - ' '.join([rule] + all_inputs))) - if pool is not None: - self._line(' pool = %s' % pool) - - if variables: - if isinstance(variables, dict): - iterator = iter(variables.items()) - else: - iterator = iter(variables) - - for key, val in iterator: - self.variable(key, val, indent=1) - - return outputs - - def include(self, path): - self._line('include %s' % path) - - def subninja(self, path): - self._line('subninja %s' % path) - - def default(self, paths): - self._line('default %s' % ' '.join(as_list(paths))) - - def _count_dollars_before_index(self, s, i): - """Returns the number of '$' characters right in front of s[i].""" - dollar_count = 0 - dollar_index = i - 1 - while dollar_index > 0 and s[dollar_index] == '$': - dollar_count += 1 - dollar_index -= 1 - return dollar_count - - def _line(self, text, indent=0): - """Write 'text' word-wrapped at self.width characters.""" - leading_space = ' ' * indent - while len(leading_space) + len(text) > self.width: - # The text is too wide; wrap if possible. - - # Find the rightmost space that would obey our width constraint and - # that's not an escaped space. - available_space = self.width - len(leading_space) - len(' $') - space = available_space - while True: - space = text.rfind(' ', 0, space) - if (space < 0 or - self._count_dollars_before_index(text, space) % 2 == 0): - break - - if space < 0: - # No such space; just use the first unescaped space we can find. - space = available_space - 1 - while True: - space = text.find(' ', space + 1) - if (space < 0 or - self._count_dollars_before_index(text, space) % 2 == 0): - break - if space < 0: - # Give up on breaking. 
- break - - self.output.write(leading_space + text[0:space] + ' $\n') - text = text[space+1:] - - # Subsequent lines are continuations, so indent them. - leading_space = ' ' * (indent+2) - - self.output.write(leading_space + text + '\n') - - def close(self): - self.output.close() - - -def as_list(input): - if input is None: - return [] - if isinstance(input, list): - return input - return [input] - - -def escape(string): - """Escape a string such that it can be embedded into a Ninja file without - further interpretation.""" - assert '\n' not in string, 'Ninja syntax does not allow newlines' - # We only have one special metacharacter: '$'. - return string.replace('$', '$$') - - -def expand(string, vars, local_vars={}): - """Expand a string containing $vars as Ninja would. - - Note: doesn't handle the full Ninja variable syntax, but it's enough - to make configure.py's use of it work. - """ - def exp(m): - var = m.group(1) - if var == '$': - return '$' - return local_vars.get(var, vars.get(var, '')) - return re.sub(r'\$(\$|\w*)', exp, string) diff --git a/ninja/misc/ninja_syntax_test.py b/ninja/misc/ninja_syntax_test.py deleted file mode 100755 index 90ff9c6bdb5..00000000000 --- a/ninja/misc/ninja_syntax_test.py +++ /dev/null @@ -1,191 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2011 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -try: - from StringIO import StringIO -except ImportError: - from io import StringIO - -import ninja_syntax - -LONGWORD = 'a' * 10 -LONGWORDWITHSPACES = 'a'*5 + '$ ' + 'a'*5 -INDENT = ' ' - -class TestLineWordWrap(unittest.TestCase): - def setUp(self): - self.out = StringIO() - self.n = ninja_syntax.Writer(self.out, width=8) - - def test_single_long_word(self): - # We shouldn't wrap a single long word. - self.n._line(LONGWORD) - self.assertEqual(LONGWORD + '\n', self.out.getvalue()) - - def test_few_long_words(self): - # We should wrap a line where the second word is overlong. - self.n._line(' '.join(['x', LONGWORD, 'y'])) - self.assertEqual(' $\n'.join(['x', - INDENT + LONGWORD, - INDENT + 'y']) + '\n', - self.out.getvalue()) - - def test_comment_wrap(self): - # Filenames should not be wrapped - self.n.comment('Hello /usr/local/build-tools/bin') - self.assertEqual('# Hello\n# /usr/local/build-tools/bin\n', - self.out.getvalue()) - - def test_short_words_indented(self): - # Test that indent is taking into account when breaking subsequent lines. - # The second line should not be ' to tree', as that's longer than the - # test layout width of 8. - self.n._line('line_one to tree') - self.assertEqual('''\ -line_one $ - to $ - tree -''', - self.out.getvalue()) - - def test_few_long_words_indented(self): - # Check wrapping in the presence of indenting. 
- self.n._line(' '.join(['x', LONGWORD, 'y']), indent=1) - self.assertEqual(' $\n'.join([' ' + 'x', - ' ' + INDENT + LONGWORD, - ' ' + INDENT + 'y']) + '\n', - self.out.getvalue()) - - def test_escaped_spaces(self): - self.n._line(' '.join(['x', LONGWORDWITHSPACES, 'y'])) - self.assertEqual(' $\n'.join(['x', - INDENT + LONGWORDWITHSPACES, - INDENT + 'y']) + '\n', - self.out.getvalue()) - - def test_fit_many_words(self): - self.n = ninja_syntax.Writer(self.out, width=78) - self.n._line('command = cd ../../chrome; python ../tools/grit/grit/format/repack.py ../out/Debug/obj/chrome/chrome_dll.gen/repack/theme_resources_large.pak ../out/Debug/gen/chrome/theme_resources_large.pak', 1) - self.assertEqual('''\ - command = cd ../../chrome; python ../tools/grit/grit/format/repack.py $ - ../out/Debug/obj/chrome/chrome_dll.gen/repack/theme_resources_large.pak $ - ../out/Debug/gen/chrome/theme_resources_large.pak -''', - self.out.getvalue()) - - def test_leading_space(self): - self.n = ninja_syntax.Writer(self.out, width=14) # force wrapping - self.n.variable('foo', ['', '-bar', '-somethinglong'], 0) - self.assertEqual('''\ -foo = -bar $ - -somethinglong -''', - self.out.getvalue()) - - def test_embedded_dollar_dollar(self): - self.n = ninja_syntax.Writer(self.out, width=15) # force wrapping - self.n.variable('foo', ['a$$b', '-somethinglong'], 0) - self.assertEqual('''\ -foo = a$$b $ - -somethinglong -''', - self.out.getvalue()) - - def test_two_embedded_dollar_dollars(self): - self.n = ninja_syntax.Writer(self.out, width=17) # force wrapping - self.n.variable('foo', ['a$$b', '-somethinglong'], 0) - self.assertEqual('''\ -foo = a$$b $ - -somethinglong -''', - self.out.getvalue()) - - def test_leading_dollar_dollar(self): - self.n = ninja_syntax.Writer(self.out, width=14) # force wrapping - self.n.variable('foo', ['$$b', '-somethinglong'], 0) - self.assertEqual('''\ -foo = $$b $ - -somethinglong -''', - self.out.getvalue()) - - def test_trailing_dollar_dollar(self): - self.n = 
ninja_syntax.Writer(self.out, width=14) # force wrapping - self.n.variable('foo', ['a$$', '-somethinglong'], 0) - self.assertEqual('''\ -foo = a$$ $ - -somethinglong -''', - self.out.getvalue()) - -class TestBuild(unittest.TestCase): - def setUp(self): - self.out = StringIO() - self.n = ninja_syntax.Writer(self.out) - - def test_variables_dict(self): - self.n.build('out', 'cc', 'in', variables={'name': 'value'}) - self.assertEqual('''\ -build out: cc in - name = value -''', - self.out.getvalue()) - - def test_variables_list(self): - self.n.build('out', 'cc', 'in', variables=[('name', 'value')]) - self.assertEqual('''\ -build out: cc in - name = value -''', - self.out.getvalue()) - - def test_implicit_outputs(self): - self.n.build('o', 'cc', 'i', implicit_outputs='io') - self.assertEqual('''\ -build o | io: cc i -''', - self.out.getvalue()) - -class TestExpand(unittest.TestCase): - def test_basic(self): - vars = {'x': 'X'} - self.assertEqual('foo', ninja_syntax.expand('foo', vars)) - - def test_var(self): - vars = {'xyz': 'XYZ'} - self.assertEqual('fooXYZ', ninja_syntax.expand('foo$xyz', vars)) - - def test_vars(self): - vars = {'x': 'X', 'y': 'YYY'} - self.assertEqual('XYYY', ninja_syntax.expand('$x$y', vars)) - - def test_space(self): - vars = {} - self.assertEqual('x y z', ninja_syntax.expand('x$ y$ z', vars)) - - def test_locals(self): - vars = {'x': 'a'} - local_vars = {'x': 'b'} - self.assertEqual('a', ninja_syntax.expand('$x', vars)) - self.assertEqual('b', ninja_syntax.expand('$x', vars, local_vars)) - - def test_double(self): - self.assertEqual('a b$c', ninja_syntax.expand('a$ b$$c', {})) - -if __name__ == '__main__': - unittest.main() diff --git a/ninja/misc/output_test.py b/ninja/misc/output_test.py deleted file mode 100755 index 65438190b63..00000000000 --- a/ninja/misc/output_test.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python - -"""Runs ./ninja and checks if the output is correct. 
- -In order to simulate a smart terminal it uses the 'script' command. -""" - -import os -import platform -import subprocess -import sys -import tempfile -import unittest - -default_env = dict(os.environ) -if 'NINJA_STATUS' in default_env: - del default_env['NINJA_STATUS'] -if 'CLICOLOR_FORCE' in default_env: - del default_env['CLICOLOR_FORCE'] -default_env['TERM'] = '' - -def run(build_ninja, flags='', pipe=False, env=default_env): - with tempfile.NamedTemporaryFile('w') as f: - f.write(build_ninja) - f.flush() - ninja_cmd = './ninja {} -f {}'.format(flags, f.name) - try: - if pipe: - output = subprocess.check_output([ninja_cmd], shell=True, env=env) - elif platform.system() == 'Darwin': - output = subprocess.check_output(['script', '-q', '/dev/null', 'bash', '-c', ninja_cmd], - env=env) - else: - output = subprocess.check_output(['script', '-qfec', ninja_cmd, '/dev/null'], - env=env) - except subprocess.CalledProcessError as err: - sys.stdout.buffer.write(err.output) - raise err - final_output = '' - for line in output.decode('utf-8').splitlines(True): - if len(line) > 0 and line[-1] == '\r': - continue - final_output += line.replace('\r', '') - return final_output - -class Output(unittest.TestCase): - def test_issue_1418(self): - self.assertEqual(run( -'''rule echo - command = sleep $delay && echo $out - description = echo $out - -build a: echo - delay = 3 -build b: echo - delay = 2 -build c: echo - delay = 1 -'''), -'''[1/3] echo c\x1b[K -c -[2/3] echo b\x1b[K -b -[3/3] echo a\x1b[K -a -''') - - def test_issue_1214(self): - print_red = '''rule echo - command = printf '\x1b[31mred\x1b[0m' - description = echo $out - -build a: echo -''' - # Only strip color when ninja's output is piped. - self.assertEqual(run(print_red), -'''[1/1] echo a\x1b[K -\x1b[31mred\x1b[0m -''') - self.assertEqual(run(print_red, pipe=True), -'''[1/1] echo a -red -''') - # Even in verbose mode, colors should still only be stripped when piped. 
- self.assertEqual(run(print_red, flags='-v'), -'''[1/1] printf '\x1b[31mred\x1b[0m' -\x1b[31mred\x1b[0m -''') - self.assertEqual(run(print_red, flags='-v', pipe=True), -'''[1/1] printf '\x1b[31mred\x1b[0m' -red -''') - - # CLICOLOR_FORCE=1 can be used to disable escape code stripping. - env = default_env.copy() - env['CLICOLOR_FORCE'] = '1' - self.assertEqual(run(print_red, pipe=True, env=env), -'''[1/1] echo a -\x1b[31mred\x1b[0m -''') - -if __name__ == '__main__': - unittest.main() diff --git a/ninja/misc/packaging/ninja.spec b/ninja/misc/packaging/ninja.spec deleted file mode 100644 index 05f5a079a38..00000000000 --- a/ninja/misc/packaging/ninja.spec +++ /dev/null @@ -1,42 +0,0 @@ -Summary: Ninja is a small build system with a focus on speed. -Name: ninja -Version: %{ver} -Release: %{rel}%{?dist} -Group: Development/Tools -License: Apache 2.0 -URL: https://github.com/ninja-build/ninja -Source0: %{name}-%{version}-%{rel}.tar.gz -BuildRoot: %{_tmppath}/%{name}-%{version}-%{rel} - -BuildRequires: asciidoc - -%description -Ninja is yet another build system. It takes as input the interdependencies of files (typically source code and output executables) and -orchestrates building them, quickly. - -Ninja joins a sea of other build systems. Its distinguishing goal is to be fast. It is born from my work on the Chromium browser project, -which has over 30,000 source files and whose other build systems (including one built from custom non-recursive Makefiles) can take ten -seconds to start building after changing one file. Ninja is under a second. - -%prep -%setup -q -n %{name}-%{version}-%{rel} - -%build -echo Building.. 
-./configure.py --bootstrap -./ninja manual - -%install -mkdir -p %{buildroot}%{_bindir} %{buildroot}%{_docdir} -cp -p ninja %{buildroot}%{_bindir}/ - -%files -%defattr(-, root, root) -%doc COPYING README doc/manual.html -%{_bindir}/* - -%clean -rm -rf %{buildroot} - -#The changelog is built automatically from Git history -%changelog diff --git a/ninja/misc/packaging/rpmbuild.sh b/ninja/misc/packaging/rpmbuild.sh deleted file mode 100755 index 9b74c6588c9..00000000000 --- a/ninja/misc/packaging/rpmbuild.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -echo Building ninja RPMs.. -GITROOT=$(git rev-parse --show-toplevel) -cd $GITROOT - -VER=1.0 -REL=$(git rev-parse --short HEAD)git -RPMTOPDIR=$GITROOT/rpm-build -echo "Ver: $VER, Release: $REL" - -# Create tarball -mkdir -p $RPMTOPDIR/{SOURCES,SPECS} -git archive --format=tar --prefix=ninja-${VER}-${REL}/ HEAD | gzip -c > $RPMTOPDIR/SOURCES/ninja-${VER}-${REL}.tar.gz - -# Convert git log to RPM's ChangeLog format (shown with rpm -qp --changelog ) -sed -e "s/%{ver}/$VER/" -e "s/%{rel}/$REL/" misc/packaging/ninja.spec > $RPMTOPDIR/SPECS/ninja.spec -git log --format="* %cd %aN%n- (%h) %s%d%n" --date=local | sed -r 's/[0-9]+:[0-9]+:[0-9]+ //' >> $RPMTOPDIR/SPECS/ninja.spec - -# Build SRC and binary RPMs -rpmbuild --quiet \ - --define "_topdir $RPMTOPDIR" \ - --define "_rpmdir $PWD" \ - --define "_srcrpmdir $PWD" \ - --define '_rpmfilename %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm' \ - -ba $RPMTOPDIR/SPECS/ninja.spec && - -rm -rf $RPMTOPDIR && -echo Done diff --git a/ninja/misc/write_fake_manifests.py b/ninja/misc/write_fake_manifests.py deleted file mode 100644 index b3594de0bbc..00000000000 --- a/ninja/misc/write_fake_manifests.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python - -"""Writes large manifest files, for manifest parser performance testing. - -The generated manifest files are (eerily) similar in appearance and size to the -ones used in the Chromium project. 
- -Usage: - python misc/write_fake_manifests.py outdir # Will run for about 5s. - -The program contains a hardcoded random seed, so it will generate the same -output every time it runs. By changing the seed, it's easy to generate many -different sets of manifest files. -""" - -import argparse -import contextlib -import os -import random -import sys - -import ninja_syntax - - -def paretoint(avg, alpha): - """Returns a random integer that's avg on average, following a power law. - alpha determines the shape of the power curve. alpha has to be larger - than 1. The closer alpha is to 1, the higher the variation of the returned - numbers.""" - return int(random.paretovariate(alpha) * avg / (alpha / (alpha - 1))) - - -# Based on http://neugierig.org/software/chromium/class-name-generator.html -def moar(avg_options, p_suffix): - kStart = ['render', 'web', 'browser', 'tab', 'content', 'extension', 'url', - 'file', 'sync', 'content', 'http', 'profile'] - kOption = ['view', 'host', 'holder', 'container', 'impl', 'ref', - 'delegate', 'widget', 'proxy', 'stub', 'context', - 'manager', 'master', 'watcher', 'service', 'file', 'data', - 'resource', 'device', 'info', 'provider', 'internals', 'tracker', - 'api', 'layer'] - kOS = ['win', 'mac', 'aura', 'linux', 'android', 'unittest', 'browsertest'] - num_options = min(paretoint(avg_options, alpha=4), 5) - # The original allows kOption to repeat as long as no consecutive options - # repeat. This version doesn't allow any option repetition. 
- name = [random.choice(kStart)] + random.sample(kOption, num_options) - if random.random() < p_suffix: - name.append(random.choice(kOS)) - return '_'.join(name) - - -class GenRandom(object): - def __init__(self, src_dir): - self.seen_names = set([None]) - self.seen_defines = set([None]) - self.src_dir = src_dir - - def _unique_string(self, seen, avg_options=1.3, p_suffix=0.1): - s = None - while s in seen: - s = moar(avg_options, p_suffix) - seen.add(s) - return s - - def _n_unique_strings(self, n): - seen = set([None]) - return [self._unique_string(seen, avg_options=3, p_suffix=0.4) - for _ in xrange(n)] - - def target_name(self): - return self._unique_string(p_suffix=0, seen=self.seen_names) - - def path(self): - return os.path.sep.join([ - self._unique_string(self.seen_names, avg_options=1, p_suffix=0) - for _ in xrange(1 + paretoint(0.6, alpha=4))]) - - def src_obj_pairs(self, path, name): - num_sources = paretoint(55, alpha=2) + 1 - return [(os.path.join(self.src_dir, path, s + '.cc'), - os.path.join('obj', path, '%s.%s.o' % (name, s))) - for s in self._n_unique_strings(num_sources)] - - def defines(self): - return [ - '-DENABLE_' + self._unique_string(self.seen_defines).upper() - for _ in xrange(paretoint(20, alpha=3))] - - -LIB, EXE = 0, 1 -class Target(object): - def __init__(self, gen, kind): - self.name = gen.target_name() - self.dir_path = gen.path() - self.ninja_file_path = os.path.join( - 'obj', self.dir_path, self.name + '.ninja') - self.src_obj_pairs = gen.src_obj_pairs(self.dir_path, self.name) - if kind == LIB: - self.output = os.path.join('lib' + self.name + '.a') - elif kind == EXE: - self.output = os.path.join(self.name) - self.defines = gen.defines() - self.deps = [] - self.kind = kind - self.has_compile_depends = random.random() < 0.4 - - -def write_target_ninja(ninja, target, src_dir): - compile_depends = None - if target.has_compile_depends: - compile_depends = os.path.join( - 'obj', target.dir_path, target.name + '.stamp') - 
ninja.build(compile_depends, 'stamp', target.src_obj_pairs[0][0]) - ninja.newline() - - ninja.variable('defines', target.defines) - ninja.variable('includes', '-I' + src_dir) - ninja.variable('cflags', ['-Wall', '-fno-rtti', '-fno-exceptions']) - ninja.newline() - - for src, obj in target.src_obj_pairs: - ninja.build(obj, 'cxx', src, implicit=compile_depends) - ninja.newline() - - deps = [dep.output for dep in target.deps] - libs = [dep.output for dep in target.deps if dep.kind == LIB] - if target.kind == EXE: - ninja.variable('libs', libs) - if sys.platform == "darwin": - ninja.variable('ldflags', '-Wl,-pie') - link = { LIB: 'alink', EXE: 'link'}[target.kind] - ninja.build(target.output, link, [obj for _, obj in target.src_obj_pairs], - implicit=deps) - - -def write_sources(target, root_dir): - need_main = target.kind == EXE - - includes = [] - - # Include siblings. - for cc_filename, _ in target.src_obj_pairs: - h_filename = os.path.basename(os.path.splitext(cc_filename)[0] + '.h') - includes.append(h_filename) - - # Include deps. 
- for dep in target.deps: - for cc_filename, _ in dep.src_obj_pairs: - h_filename = os.path.basename( - os.path.splitext(cc_filename)[0] + '.h') - includes.append("%s/%s" % (dep.dir_path, h_filename)) - - for cc_filename, _ in target.src_obj_pairs: - cc_path = os.path.join(root_dir, cc_filename) - h_path = os.path.splitext(cc_path)[0] + '.h' - namespace = os.path.basename(target.dir_path) - class_ = os.path.splitext(os.path.basename(cc_filename))[0] - try: - os.makedirs(os.path.dirname(cc_path)) - except OSError: - pass - - with open(h_path, 'w') as f: - f.write('namespace %s { struct %s { %s(); }; }' % (namespace, - class_, class_)) - with open(cc_path, 'w') as f: - for include in includes: - f.write('#include "%s"\n' % include) - f.write('\n') - f.write('namespace %s { %s::%s() {} }' % (namespace, - class_, class_)) - - if need_main: - f.write('int main(int argc, char **argv) {}\n') - need_main = False - -def write_master_ninja(master_ninja, targets): - """Writes master build.ninja file, referencing all given subninjas.""" - master_ninja.variable('cxx', 'c++') - master_ninja.variable('ld', '$cxx') - if sys.platform == 'darwin': - master_ninja.variable('alink', 'libtool -static') - else: - master_ninja.variable('alink', 'ar rcs') - master_ninja.newline() - - master_ninja.pool('link_pool', depth=4) - master_ninja.newline() - - master_ninja.rule('cxx', description='CXX $out', - command='$cxx -MMD -MF $out.d $defines $includes $cflags -c $in -o $out', - depfile='$out.d', deps='gcc') - master_ninja.rule('alink', description='ARCHIVE $out', - command='rm -f $out && $alink -o $out $in') - master_ninja.rule('link', description='LINK $out', pool='link_pool', - command='$ld $ldflags -o $out $in $libs') - master_ninja.rule('stamp', description='STAMP $out', command='touch $out') - master_ninja.newline() - - for target in targets: - master_ninja.subninja(target.ninja_file_path) - master_ninja.newline() - - master_ninja.comment('Short names for targets.') - for target in 
targets: - if target.name != target.output: - master_ninja.build(target.name, 'phony', target.output) - master_ninja.newline() - - master_ninja.build('all', 'phony', [target.output for target in targets]) - master_ninja.default('all') - - -@contextlib.contextmanager -def FileWriter(path): - """Context manager for a ninja_syntax object writing to a file.""" - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass - f = open(path, 'w') - yield ninja_syntax.Writer(f) - f.close() - - -def random_targets(num_targets, src_dir): - gen = GenRandom(src_dir) - - # N-1 static libraries, and 1 executable depending on all of them. - targets = [Target(gen, LIB) for i in xrange(num_targets - 1)] - for i in range(len(targets)): - targets[i].deps = [t for t in targets[0:i] if random.random() < 0.05] - - last_target = Target(gen, EXE) - last_target.deps = targets[:] - last_target.src_obj_pairs = last_target.src_obj_pairs[0:10] # Trim. - targets.append(last_target) - return targets - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('-s', '--sources', nargs="?", const="src", - help='write sources to directory (relative to output directory)') - parser.add_argument('-t', '--targets', type=int, default=1500, - help='number of targets (default: 1500)') - parser.add_argument('-S', '--seed', type=int, help='random seed', - default=12345) - parser.add_argument('outdir', help='output directory') - args = parser.parse_args() - root_dir = args.outdir - - random.seed(args.seed) - - do_write_sources = args.sources is not None - src_dir = args.sources if do_write_sources else "src" - - targets = random_targets(args.targets, src_dir) - for target in targets: - with FileWriter(os.path.join(root_dir, target.ninja_file_path)) as n: - write_target_ninja(n, target, src_dir) - - if do_write_sources: - write_sources(target, root_dir) - - with FileWriter(os.path.join(root_dir, 'build.ninja')) as master_ninja: - master_ninja.width = 120 - write_master_ninja(master_ninja, 
targets) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/ninja/misc/zsh-completion b/ninja/misc/zsh-completion deleted file mode 100644 index 4cee3b8631f..00000000000 --- a/ninja/misc/zsh-completion +++ /dev/null @@ -1,72 +0,0 @@ -#compdef ninja -# Copyright 2011 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Add the following to your .zshrc to tab-complete ninja targets -# fpath=(path/to/ninja/misc/zsh-completion $fpath) - -__get_targets() { - dir="." 
- if [ -n "${opt_args[-C]}" ]; - then - eval dir="${opt_args[-C]}" - fi - file="build.ninja" - if [ -n "${opt_args[-f]}" ]; - then - eval file="${opt_args[-f]}" - fi - targets_command="ninja -f \"${file}\" -C \"${dir}\" -t targets all" - eval ${targets_command} 2>/dev/null | cut -d: -f1 -} - -__get_tools() { - ninja -t list 2>/dev/null | while read -r a b; do echo $a; done | tail -n +2 -} - -__get_modes() { - ninja -d list 2>/dev/null | while read -r a b; do echo $a; done | tail -n +2 | sed '$d' -} - -__modes() { - local -a modes - modes=(${(fo)"$(__get_modes)"}) - _describe 'modes' modes -} - -__tools() { - local -a tools - tools=(${(fo)"$(__get_tools)"}) - _describe 'tools' tools -} - -__targets() { - local -a targets - targets=(${(fo)"$(__get_targets)"}) - _describe 'targets' targets -} - -_arguments \ - {-h,--help}'[Show help]' \ - '--version[Print ninja version]' \ - '-C+[Change to directory before doing anything else]:directories:_directories' \ - '-f+[Specify input build file (default=build.ninja)]:files:_files' \ - '-j+[Run N jobs in parallel (default=number of CPUs available)]:number of jobs' \ - '-l+[Do not start new jobs if the load average is greater than N]:number of jobs' \ - '-k+[Keep going until N jobs fail (default=1)]:number of jobs' \ - '-n[Dry run (do not run commands but act like they succeeded)]' \ - '-v[Show all command lines while building]' \ - '-d+[Enable debugging (use -d list to list modes)]:modes:__modes' \ - '-t+[Run a subtool (use -t list to list subtools)]:tools:__tools' \ - '*::targets:__targets' diff --git a/ninja/src/browse.cc b/ninja/src/browse.cc deleted file mode 100644 index c08c9f4d402..00000000000 --- a/ninja/src/browse.cc +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "browse.h" - -#include -#include -#include -#include -#include - -#include "build/browse_py.h" - -void RunBrowsePython(State* state, const char* ninja_command, - const char* input_file, int argc, char* argv[]) { - // Fork off a Python process and have it run our code via its stdin. - // (Actually the Python process becomes the parent.) - int pipefd[2]; - if (pipe(pipefd) < 0) { - perror("ninja: pipe"); - return; - } - - pid_t pid = fork(); - if (pid < 0) { - perror("ninja: fork"); - return; - } - - if (pid > 0) { // Parent. - close(pipefd[1]); - do { - if (dup2(pipefd[0], 0) < 0) { - perror("ninja: dup2"); - break; - } - - std::vector command; - command.push_back(NINJA_PYTHON); - command.push_back("-"); - command.push_back("--ninja-command"); - command.push_back(ninja_command); - command.push_back("-f"); - command.push_back(input_file); - for (int i = 0; i < argc; i++) { - command.push_back(argv[i]); - } - command.push_back(NULL); - execvp(command[0], (char**)&command[0]); - if (errno == ENOENT) { - printf("ninja: %s is required for the browse tool\n", NINJA_PYTHON); - } else { - perror("ninja: execvp"); - } - } while (false); - _exit(1); - } else { // Child. - close(pipefd[0]); - - // Write the script file into the stdin of the Python process. 
- ssize_t len = write(pipefd[1], kBrowsePy, sizeof(kBrowsePy)); - if (len < (ssize_t)sizeof(kBrowsePy)) - perror("ninja: write"); - close(pipefd[1]); - exit(0); - } -} diff --git a/ninja/src/browse.h b/ninja/src/browse.h deleted file mode 100644 index 8d6d285711f..00000000000 --- a/ninja/src/browse.h +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_BROWSE_H_ -#define NINJA_BROWSE_H_ - -struct State; - -/// Run in "browse" mode, which execs a Python webserver. -/// \a ninja_command is the command used to invoke ninja. -/// \a args are the number of arguments to be passed to the Python script. -/// \a argv are arguments to be passed to the Python script. -/// This function does not return if it runs successfully. -void RunBrowsePython(State* state, const char* ninja_command, - const char* input_file, int argc, char* argv[]); - -#endif // NINJA_BROWSE_H_ diff --git a/ninja/src/browse.py b/ninja/src/browse.py deleted file mode 100755 index 1c9c39b8e87..00000000000 --- a/ninja/src/browse.py +++ /dev/null @@ -1,230 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2001 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Simple web server for browsing dependency graph data. - -This script is inlined into the final executable and spawned by -it when needed. -""" - -from __future__ import print_function - -try: - import http.server as httpserver - import socketserver -except ImportError: - import BaseHTTPServer as httpserver - import SocketServer as socketserver -import argparse -import cgi -import os -import socket -import subprocess -import sys -import webbrowser -try: - from urllib.request import unquote -except ImportError: - from urllib2 import unquote -from collections import namedtuple - -Node = namedtuple('Node', ['inputs', 'rule', 'target', 'outputs']) - -# Ideally we'd allow you to navigate to a build edge or a build node, -# with appropriate views for each. But there's no way to *name* a build -# edge so we can only display nodes. -# -# For a given node, it has at most one input edge, which has n -# different inputs. This becomes node.inputs. (We leave out the -# outputs of the input edge due to what follows.) The node can have -# multiple dependent output edges. Rather than attempting to display -# those, they are summarized by taking the union of all their outputs. -# -# This means there's no single view that shows you all inputs and outputs -# of an edge. But I think it's less confusing than alternatives. 
- -def match_strip(line, prefix): - if not line.startswith(prefix): - return (False, line) - return (True, line[len(prefix):]) - -def html_escape(text): - return cgi.escape(text, quote=True) - -def parse(text): - lines = iter(text.split('\n')) - - target = None - rule = None - inputs = [] - outputs = [] - - try: - target = next(lines)[:-1] # strip trailing colon - - line = next(lines) - (match, rule) = match_strip(line, ' input: ') - if match: - (match, line) = match_strip(next(lines), ' ') - while match: - type = None - (match, line) = match_strip(line, '| ') - if match: - type = 'implicit' - (match, line) = match_strip(line, '|| ') - if match: - type = 'order-only' - inputs.append((line, type)) - (match, line) = match_strip(next(lines), ' ') - - match, _ = match_strip(line, ' outputs:') - if match: - (match, line) = match_strip(next(lines), ' ') - while match: - outputs.append(line) - (match, line) = match_strip(next(lines), ' ') - except StopIteration: - pass - - return Node(inputs, rule, target, outputs) - -def create_page(body): - return ''' - -''' + body - -def generate_html(node): - document = ['

%s

' % html_escape(node.target)] - - if node.inputs: - document.append('

target is built using rule %s of

' % - html_escape(node.rule)) - if len(node.inputs) > 0: - document.append('
') - for input, type in sorted(node.inputs): - extra = '' - if type: - extra = ' (%s)' % html_escape(type) - document.append('%s%s
' % - (html_escape(input), html_escape(input), extra)) - document.append('
') - - if node.outputs: - document.append('

dependent edges build:

') - document.append('
') - for output in sorted(node.outputs): - document.append('%s
' % - (html_escape(output), html_escape(output))) - document.append('
') - - return '\n'.join(document) - -def ninja_dump(target): - cmd = [args.ninja_command, '-f', args.f, '-t', 'query', target] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, - universal_newlines=True) - return proc.communicate() + (proc.returncode,) - -class RequestHandler(httpserver.BaseHTTPRequestHandler): - def do_GET(self): - assert self.path[0] == '/' - target = unquote(self.path[1:]) - - if target == '': - self.send_response(302) - self.send_header('Location', '?' + args.initial_target) - self.end_headers() - return - - if not target.startswith('?'): - self.send_response(404) - self.end_headers() - return - target = target[1:] - - ninja_output, ninja_error, exit_code = ninja_dump(target) - if exit_code == 0: - page_body = generate_html(parse(ninja_output.strip())) - else: - # Relay ninja's error message. - page_body = '

%s

' % html_escape(ninja_error) - - self.send_response(200) - self.end_headers() - self.wfile.write(create_page(page_body).encode('utf-8')) - - def log_message(self, format, *args): - pass # Swallow console spam. - -parser = argparse.ArgumentParser(prog='ninja -t browse') -parser.add_argument('--port', '-p', default=8000, type=int, - help='Port number to use (default %(default)d)') -parser.add_argument('--hostname', '-a', default='localhost', type=str, - help='Hostname to bind to (default %(default)s)') -parser.add_argument('--no-browser', action='store_true', - help='Do not open a webbrowser on startup.') - -parser.add_argument('--ninja-command', default='ninja', - help='Path to ninja binary (default %(default)s)') -parser.add_argument('-f', default='build.ninja', - help='Path to build.ninja file (default %(default)s)') -parser.add_argument('initial_target', default='all', nargs='?', - help='Initial target to show (default %(default)s)') - -class HTTPServer(socketserver.ThreadingMixIn, httpserver.HTTPServer): - # terminate server immediately when Python exits. - daemon_threads = True - -args = parser.parse_args() -port = args.port -hostname = args.hostname -httpd = HTTPServer((hostname,port), RequestHandler) -try: - if hostname == "": - hostname = socket.gethostname() - print('Web server running on %s:%d, ctl-C to abort...' % (hostname,port) ) - print('Web server pid %d' % os.getpid(), file=sys.stderr ) - if not args.no_browser: - webbrowser.open_new('http://%s:%s' % (hostname, port) ) - httpd.serve_forever() -except KeyboardInterrupt: - print() - pass # Swallow console spam. - - diff --git a/ninja/src/build.cc b/ninja/src/build.cc deleted file mode 100644 index 8f06cb4e090..00000000000 --- a/ninja/src/build.cc +++ /dev/null @@ -1,1167 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "build.h" - -#include -#include -#include -#include -#include - -#ifdef _WIN32 -#include -#include -#endif - -#if defined(__SVR4) && defined(__sun) -#include -#endif - -#include "build_log.h" -#if 0 -#include "clparser.h" -#endif -#include "debug_flags.h" -#if 0 -#include "depfile_parser.h" -#include "deps_log.h" -#endif -#include "disk_interface.h" -#include "graph.h" -#include "state.h" -#include "subprocess.h" -#include "util.h" - -namespace { - -/// A CommandRunner that doesn't actually run the commands. -struct DryRunCommandRunner : public CommandRunner { - virtual ~DryRunCommandRunner() {} - - // Overridden from CommandRunner: - virtual bool CanRunMore(); - virtual bool StartCommand(Edge* edge); - virtual bool WaitForCommand(Result* result); - - private: - queue finished_; -}; - -bool DryRunCommandRunner::CanRunMore() { - return true; -} - -bool DryRunCommandRunner::StartCommand(Edge* edge) { - finished_.push(edge); - return true; -} - -bool DryRunCommandRunner::WaitForCommand(Result* result) { - if (finished_.empty()) - return false; - - result->status = ExitSuccess; - result->edge = finished_.front(); - finished_.pop(); - return true; -} - -} // namespace - -BuildStatus::BuildStatus(const BuildConfig& config) - : config_(config), - start_time_millis_(GetTimeMillis()), - started_edges_(0), finished_edges_(0), total_edges_(0), -#if 0 - progress_status_format_(NULL), -#endif - overall_rate_(), current_rate_(config.parallelism) { - - // Don't do anything fancy in verbose mode. 
- if (config_.verbosity != BuildConfig::NORMAL) - printer_.set_smart_terminal(false); - -#if 0 - progress_status_format_ = getenv("NINJA_STATUS"); - if (!progress_status_format_) - progress_status_format_ = "[%f/%t] "; -#endif -} - -void BuildStatus::PlanHasTotalEdges(int total) { - total_edges_ = total; -} - -void BuildStatus::BuildEdgeStarted(Edge* edge) { - assert(running_edges_.find(edge) == running_edges_.end()); - int start_time = (int)(GetTimeMillis() - start_time_millis_); - running_edges_.insert(make_pair(edge, start_time)); - ++started_edges_; - - if (edge->use_console() || printer_.is_smart_terminal()) - PrintStatus(edge, kEdgeStarted); - - if (edge->use_console()) - printer_.SetConsoleLocked(true); -} - -void BuildStatus::BuildEdgeFinished(Edge* edge, - bool success, - const string& output, - int* start_time, - int* end_time, - FILE* compiler_log - ) { - int64_t now = GetTimeMillis(); - - ++finished_edges_; - - RunningEdgeMap::iterator i = running_edges_.find(edge); - *start_time = i->second; - *end_time = (int)(now - start_time_millis_); - running_edges_.erase(i); - - if (edge->use_console()) - printer_.SetConsoleLocked(false); - -#if 0 - if (config_.verbosity == BuildConfig::QUIET) - return; -#endif - - if (!edge->use_console()) - PrintStatus(edge, kEdgeFinished); - - // Print the command that is spewing before printing its output. 
- if (!success) { -#if 0 - string outputs; - for (vector::const_iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) - outputs += (*o)->path() + " "; -#endif - if (printer_.supports_color()) { - printer_.PrintOnNewLine("\x1b[31mFAILED:\x1b[0m " + edge->outputs_[0]->path() + "\n"); - } else { - printer_.PrintOnNewLine("FAILED: " + edge->outputs_[0]->path() + "\n"); - } - if (config_.verbosity == BuildConfig::VERBOSE) { - printer_.PrintOnNewLine(edge->EvaluateCommand() + "\n"); - } - } - - if (!output.empty()) { - // ninja sets stdout and stderr of subprocesses to a pipe, to be able to - // check if the output is empty. Some compilers, e.g. clang, check - // isatty(stderr) to decide if they should print colored output. - // To make it possible to use colored output with ninja, subprocesses should - // be run with a flag that forces them to always print color escape codes. - // To make sure these escape codes don't show up in a file if ninja's output - // is piped to a file, ninja strips ansi escape codes again if it's not - // writing to a |smart_terminal_|. - // (Launching subprocesses in pseudo ttys doesn't work because there are - // only a few hundred available on some systems, and ninja can launch - // thousands of parallel compile commands.) 
- string final_output; - string stripped = StripAnsiEscapeCodes(output); - if (!printer_.supports_color()) - final_output = stripped; - else - final_output = output; - -#ifdef _WIN32 - // Fix extra CR being added on Windows, writing out CR CR LF (#773) - _setmode(_fileno(stdout), _O_BINARY); // Begin Windows extra CR fix -#endif - - printer_.PrintOnNewLine(final_output); - -#ifdef _WIN32 - _setmode(_fileno(stdout), _O_TEXT); // End Windows extra CR fix -#endif - if (compiler_log) { - fputs(stripped.c_str(), compiler_log); - fflush(compiler_log); - } - } -} - -void BuildStatus::BuildLoadDyndeps() { - // The DependencyScan calls EXPLAIN() to print lines explaining why - // it considers a portion of the graph to be out of date. Normally - // this is done before the build starts, but our caller is about to - // load a dyndep file during the build. Doing so may generate more - // exlanation lines (via fprintf directly to stderr), but in an - // interactive console the cursor is currently at the end of a status - // line. Start a new line so that the first explanation does not - // append to the status line. After the explanations are done a - // new build status line will appear. - if (g_explaining) - printer_.PrintOnNewLine(""); -} - -void BuildStatus::BuildStarted() { - overall_rate_.Restart(); - current_rate_.Restart(); -} - -void BuildStatus::BuildFinished() { - printer_.SetConsoleLocked(false); - printer_.PrintOnNewLine(""); -} -#if 0 -string BuildStatus::FormatProgressStatus( - const char* progress_status_format, EdgeStatus status) const { - string out; - char buf[32]; - int percent; - for (const char* s = progress_status_format; *s != '\0'; ++s) { - if (*s == '%') { - ++s; - switch (*s) { - case '%': - out.push_back('%'); - break; - - // Started edges. - case 's': - snprintf(buf, sizeof(buf), "%d", started_edges_); - out += buf; - break; - - // Total edges. - case 't': - snprintf(buf, sizeof(buf), "%d", total_edges_); - out += buf; - break; - - // Running edges. 
- case 'r': { - int running_edges = started_edges_ - finished_edges_; - // count the edge that just finished as a running edge - if (status == kEdgeFinished) - running_edges++; - snprintf(buf, sizeof(buf), "%d", running_edges); - out += buf; - break; - } - - // Unstarted edges. - case 'u': - snprintf(buf, sizeof(buf), "%d", total_edges_ - started_edges_); - out += buf; - break; - - // Finished edges. - case 'f': - snprintf(buf, sizeof(buf), "%d", finished_edges_); - out += buf; - break; - - // Overall finished edges per second. - case 'o': - overall_rate_.UpdateRate(finished_edges_); - SnprintfRate(overall_rate_.rate(), buf, "%.1f"); - out += buf; - break; - - // Current rate, average over the last '-j' jobs. - case 'c': - current_rate_.UpdateRate(finished_edges_); - SnprintfRate(current_rate_.rate(), buf, "%.1f"); - out += buf; - break; - - // Percentage - case 'p': - percent = (100 * finished_edges_) / total_edges_; - snprintf(buf, sizeof(buf), "%3i%%", percent); - out += buf; - break; - - case 'e': { - double elapsed = overall_rate_.Elapsed(); - snprintf(buf, sizeof(buf), "%.3f", elapsed); - out += buf; - break; - } - - default: - Fatal("unknown placeholder '%%%c' in $NINJA_STATUS", *s); - return ""; - } - } else { - out.push_back(*s); - } - } - - return out; -} -#endif -void BuildStatus::PrintStatus(Edge* edge, EdgeStatus status) { - if (config_.verbosity == BuildConfig::QUIET) - return; - - bool force_full_command = config_.verbosity == BuildConfig::VERBOSE; - char buf[40]; - snprintf(buf,sizeof(buf),"rescript: [%d/%d] ",finished_edges_,total_edges_); - string to_print = buf + edge->outputs_[0]->path(); - -#if 0 - if (to_print.empty() || force_full_command) - to_print = edge->GetBinding("command"); - - to_print = FormatProgressStatus(progress_status_format_, status) + to_print; -#endif - printer_.Print(to_print, - force_full_command ? 
LinePrinter::FULL : LinePrinter::ELIDE); -} - -Plan::Plan(Builder* builder) - : builder_(builder) - , command_edges_(0) - , wanted_edges_(0) -{} - -void Plan::Reset() { - command_edges_ = 0; - wanted_edges_ = 0; - ready_.clear(); - want_.clear(); -} - -bool Plan::AddTarget(Node* node, string* err) { - return AddSubTarget(node, NULL, err, NULL); -} - -bool Plan::AddSubTarget(Node* node, Node* dependent, string* err, - set* dyndep_walk) { - Edge* edge = node->in_edge(); - if (!edge) { // Leaf node. - if (node->dirty()) { - string referenced; - if (dependent) - referenced = ", needed by '" + dependent->path() + "',"; - *err = "'" + node->path() + "'" + referenced + " missing " - "and no known rule to make it"; - } - return false; - } - - if (edge->outputs_ready()) - return false; // Don't need to do anything. - - // If an entry in want_ does not already exist for edge, create an entry which - // maps to kWantNothing, indicating that we do not want to build this entry itself. - pair::iterator, bool> want_ins = - want_.insert(make_pair(edge, kWantNothing)); - Want& want = want_ins.first->second; - - if (dyndep_walk && want == kWantToFinish) - return false; // Don't need to do anything with already-scheduled edge. - - // If we do need to build edge and we haven't already marked it as wanted, - // mark it now. - if (node->dirty() && want == kWantNothing) { - want = kWantToStart; - EdgeWanted(edge); - if (!dyndep_walk && edge->AllInputsReady()) - ScheduleWork(want_ins.first); - } - - if (dyndep_walk) - dyndep_walk->insert(edge); - - if (!want_ins.second) - return true; // We've already processed the inputs. 
- - for (vector::iterator i = edge->inputs_.begin(); - i != edge->inputs_.end(); ++i) { - if (!AddSubTarget(*i, node, err, dyndep_walk) && !err->empty()) - return false; - } - - return true; -} - -void Plan::EdgeWanted(Edge* edge) { - ++wanted_edges_; - if (!edge->is_phony()) - ++command_edges_; -} - -Edge* Plan::FindWork() { - if (ready_.empty()) - return NULL; - set::iterator e = ready_.begin(); - Edge* edge = *e; - ready_.erase(e); - return edge; -} - -void Plan::ScheduleWork(map::iterator want_e) { - if (want_e->second == kWantToFinish) { - // This edge has already been scheduled. We can get here again if an edge - // and one of its dependencies share an order-only input, or if a node - // duplicates an out edge (see https://github.com/ninja-build/ninja/pull/519). - // Avoid scheduling the work again. - return; - } - assert(want_e->second == kWantToStart); - want_e->second = kWantToFinish; - - Edge* edge = want_e->first; - Pool* pool = edge->pool(); - if (pool->ShouldDelayEdge()) { - pool->DelayEdge(edge); - pool->RetrieveReadyEdges(&ready_); - } else { - pool->EdgeScheduled(*edge); - ready_.insert(edge); - } -} - -bool Plan::EdgeFinished(Edge* edge, EdgeResult result, string* err) { - map::iterator e = want_.find(edge); - assert(e != want_.end()); - bool directly_wanted = e->second != kWantNothing; - - // See if this job frees up any delayed jobs. - if (directly_wanted) - edge->pool()->EdgeFinished(*edge); - edge->pool()->RetrieveReadyEdges(&ready_); - - // The rest of this function only applies to successful commands. - if (result != kEdgeSucceeded) - return true; - - if (directly_wanted) - --wanted_edges_; - want_.erase(e); - edge->outputs_ready_ = true; - - // Check off any nodes we were waiting for with this edge. 
- for (vector::iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) { - if (!NodeFinished(*o, err)) - return false; - } - return true; -} - -bool Plan::NodeFinished(Node* node, string* err) { - // If this node provides dyndep info, load it now. - if (node->dyndep_pending()) { - assert(builder_ && "dyndep requires Plan to have a Builder"); - // Load the now-clean dyndep file. This will also update the - // build plan and schedule any new work that is ready. - return builder_->LoadDyndeps(node, err); - } - - // See if we we want any edges from this node. - for (vector::const_iterator oe = node->out_edges().begin(); - oe != node->out_edges().end(); ++oe) { - map::iterator want_e = want_.find(*oe); - if (want_e == want_.end()) - continue; - - // See if the edge is now ready. - if (!EdgeMaybeReady(want_e, err)) - return false; - } - return true; -} - -bool Plan::EdgeMaybeReady(map::iterator want_e, string* err) { - Edge* edge = want_e->first; - if (edge->AllInputsReady()) { - if (want_e->second != kWantNothing) { - ScheduleWork(want_e); - } else { - // We do not need to build this edge, but we might need to build one of - // its dependents. - if (!EdgeFinished(edge, kEdgeSucceeded, err)) - return false; - } - } - return true; -} - -bool Plan::CleanNode(DependencyScan* scan, Node* node, string* err) { - node->set_dirty(false); - - for (vector::const_iterator oe = node->out_edges().begin(); - oe != node->out_edges().end(); ++oe) { - // Don't process edges that we don't actually want. - map::iterator want_e = want_.find(*oe); - if (want_e == want_.end() || want_e->second == kWantNothing) - continue; - - // Don't attempt to clean an edge if it failed to load deps. - if ((*oe)->deps_missing_) - continue; - - // If all non-order-only inputs for this edge are now clean, - // we might have changed the dirty state of the outputs. 
- vector::iterator - begin = (*oe)->inputs_.begin(), - end = (*oe)->inputs_.end() - (*oe)->order_only_deps_; -#if __cplusplus < 201703L -#define MEM_FN mem_fun -#else -#define MEM_FN mem_fn // mem_fun was removed in C++17. -#endif - if (find_if(begin, end, MEM_FN(&Node::dirty)) == end) { - // Recompute most_recent_input. - Node* most_recent_input = NULL; - for (vector::iterator i = begin; i != end; ++i) { - if (!most_recent_input || (*i)->mtime() > most_recent_input->mtime()) - most_recent_input = *i; - } - - // Now, this edge is dirty if any of the outputs are dirty. - // If the edge isn't dirty, clean the outputs and mark the edge as not - // wanted. - bool outputs_dirty = false; - if (!scan->RecomputeOutputsDirty(*oe, most_recent_input, - &outputs_dirty, err)) { - return false; - } - if (!outputs_dirty) { - for (vector::iterator o = (*oe)->outputs_.begin(); - o != (*oe)->outputs_.end(); ++o) { - if (!CleanNode(scan, *o, err)) - return false; - } - - want_e->second = kWantNothing; - --wanted_edges_; - if (!(*oe)->is_phony()) - --command_edges_; - } - } - } - return true; -} - -bool Plan::DyndepsLoaded(DependencyScan* scan, Node* node, - const DyndepFile& ddf, string* err) { - // Recompute the dirty state of all our direct and indirect dependents now - // that our dyndep information has been loaded. - if (!RefreshDyndepDependents(scan, node, err)) - return false; - - // We loaded dyndep information for those out_edges of the dyndep node that - // specify the node in a dyndep binding, but they may not be in the plan. - // Starting with those already in the plan, walk newly-reachable portion - // of the graph through the dyndep-discovered dependencies. - - // Find edges in the the build plan for which we have new dyndep info. - std::vector dyndep_roots; - for (DyndepFile::const_iterator oe = ddf.begin(); oe != ddf.end(); ++oe) { - Edge* edge = oe->first; - - // If the edge outputs are ready we do not need to consider it here. 
- if (edge->outputs_ready()) - continue; - - map::iterator want_e = want_.find(edge); - - // If the edge has not been encountered before then nothing already in the - // plan depends on it so we do not need to consider the edge yet either. - if (want_e == want_.end()) - continue; - - // This edge is already in the plan so queue it for the walk. - dyndep_roots.push_back(oe); - } - - // Walk dyndep-discovered portion of the graph to add it to the build plan. - std::set dyndep_walk; - for (std::vector::iterator - oei = dyndep_roots.begin(); oei != dyndep_roots.end(); ++oei) { - DyndepFile::const_iterator oe = *oei; - for (vector::const_iterator i = oe->second.implicit_inputs_.begin(); - i != oe->second.implicit_inputs_.end(); ++i) { - if (!AddSubTarget(*i, oe->first->outputs_[0], err, &dyndep_walk) && - !err->empty()) - return false; - } - } - - // Add out edges from this node that are in the plan (just as - // Plan::NodeFinished would have without taking the dyndep code path). - for (vector::const_iterator oe = node->out_edges().begin(); - oe != node->out_edges().end(); ++oe) { - map::iterator want_e = want_.find(*oe); - if (want_e == want_.end()) - continue; - dyndep_walk.insert(want_e->first); - } - - // See if any encountered edges are now ready. - for (set::iterator wi = dyndep_walk.begin(); - wi != dyndep_walk.end(); ++wi) { - map::iterator want_e = want_.find(*wi); - if (want_e == want_.end()) - continue; - if (!EdgeMaybeReady(want_e, err)) - return false; - } - - return true; -} - -bool Plan::RefreshDyndepDependents(DependencyScan* scan, Node* node, - string* err) { - // Collect the transitive closure of dependents and mark their edges - // as not yet visited by RecomputeDirty. - set dependents; - UnmarkDependents(node, &dependents); - - // Update the dirty state of all dependents and check if their edges - // have become wanted. 
- for (set::iterator i = dependents.begin(); - i != dependents.end(); ++i) { - Node* n = *i; - - // Check if this dependent node is now dirty. Also checks for new cycles. - if (!scan->RecomputeDirty(n, err)) - return false; - if (!n->dirty()) - continue; - - // This edge was encountered before. However, we may not have wanted to - // build it if the outputs were not known to be dirty. With dyndep - // information an output is now known to be dirty, so we want the edge. - Edge* edge = n->in_edge(); - assert(edge && !edge->outputs_ready()); - map::iterator want_e = want_.find(edge); - assert(want_e != want_.end()); - if (want_e->second == kWantNothing) { - want_e->second = kWantToStart; - EdgeWanted(edge); - } - } - return true; -} - -void Plan::UnmarkDependents(Node* node, set* dependents) { - for (vector::const_iterator oe = node->out_edges().begin(); - oe != node->out_edges().end(); ++oe) { - Edge* edge = *oe; - - map::iterator want_e = want_.find(edge); - if (want_e == want_.end()) - continue; - - if (edge->mark_ != Edge::VisitNone) { - edge->mark_ = Edge::VisitNone; - for (vector::iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) { - if (dependents->insert(*o).second) - UnmarkDependents(*o, dependents); - } - } - } -} - -void Plan::Dump() { - printf("pending: %d\n", (int)want_.size()); - for (map::iterator e = want_.begin(); e != want_.end(); ++e) { - if (e->second != kWantNothing) - printf("want "); - e->first->Dump(); - } - printf("ready: %d\n", (int)ready_.size()); -} - -struct RealCommandRunner : public CommandRunner { - explicit RealCommandRunner(const BuildConfig& config) : config_(config) {} - virtual ~RealCommandRunner() {} - virtual bool CanRunMore(); - virtual bool StartCommand(Edge* edge); - virtual bool WaitForCommand(Result* result); - virtual vector GetActiveEdges(); - virtual void Abort(); - - const BuildConfig& config_; - SubprocessSet subprocs_; - map subproc_to_edge_; -}; - -vector RealCommandRunner::GetActiveEdges() { - 
vector edges; - for (map::iterator e = subproc_to_edge_.begin(); - e != subproc_to_edge_.end(); ++e) - edges.push_back(e->second); - return edges; -} - -void RealCommandRunner::Abort() { - subprocs_.Clear(); -} - -bool RealCommandRunner::CanRunMore() { - size_t subproc_number = - subprocs_.running_.size() + subprocs_.finished_.size(); - return (int)subproc_number < config_.parallelism - && ((subprocs_.running_.empty() || config_.max_load_average <= 0.0f) - || GetLoadAverage() < config_.max_load_average); -} - -bool RealCommandRunner::StartCommand(Edge* edge) { - string command = edge->EvaluateCommand(); - Subprocess* subproc = subprocs_.Add(command, edge->use_console()); - if (!subproc) - return false; - subproc_to_edge_.insert(make_pair(subproc, edge)); - - return true; -} - -bool RealCommandRunner::WaitForCommand(Result* result) { - Subprocess* subproc; - while ((subproc = subprocs_.NextFinished()) == NULL) { - bool interrupted = subprocs_.DoWork(); - if (interrupted) - return false; - } - - result->status = subproc->Finish(); - result->output = subproc->GetOutput(); - - map::iterator e = subproc_to_edge_.find(subproc); - result->edge = e->second; - subproc_to_edge_.erase(e); - - delete subproc; - return true; -} - -Builder::Builder(State* state, const BuildConfig& config, - BuildLog* build_log, DepsLog* deps_log, - DiskInterface* disk_interface, - FILE* compiler_log) - : state_(state), config_(config), - compiler_log_(compiler_log), - plan_(this), disk_interface_(disk_interface), - scan_(state, build_log, deps_log, disk_interface, - &config_.depfile_parser_options) { - status_ = new BuildStatus(config); -} - -Builder::~Builder() { - Cleanup(); -} - -void Builder::Cleanup() { - if (command_runner_.get()) { - vector active_edges = command_runner_->GetActiveEdges(); - command_runner_->Abort(); - - for (vector::iterator e = active_edges.begin(); - e != active_edges.end(); ++e) { - string depfile = (*e)->GetUnescapedDepfile(); - for (vector::iterator o = 
(*e)->outputs_.begin(); - o != (*e)->outputs_.end(); ++o) { - // Only delete this output if it was actually modified. This is - // important for things like the generator where we don't want to - // delete the manifest file if we can avoid it. But if the rule - // uses a depfile, always delete. (Consider the case where we - // need to rebuild an output because of a modified header file - // mentioned in a depfile, and the command touches its depfile - // but is interrupted before it touches its output file.) - string err; - TimeStamp new_mtime = disk_interface_->Stat((*o)->path(), &err); - if (new_mtime == -1) // Log and ignore Stat() errors. - Error("%s", err.c_str()); - if (!depfile.empty() || (*o)->mtime() != new_mtime) - disk_interface_->RemoveFile((*o)->path()); - } - if (!depfile.empty()) - disk_interface_->RemoveFile(depfile); - } - } - - if (compiler_log_) { - compiler_log_ = NULL; - } -} - -Node* Builder::AddTarget(const string& name, string* err) { - Node* node = state_->LookupNode(name); - if (!node) { - *err = "unknown target: '" + name + "'"; - return NULL; - } - if (!AddTarget(node, err)) - return NULL; - return node; -} - -bool Builder::AddTarget(Node* node, string* err) { - if (!scan_.RecomputeDirty(node, err)) - return false; - - if (Edge* in_edge = node->in_edge()) { - if (in_edge->outputs_ready()) - return true; // Nothing to do. - } - - if (!plan_.AddTarget(node, err)) - return false; - - return true; -} - -bool Builder::AlreadyUpToDate() const { - return !plan_.more_to_do(); -} - -bool Builder::Build(string* err) { - assert(!AlreadyUpToDate()); - - status_->PlanHasTotalEdges(plan_.command_edge_count()); - int pending_commands = 0; - int failures_allowed = config_.failures_allowed; - - // Set up the command runner if we haven't done so already. 
- if (!command_runner_.get()) { - if (config_.dry_run) - command_runner_.reset(new DryRunCommandRunner); - else - command_runner_.reset(new RealCommandRunner(config_)); - } - - // We are about to start the build process. - status_->BuildStarted(); - - // This main loop runs the entire build process. - // It is structured like this: - // First, we attempt to start as many commands as allowed by the - // command runner. - // Second, we attempt to wait for / reap the next finished command. - while (plan_.more_to_do()) { - // See if we can start any more commands. - if (failures_allowed && command_runner_->CanRunMore()) { - if (Edge* edge = plan_.FindWork()) { - if (!StartEdge(edge, err)) { - Cleanup(); - status_->BuildFinished(); - return false; - } - - if (edge->is_phony()) { - if (!plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, err)) { - Cleanup(); - status_->BuildFinished(); - return false; - } - } else { - ++pending_commands; - } - - // We made some progress; go back to the main loop. - continue; - } - } - - // See if we can reap any finished commands. - if (pending_commands) { - CommandRunner::Result result; - if (!command_runner_->WaitForCommand(&result) || - result.status == ExitInterrupted) { - Cleanup(); - status_->BuildFinished(); - *err = "interrupted by user"; - return false; - } - - --pending_commands; - if (!FinishCommand(&result, err)) { - Cleanup(); - status_->BuildFinished(); - return false; - } - - if (!result.success()) { - if (failures_allowed) - failures_allowed--; - } - - // We made some progress; start the main loop over. - continue; - } - - // If we get here, we cannot make any more progress. 
- status_->BuildFinished(); - if (failures_allowed == 0) { - if (config_.failures_allowed > 1) - *err = "subcommands failed"; - else - *err = "subcommand failed"; - } else if (failures_allowed < config_.failures_allowed) - *err = "cannot make progress due to previous errors"; - else - *err = "stuck [this is a bug]"; - - return false; - } - - status_->BuildFinished(); - return true; -} - -bool Builder::StartEdge(Edge* edge, string* err) { - METRIC_RECORD("StartEdge"); - if (edge->is_phony()) - return true; - - status_->BuildEdgeStarted(edge); - - // Create directories necessary for outputs. - // XXX: this will block; do we care? - for (vector::iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) { - if (!disk_interface_->MakeDirs((*o)->path())) - return false; - } - - // Create response file, if needed - // XXX: this may also block; do we care? - string rspfile = edge->GetUnescapedRspfile(); - if (!rspfile.empty()) { - string content = edge->GetBinding("rspfile_content"); - if (!disk_interface_->WriteFile(rspfile, content)) - return false; - } - - // start command computing and run it - if (!command_runner_->StartCommand(edge)) { - err->assign("command '" + edge->EvaluateCommand() + "' failed."); - return false; - } - - return true; -} - -bool Builder::FinishCommand(CommandRunner::Result* result, string* err) { - METRIC_RECORD("FinishCommand"); - - Edge* edge = result->edge; - - // First try to extract dependencies from the result, if any. - // This must happen first as it filters the command output (we want - // to filter /showIncludes output, even on compile failure) and - // extraction itself can fail, which makes the command fail from a - // build perspective. 
- vector deps_nodes; -#if 0 - string deps_type = edge->GetBinding("deps"); - const string deps_prefix = edge->GetBinding("msvc_deps_prefix"); - if (!deps_type.empty()) { - string extract_err; - if (!ExtractDeps(result, deps_type, deps_prefix, &deps_nodes, - &extract_err) && - result->success()) { - if (!result->output.empty()) - result->output.append("\n"); - result->output.append(extract_err); - result->status = ExitFailure; - } - } -#endif - int start_time, end_time; - status_->BuildEdgeFinished(edge, result->success(), result->output, - &start_time, &end_time, compiler_log_); - - // The rest of this function only applies to successful commands. - if (!result->success()) { - return plan_.EdgeFinished(edge, Plan::kEdgeFailed, err); - } - - // Restat the edge outputs - TimeStamp output_mtime = 0; - bool restat = edge->GetBindingBool("restat"); - if (!config_.dry_run) { - bool node_cleaned = false; - - for (vector::iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) { - TimeStamp new_mtime = disk_interface_->Stat((*o)->path(), err); - if (new_mtime == -1) - return false; - if (new_mtime > output_mtime) - output_mtime = new_mtime; - if ((*o)->mtime() == new_mtime && restat) { - // The rule command did not change the output. Propagate the clean - // state through the build graph. - // Note that this also applies to nonexistent outputs (mtime == 0). - if (!plan_.CleanNode(&scan_, *o, err)) - return false; - node_cleaned = true; - } - } - - if (node_cleaned) { - TimeStamp restat_mtime = 0; - // If any output was cleaned, find the most recent mtime of any - // (existing) non-order-only input or the depfile. 
- for (vector::iterator i = edge->inputs_.begin(); - i != edge->inputs_.end() - edge->order_only_deps_; ++i) { - TimeStamp input_mtime = disk_interface_->Stat((*i)->path(), err); - if (input_mtime == -1) - return false; - if (input_mtime > restat_mtime) - restat_mtime = input_mtime; - } - - string depfile = edge->GetUnescapedDepfile(); - if (restat_mtime != 0 && /* deps_type.empty() && */ !depfile.empty()) { - TimeStamp depfile_mtime = disk_interface_->Stat(depfile, err); - if (depfile_mtime == -1) - return false; - if (depfile_mtime > restat_mtime) - restat_mtime = depfile_mtime; - } - - // The total number of edges in the plan may have changed as a result - // of a restat. - status_->PlanHasTotalEdges(plan_.command_edge_count()); - - output_mtime = restat_mtime; - } - } - - if (!plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, err)) - return false; - - // Delete any left over response file. - string rspfile = edge->GetUnescapedRspfile(); - if (!rspfile.empty() && !g_keep_rsp) - disk_interface_->RemoveFile(rspfile); - - if (scan_.build_log()) { - if (!scan_.build_log()->RecordCommand(edge, start_time, end_time, - output_mtime)) { - *err = string("Error writing to build log: ") + strerror(errno); - return false; - } - } - -#if 0 - if (!deps_type.empty() && !config_.dry_run) { - assert(edge->outputs_.size() == 1 && "should have been rejected by parser"); - Node* out = edge->outputs_[0]; - TimeStamp deps_mtime = disk_interface_->Stat(out->path(), err); - if (deps_mtime == -1) - return false; - if (!scan_.deps_log()->RecordDeps(out, deps_mtime, deps_nodes)) { - *err = string("Error writing to deps log: ") + strerror(errno); - return false; - } - } -#endif - - return true; -} -#if 0 -bool Builder::ExtractDeps(CommandRunner::Result* result, - const string& deps_type, - const string& deps_prefix, - vector* deps_nodes, - string* err) { - if (deps_type == "msvc") { - CLParser parser; - string output; - if (!parser.Parse(result->output, deps_prefix, &output, err)) - return 
false; - result->output = output; - for (set::iterator i = parser.includes_.begin(); - i != parser.includes_.end(); ++i) { - // ~0 is assuming that with MSVC-parsed headers, it's ok to always make - // all backslashes (as some of the slashes will certainly be backslashes - // anyway). This could be fixed if necessary with some additional - // complexity in IncludesNormalize::Relativize. - deps_nodes->push_back(state_->GetNode(*i, ~0u)); - } - } else - if (deps_type == "gcc") { - string depfile = result->edge->GetUnescapedDepfile(); - if (depfile.empty()) { - *err = string("edge with deps=gcc but no depfile makes no sense"); - return false; - } - - // Read depfile content. Treat a missing depfile as empty. - string content; - switch (disk_interface_->ReadFile(depfile, &content, err)) { - case DiskInterface::Okay: - break; - case DiskInterface::NotFound: - err->clear(); - break; - case DiskInterface::OtherError: - return false; - } - if (content.empty()) - return true; - - DepfileParser deps(config_.depfile_parser_options); - if (!deps.Parse(&content, err)) - return false; - - // XXX check depfile matches expected output. - deps_nodes->reserve(deps.ins_.size()); - for (vector::iterator i = deps.ins_.begin(); - i != deps.ins_.end(); ++i) { - uint64_t slash_bits; - if (!CanonicalizePath(const_cast(i->str_), &i->len_, &slash_bits, - err)) - return false; - deps_nodes->push_back(state_->GetNode(*i, slash_bits)); - } - - if (!g_keep_depfile) { - if (disk_interface_->RemoveFile(depfile) < 0) { - *err = string("deleting depfile: ") + strerror(errno) + string("\n"); - return false; - } - } - } else { - Fatal("unknown deps type '%s'", deps_type.c_str()); - } - - return true; -} -#endif -bool Builder::LoadDyndeps(Node* node, string* err) { - status_->BuildLoadDyndeps(); - - // Load the dyndep information provided by this node. 
- DyndepFile ddf; - if (!scan_.LoadDyndeps(node, &ddf, err)) - return false; - - // Update the build plan to account for dyndep modifications to the graph. - if (!plan_.DyndepsLoaded(&scan_, node, ddf, err)) - return false; - - // New command edges may have been added to the plan. - status_->PlanHasTotalEdges(plan_.command_edge_count()); - - return true; -} diff --git a/ninja/src/build.h b/ninja/src/build.h deleted file mode 100644 index f2984df9aab..00000000000 --- a/ninja/src/build.h +++ /dev/null @@ -1,340 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_BUILD_H_ -#define NINJA_BUILD_H_ - -#include -#include -#include -#include -#include -#include -#include -#include -#include "depfile_parser.h" -#include "graph.h" // XXX needed for DependencyScan; should rearrange. -#include "exit_status.h" -#include "line_printer.h" -#include "metrics.h" -#include "util.h" // int64_t - -struct BuildLog; -struct BuildStatus; -struct Builder; -struct DiskInterface; -struct Edge; -struct Node; -struct State; - -/// Plan stores the state of a build plan: what we intend to build, -/// which steps we're ready to execute. -struct Plan { - Plan(Builder* builder = NULL); - - /// Add a target to our plan (including all its dependencies). - /// Returns false if we don't need to build this target; may - /// fill in |err| with an error message if there's a problem. 
- bool AddTarget(Node* node, string* err); - - // Pop a ready edge off the queue of edges to build. - // Returns NULL if there's no work to do. - Edge* FindWork(); - - /// Returns true if there's more work to be done. - bool more_to_do() const { return wanted_edges_ > 0 && command_edges_ > 0; } - - /// Dumps the current state of the plan. - void Dump(); - - enum EdgeResult { - kEdgeFailed, - kEdgeSucceeded - }; - - /// Mark an edge as done building (whether it succeeded or failed). - /// If any of the edge's outputs are dyndep bindings of their dependents, - /// this loads dynamic dependencies from the nodes' paths. - /// Returns 'false' if loading dyndep info fails and 'true' otherwise. - bool EdgeFinished(Edge* edge, EdgeResult result, string* err); - - /// Clean the given node during the build. - /// Return false on error. - bool CleanNode(DependencyScan* scan, Node* node, string* err); - - /// Number of edges with commands to run. - int command_edge_count() const { return command_edges_; } - - /// Reset state. Clears want and ready sets. - void Reset(); - - /// Update the build plan to account for modifications made to the graph - /// by information loaded from a dyndep file. - bool DyndepsLoaded(DependencyScan* scan, Node* node, - const DyndepFile& ddf, string* err); -private: - bool RefreshDyndepDependents(DependencyScan* scan, Node* node, string* err); - void UnmarkDependents(Node* node, set* dependents); - bool AddSubTarget(Node* node, Node* dependent, string* err, - set* dyndep_walk); - - /// Update plan with knowledge that the given node is up to date. - /// If the node is a dyndep binding on any of its dependents, this - /// loads dynamic dependencies from the node's path. - /// Returns 'false' if loading dyndep info fails and 'true' otherwise. - bool NodeFinished(Node* node, string* err); - - /// Enumerate possible steps we want for an edge. - enum Want - { - /// We do not want to build the edge, but we might want to build one of - /// its dependents. 
- kWantNothing, - /// We want to build the edge, but have not yet scheduled it. - kWantToStart, - /// We want to build the edge, have scheduled it, and are waiting - /// for it to complete. - kWantToFinish - }; - - void EdgeWanted(Edge* edge); - bool EdgeMaybeReady(map::iterator want_e, string* err); - - /// Submits a ready edge as a candidate for execution. - /// The edge may be delayed from running, for example if it's a member of a - /// currently-full pool. - void ScheduleWork(map::iterator want_e); - - /// Keep track of which edges we want to build in this plan. If this map does - /// not contain an entry for an edge, we do not want to build the entry or its - /// dependents. If it does contain an entry, the enumeration indicates what - /// we want for the edge. - map want_; - - set ready_; - - Builder* builder_; - - /// Total number of edges that have commands (not phony). - int command_edges_; - - /// Total remaining number of wanted edges. - int wanted_edges_; -}; - -/// CommandRunner is an interface that wraps running the build -/// subcommands. This allows tests to abstract out running commands. -/// RealCommandRunner is an implementation that actually runs commands. -struct CommandRunner { - virtual ~CommandRunner() {} - virtual bool CanRunMore() = 0; - virtual bool StartCommand(Edge* edge) = 0; - - /// The result of waiting for a command. - struct Result { - Result() : edge(NULL) {} - Edge* edge; - ExitStatus status; - string output; - bool success() const { return status == ExitSuccess; } - }; - /// Wait for a command to complete, or return false if interrupted. - virtual bool WaitForCommand(Result* result) = 0; - - virtual vector GetActiveEdges() { return vector(); } - virtual void Abort() {} -}; - -/// Options (e.g. verbosity, parallelism) passed to a build. 
-struct BuildConfig { - BuildConfig() : verbosity(NORMAL), dry_run(false), parallelism(1), - failures_allowed(10), max_load_average(-0.0f) {} - - enum Verbosity { - NORMAL, - QUIET, // No output -- used when testing. - VERBOSE - }; - Verbosity verbosity; - bool dry_run; - int parallelism; - int failures_allowed; - /// The maximum load average we must not exceed. A negative value - /// means that we do not have any limit. - double max_load_average; - DepfileParserOptions depfile_parser_options; -}; - -/// Builder wraps the build process: starting commands, updating status. -struct Builder { - Builder(State* state, const BuildConfig& config, - BuildLog* build_log, DepsLog* deps_log, - DiskInterface* disk_interface, FILE* compiler_log); - ~Builder(); - - /// Clean up after interrupted commands by deleting output files. - void Cleanup(); - - Node* AddTarget(const string& name, string* err); - - /// Add a target to the build, scanning dependencies. - /// @return false on error. - bool AddTarget(Node* target, string* err); - - /// Returns true if the build targets are already up to date. - bool AlreadyUpToDate() const; - - /// Run the build. Returns false on error. - /// It is an error to call this function when AlreadyUpToDate() is true. - bool Build(string* err); - - bool StartEdge(Edge* edge, string* err); - - /// Update status ninja logs following a command termination. - /// @return false if the build can not proceed further due to a fatal error. - bool FinishCommand(CommandRunner::Result* result, string* err); - - /// Used for tests. - void SetBuildLog(BuildLog* log) { - scan_.set_build_log(log); - } - - /// Load the dyndep information provided by the given node. - bool LoadDyndeps(Node* node, string* err); - - State* state_; - const BuildConfig& config_; - FILE* compiler_log_; - Plan plan_; -#if __cplusplus < 201703L - auto_ptr command_runner_; -#else - unique_ptr command_runner_; // auto_ptr was removed in C++17. 
-#endif - BuildStatus* status_; - - private: -#if 0 - bool ExtractDeps(CommandRunner::Result* result, const string& deps_type, - const string& deps_prefix, vector* deps_nodes, - string* err); -#endif - DiskInterface* disk_interface_; - DependencyScan scan_; - - // Unimplemented copy ctor and operator= ensure we don't copy the auto_ptr. - Builder(const Builder &other); // DO NOT IMPLEMENT - void operator=(const Builder &other); // DO NOT IMPLEMENT -}; - -/// Tracks the status of a build: completion fraction, printing updates. -struct BuildStatus { - explicit BuildStatus(const BuildConfig& config); - void PlanHasTotalEdges(int total); - void BuildEdgeStarted(Edge* edge); - void BuildEdgeFinished(Edge* edge, bool success, const string& output, - int* start_time, int* end_time, FILE* compiler_log_); - void BuildLoadDyndeps(); - void BuildStarted(); - void BuildFinished(); - - enum EdgeStatus { - kEdgeStarted, - kEdgeFinished, - }; -#if 0 - /// Format the progress status string by replacing the placeholders. - /// See the user manual for more information about the available - /// placeholders. - /// @param progress_status_format The format of the progress status. - /// @param status The status of the edge. - string FormatProgressStatus(const char* progress_status_format, - EdgeStatus status) const; -#endif - private: - void PrintStatus(Edge* edge, EdgeStatus status); - - const BuildConfig& config_; - - /// Time the build started. - int64_t start_time_millis_; - - int started_edges_, finished_edges_, total_edges_; - - /// Map of running edge to time the edge started running. - typedef map RunningEdgeMap; - RunningEdgeMap running_edges_; - - /// Prints progress output. - LinePrinter printer_; -#if 0 - /// The custom progress status format to use. 
- const char* progress_status_format_; -#endif - template - void SnprintfRate(double rate, char(&buf)[S], const char* format) const { - if (rate == -1) - snprintf(buf, S, "?"); - else - snprintf(buf, S, format, rate); - } - - struct RateInfo { - RateInfo() : rate_(-1) {} - - void Restart() { stopwatch_.Restart(); } - double Elapsed() const { return stopwatch_.Elapsed(); } - double rate() { return rate_; } - - void UpdateRate(int edges) { - if (edges && stopwatch_.Elapsed()) - rate_ = edges / stopwatch_.Elapsed(); - } - - private: - double rate_; - Stopwatch stopwatch_; - }; - - struct SlidingRateInfo { - SlidingRateInfo(int n) : rate_(-1), N(n), last_update_(-1) {} - - void Restart() { stopwatch_.Restart(); } - double rate() { return rate_; } - - void UpdateRate(int update_hint) { - if (update_hint == last_update_) - return; - last_update_ = update_hint; - - if (times_.size() == N) - times_.pop(); - times_.push(stopwatch_.Elapsed()); - if (times_.back() != times_.front()) - rate_ = times_.size() / (times_.back() - times_.front()); - } - - private: - double rate_; - Stopwatch stopwatch_; - const size_t N; - queue times_; - int last_update_; - }; - - mutable RateInfo overall_rate_; - mutable SlidingRateInfo current_rate_; -}; - -#endif // NINJA_BUILD_H_ diff --git a/ninja/src/build_log.cc b/ninja/src/build_log.cc deleted file mode 100644 index cc4e409914a..00000000000 --- a/ninja/src/build_log.cc +++ /dev/null @@ -1,424 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// On AIX, inttypes.h gets indirectly included by build_log.h. -// It's easiest just to ask for the printf format macros right away. -#ifndef _WIN32 -#ifndef __STDC_FORMAT_MACROS -#define __STDC_FORMAT_MACROS -#endif -#endif - -#include "build_log.h" - -#include -#include -#include - -#ifndef _WIN32 -#include -#include -#endif - -#include "build.h" -#include "graph.h" -#include "metrics.h" -#include "util.h" -#if defined(_MSC_VER) && (_MSC_VER < 1800) -#define strtoll _strtoi64 -#endif - -// Implementation details: -// Each run's log appends to the log file. -// To load, we run through all log entries in series, throwing away -// older runs. -// Once the number of redundant entries exceeds a threshold, we write -// out a new file and replace the existing one with it. - -namespace { - -const char kFileSignature[] = "# ninja log v%d\n"; -const int kOldestSupportedVersion = 4; -const int kCurrentVersion = 6; - -// 64bit MurmurHash2, by Austin Appleby -#if defined(_MSC_VER) -#define BIG_CONSTANT(x) (x) -#else // defined(_MSC_VER) -#define BIG_CONSTANT(x) (x##LLU) -#endif // !defined(_MSC_VER) -inline -uint64_t MurmurHash64A(const void* key, size_t len) { - static const uint64_t seed = 0xDECAFBADDECAFBADull; - const uint64_t m = BIG_CONSTANT(0xc6a4a7935bd1e995); - const int r = 47; - uint64_t h = seed ^ (len * m); - const unsigned char* data = (const unsigned char*)key; - while (len >= 8) { - uint64_t k; - memcpy(&k, data, sizeof k); - k *= m; - k ^= k >> r; - k *= m; - h ^= k; - h *= m; - data += 8; - len -= 8; - } - switch (len & 7) - { - case 7: h ^= uint64_t(data[6]) << 48; - NINJA_FALLTHROUGH; - case 6: h ^= uint64_t(data[5]) << 40; - NINJA_FALLTHROUGH; - case 5: h ^= uint64_t(data[4]) << 32; - NINJA_FALLTHROUGH; - case 4: h ^= uint64_t(data[3]) << 24; - NINJA_FALLTHROUGH; - case 3: h ^= uint64_t(data[2]) << 16; - NINJA_FALLTHROUGH; - case 2: h ^= 
uint64_t(data[1]) << 8; - NINJA_FALLTHROUGH; - case 1: h ^= uint64_t(data[0]); - h *= m; - }; - h ^= h >> r; - h *= m; - h ^= h >> r; - return h; -} -#undef BIG_CONSTANT - - -} // namespace - -// static -uint64_t BuildLog::LogEntry::HashCommand(StringPiece command) { - return MurmurHash64A(command.str_, command.len_); -} - -BuildLog::LogEntry::LogEntry(const string& output) - : output(output) {} - -BuildLog::LogEntry::LogEntry(const string& output, uint64_t command_hash, - int start_time, int end_time, TimeStamp restat_mtime) - : output(output), command_hash(command_hash), - start_time(start_time), end_time(end_time), mtime(restat_mtime) -{} - -BuildLog::BuildLog() - : log_file_(NULL), needs_recompaction_(false) {} - -BuildLog::~BuildLog() { - Close(); -} - -bool BuildLog::OpenForWrite(const string& path, const BuildLogUser& user, - string* err) { - if (needs_recompaction_) { - if (!Recompact(path, user, err)) - return false; - } - - log_file_ = fopen(path.c_str(), "ab"); - if (!log_file_) { - *err = strerror(errno); - return false; - } - setvbuf(log_file_, NULL, _IOLBF, BUFSIZ); - SetCloseOnExec(fileno(log_file_)); - - // Opening a file in append mode doesn't set the file pointer to the file's - // end on Windows. Do that explicitly. 
- fseek(log_file_, 0, SEEK_END); - - if (ftell(log_file_) == 0) { - if (fprintf(log_file_, kFileSignature, kCurrentVersion) < 0 ) { - *err = strerror(errno); - return false; - } - } - - return true; -} - -bool BuildLog::RecordCommand(Edge* edge, int start_time, int end_time, - TimeStamp mtime) { - string command = edge->EvaluateCommand(true); - uint64_t command_hash = LogEntry::HashCommand(command); - for (vector::iterator out = edge->outputs_.begin(); - out != edge->outputs_.end(); ++out) { - const string& path = (*out)->path(); - Entries::iterator i = entries_.find(path); - LogEntry* log_entry; - if (i != entries_.end()) { - log_entry = i->second; - } else { - log_entry = new LogEntry(path); - entries_.insert(Entries::value_type(log_entry->output, log_entry)); - } - log_entry->command_hash = command_hash; - log_entry->start_time = start_time; - log_entry->end_time = end_time; - log_entry->mtime = mtime; - - if (log_file_) { - if (!WriteEntry(log_file_, *log_entry)) - return false; - if (fflush(log_file_) != 0) { - return false; - } - } - } - return true; -} - -void BuildLog::Close() { - if (log_file_) - fclose(log_file_); - log_file_ = NULL; -} - -struct LineReader { - explicit LineReader(FILE* file) - : file_(file), buf_end_(buf_), line_start_(buf_), line_end_(NULL) { - memset(buf_, 0, sizeof(buf_)); - } - - // Reads a \n-terminated line from the file passed to the constructor. - // On return, *line_start points to the beginning of the next line, and - // *line_end points to the \n at the end of the line. If no newline is seen - // in a fixed buffer size, *line_end is set to NULL. Returns false on EOF. - bool ReadLine(char** line_start, char** line_end) { - if (line_start_ >= buf_end_ || !line_end_) { - // Buffer empty, refill. - size_t size_read = fread(buf_, 1, sizeof(buf_), file_); - if (!size_read) - return false; - line_start_ = buf_; - buf_end_ = buf_ + size_read; - } else { - // Advance to next line in buffer. 
- line_start_ = line_end_ + 1; - } - - line_end_ = (char*)memchr(line_start_, '\n', buf_end_ - line_start_); - if (!line_end_) { - // No newline. Move rest of data to start of buffer, fill rest. - size_t already_consumed = line_start_ - buf_; - size_t size_rest = (buf_end_ - buf_) - already_consumed; - memmove(buf_, line_start_, size_rest); - - size_t read = fread(buf_ + size_rest, 1, sizeof(buf_) - size_rest, file_); - buf_end_ = buf_ + size_rest + read; - line_start_ = buf_; - line_end_ = (char*)memchr(line_start_, '\n', buf_end_ - line_start_); - } - - *line_start = line_start_; - *line_end = line_end_; - return true; - } - - private: - FILE* file_; - char buf_[256 << 10]; - char* buf_end_; // Points one past the last valid byte in |buf_|. - - char* line_start_; - // Points at the next \n in buf_ after line_start, or NULL. - char* line_end_; -}; - -bool BuildLog::Load(const string& path, string* err) { - METRIC_RECORD(".ninja_log load"); - FILE* file = fopen(path.c_str(), "r"); - if (!file) { - if (errno == ENOENT) - return true; - *err = strerror(errno); - return false; - } - - int log_version = 0; - int unique_entry_count = 0; - int total_entry_count = 0; - - LineReader reader(file); - char* line_start = 0; - char* line_end = 0; - while (reader.ReadLine(&line_start, &line_end)) { - if (!log_version) { - sscanf(line_start, kFileSignature, &log_version); - - if (log_version < kOldestSupportedVersion) { - *err = ("build log version invalid, perhaps due to being too old; " - "starting over"); - fclose(file); - unlink(path.c_str()); - // Don't report this as a failure. An empty build log will cause - // us to rebuild the outputs anyway. - return true; - } - } - - // If no newline was found in this chunk, read the next. 
- if (!line_end) - continue; - if (*line_start == '#') - continue; - const char kFieldSeparator = '\t'; - - char* start = line_start; - char* end = (char*)memchr(start, kFieldSeparator, line_end - start); - if (!end) - continue; - *end = 0; - - int start_time = 0, end_time = 0; - TimeStamp restat_mtime = 0; - - start_time = atoi(start); - start = end + 1; - - end = (char*)memchr(start, kFieldSeparator, line_end - start); - if (!end) - continue; - *end = 0; - end_time = atoi(start); - start = end + 1; - - end = (char*)memchr(start, kFieldSeparator, line_end - start); - if (!end) - continue; - *end = 0; - restat_mtime = strtoll(start, NULL, 10); - start = end + 1; - - end = (char*)memchr(start, kFieldSeparator, line_end - start); - if (!end) - continue; - string output = string(start, end - start); - - start = end + 1; - end = line_end; - - LogEntry* entry; - Entries::iterator i = entries_.find(output); - if (i != entries_.end()) { - entry = i->second; - } else { - entry = new LogEntry(output); - entries_.insert(Entries::value_type(entry->output, entry)); - ++unique_entry_count; - } - ++total_entry_count; - - entry->start_time = start_time; - entry->end_time = end_time; - entry->mtime = restat_mtime; - // if (log_version >= 5) { - // The old version does not use the HASH - char c = *end; - *end = '\0'; - entry->command_hash = (uint64_t)strtoull(start, NULL, 16); - *end = c; - - // } else { - // entry->command_hash = LogEntry::HashCommand(StringPiece(start, - // end - start)); - // } - } - fclose(file); - - if (!line_start) { - return true; // file was empty - } - - // Decide whether it's time to rebuild the log: - // - if we're upgrading versions - // - if it's getting large - int kMinCompactionEntryCount = 100; - int kCompactionRatio = 3; - if (log_version < kCurrentVersion) { - needs_recompaction_ = true; - } else if (total_entry_count > kMinCompactionEntryCount && - total_entry_count > unique_entry_count * kCompactionRatio) { - needs_recompaction_ = true; - } - - 
return true; -} - -BuildLog::LogEntry* BuildLog::LookupByOutput(const string& path) { - Entries::iterator i = entries_.find(path); - if (i != entries_.end()) - return i->second; - return NULL; -} - -bool BuildLog::WriteEntry(FILE* f, const LogEntry& entry) { - return fprintf(f, "%d\t%d\t%" PRId64 "\t%s\t%" PRIx64 "\n", - entry.start_time, entry.end_time, entry.mtime, - entry.output.c_str(), entry.command_hash) > 0; -} - -bool BuildLog::Recompact(const string& path, const BuildLogUser& user, - string* err) { - METRIC_RECORD(".ninja_log recompact"); - - Close(); - string temp_path = path + ".recompact"; - FILE* f = fopen(temp_path.c_str(), "wb"); - if (!f) { - *err = strerror(errno); - return false; - } - - if (fprintf(f, kFileSignature, kCurrentVersion) < 0) { - *err = strerror(errno); - fclose(f); - return false; - } - - vector dead_outputs; - for (Entries::iterator i = entries_.begin(); i != entries_.end(); ++i) { - if (user.IsPathDead(i->first)) { - dead_outputs.push_back(i->first); - continue; - } - - if (!WriteEntry(f, *i->second)) { - *err = strerror(errno); - fclose(f); - return false; - } - } - - for (size_t i = 0; i < dead_outputs.size(); ++i) - entries_.erase(dead_outputs[i]); - - fclose(f); - if (unlink(path.c_str()) < 0) { - *err = strerror(errno); - return false; - } - - if (rename(temp_path.c_str(), path.c_str()) < 0) { - *err = strerror(errno); - return false; - } - - return true; -} diff --git a/ninja/src/build_log.h b/ninja/src/build_log.h deleted file mode 100644 index 5268fabb684..00000000000 --- a/ninja/src/build_log.h +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_BUILD_LOG_H_ -#define NINJA_BUILD_LOG_H_ - -#include -#include -using namespace std; - -#include "hash_map.h" -#include "timestamp.h" -#include "util.h" // uint64_t - -struct Edge; - -/// Can answer questions about the manifest for the BuildLog. -struct BuildLogUser { - /// Return if a given output is no longer part of the build manifest. - /// This is only called during recompaction and doesn't have to be fast. - virtual bool IsPathDead(StringPiece s) const = 0; -}; - -/// Store a log of every command ran for every build. -/// It has a few uses: -/// -/// 1) (hashes of) command lines for existing output files, so we know -/// when we need to rebuild due to the command changing -/// 2) timing information, perhaps for generating reports -/// 3) restat information -struct BuildLog { - BuildLog(); - ~BuildLog(); - - bool OpenForWrite(const string& path, const BuildLogUser& user, string* err); - bool RecordCommand(Edge* edge, int start_time, int end_time, - TimeStamp mtime = 0); - void Close(); - - /// Load the on-disk log. - bool Load(const string& path, string* err); - - struct LogEntry { - string output; - uint64_t command_hash; - int start_time; - int end_time; - TimeStamp mtime; - - static uint64_t HashCommand(StringPiece command); - - // Used by tests. 
- bool operator==(const LogEntry& o) { - return output == o.output && command_hash == o.command_hash && - start_time == o.start_time && end_time == o.end_time && - mtime == o.mtime; - } - - explicit LogEntry(const string& output); - LogEntry(const string& output, uint64_t command_hash, - int start_time, int end_time, TimeStamp restat_mtime); - }; - - /// Lookup a previously-run command by its output path. - LogEntry* LookupByOutput(const string& path); - - /// Serialize an entry into a log file. - bool WriteEntry(FILE* f, const LogEntry& entry); - - /// Rewrite the known log entries, throwing away old data. - bool Recompact(const string& path, const BuildLogUser& user, string* err); - - typedef ExternalStringHashMap::Type Entries; - const Entries& entries() const { return entries_; } - - private: - Entries entries_; - FILE* log_file_; - bool needs_recompaction_; -}; - -#endif // NINJA_BUILD_LOG_H_ diff --git a/ninja/src/build_log_perftest.cc b/ninja/src/build_log_perftest.cc deleted file mode 100644 index e471d138cc1..00000000000 --- a/ninja/src/build_log_perftest.cc +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include -#include - -#include "build_log.h" -#include "graph.h" -#include "manifest_parser.h" -#include "state.h" -#include "util.h" -#include "metrics.h" - -#ifndef _WIN32 -#include -#endif - -const char kTestFilename[] = "BuildLogPerfTest-tempfile"; - -struct NoDeadPaths : public BuildLogUser { - virtual bool IsPathDead(StringPiece) const { return false; } -}; - -bool WriteTestData(string* err) { - BuildLog log; - - NoDeadPaths no_dead_paths; - if (!log.OpenForWrite(kTestFilename, no_dead_paths, err)) - return false; - - /* - A histogram of command lengths in chromium. For example, 407 builds, - 1.4% of all builds, had commands longer than 32 bytes but shorter than 64. - 32 407 1.4% - 64 183 0.6% - 128 1461 5.1% - 256 791 2.8% - 512 1314 4.6% - 1024 6114 21.3% - 2048 11759 41.0% - 4096 2056 7.2% - 8192 4567 15.9% - 16384 13 0.0% - 32768 4 0.0% - 65536 5 0.0% - The average command length is 4.1 kB and there were 28674 commands in total, - which makes for a total log size of ~120 MB (also counting output filenames). - - Based on this, write 30000 many 4 kB long command lines. - */ - - // ManifestParser is the only object allowed to create Rules. - const size_t kRuleSize = 4000; - string long_rule_command = "gcc "; - for (int i = 0; long_rule_command.size() < kRuleSize; ++i) { - char buf[80]; - sprintf(buf, "-I../../and/arbitrary/but/fairly/long/path/suffixed/%d ", i); - long_rule_command += buf; - } - long_rule_command += "$in -o $out\n"; - - State state; - ManifestParser parser(&state, NULL); - if (!parser.ParseTest("rule cxx\n command = " + long_rule_command, err)) - return false; - - // Create build edges. Using ManifestParser is as fast as using the State api - // for edge creation, so just use that. 
- const int kNumCommands = 30000; - string build_rules; - for (int i = 0; i < kNumCommands; ++i) { - char buf[80]; - sprintf(buf, "build input%d.o: cxx input%d.cc\n", i, i); - build_rules += buf; - } - - if (!parser.ParseTest(build_rules, err)) - return false; - - for (int i = 0; i < kNumCommands; ++i) { - log.RecordCommand(state.edges_[i], - /*start_time=*/100 * i, - /*end_time=*/100 * i + 1, - /*mtime=*/0); - } - - return true; -} - -int main() { - vector times; - string err; - - if (!WriteTestData(&err)) { - fprintf(stderr, "Failed to write test data: %s\n", err.c_str()); - return 1; - } - - { - // Read once to warm up disk cache. - BuildLog log; - if (!log.Load(kTestFilename, &err)) { - fprintf(stderr, "Failed to read test data: %s\n", err.c_str()); - return 1; - } - } - const int kNumRepetitions = 5; - for (int i = 0; i < kNumRepetitions; ++i) { - int64_t start = GetTimeMillis(); - BuildLog log; - if (!log.Load(kTestFilename, &err)) { - fprintf(stderr, "Failed to read test data: %s\n", err.c_str()); - return 1; - } - int delta = (int)(GetTimeMillis() - start); - printf("%dms\n", delta); - times.push_back(delta); - } - - int min = times[0]; - int max = times[0]; - float total = 0; - for (size_t i = 0; i < times.size(); ++i) { - total += times[i]; - if (times[i] < min) - min = times[i]; - else if (times[i] > max) - max = times[i]; - } - - printf("min %dms max %dms avg %.1fms\n", - min, max, total / times.size()); - - unlink(kTestFilename); - - return 0; -} - diff --git a/ninja/src/build_log_test.cc b/ninja/src/build_log_test.cc deleted file mode 100644 index eea818f9615..00000000000 --- a/ninja/src/build_log_test.cc +++ /dev/null @@ -1,308 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "build_log.h" - -#include "util.h" -#include "test.h" - -#include -#ifdef _WIN32 -#include -#include -#else -#include -#include -#endif - -namespace { - -const char kTestFilename[] = "BuildLogTest-tempfile"; - -struct BuildLogTest : public StateTestWithBuiltinRules, public BuildLogUser { - virtual void SetUp() { - // In case a crashing test left a stale file behind. - unlink(kTestFilename); - } - virtual void TearDown() { - unlink(kTestFilename); - } - virtual bool IsPathDead(StringPiece s) const { return false; } -}; - -TEST_F(BuildLogTest, WriteRead) { - AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n"); - - BuildLog log1; - string err; - EXPECT_TRUE(log1.OpenForWrite(kTestFilename, *this, &err)); - ASSERT_EQ("", err); - log1.RecordCommand(state_.edges_[0], 15, 18); - log1.RecordCommand(state_.edges_[1], 20, 25); - log1.Close(); - - BuildLog log2; - EXPECT_TRUE(log2.Load(kTestFilename, &err)); - ASSERT_EQ("", err); - - ASSERT_EQ(2u, log1.entries().size()); - ASSERT_EQ(2u, log2.entries().size()); - BuildLog::LogEntry* e1 = log1.LookupByOutput("out"); - ASSERT_TRUE(e1); - BuildLog::LogEntry* e2 = log2.LookupByOutput("out"); - ASSERT_TRUE(e2); - ASSERT_TRUE(*e1 == *e2); - ASSERT_EQ(15, e1->start_time); - ASSERT_EQ("out", e1->output); -} - -TEST_F(BuildLogTest, FirstWriteAddsSignature) { - const char kExpectedContent[] = "# ninja log vX\n" - "# start_time end_time mtime command hash\n"; - const size_t kVersionPos = 13; // Points at 'X'. 
- - BuildLog log; - string contents, err; - - EXPECT_TRUE(log.OpenForWrite(kTestFilename, *this, &err)); - ASSERT_EQ("", err); - log.Close(); - - ASSERT_EQ(0, ReadFile(kTestFilename, &contents, &err)); - ASSERT_EQ("", err); - if (contents.size() >= kVersionPos) - contents[kVersionPos] = 'X'; - EXPECT_EQ(kExpectedContent, contents); - - // Opening the file anew shouldn't add a second version string. - EXPECT_TRUE(log.OpenForWrite(kTestFilename, *this, &err)); - ASSERT_EQ("", err); - log.Close(); - - contents.clear(); - ASSERT_EQ(0, ReadFile(kTestFilename, &contents, &err)); - ASSERT_EQ("", err); - if (contents.size() >= kVersionPos) - contents[kVersionPos] = 'X'; - EXPECT_EQ(kExpectedContent, contents); -} - -TEST_F(BuildLogTest, DoubleEntry) { - FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "0\t1\t2\tout\tcommand abc\n"); - fprintf(f, "3\t4\t5\tout\tcommand def\n"); - fclose(f); - - string err; - BuildLog log; - EXPECT_TRUE(log.Load(kTestFilename, &err)); - ASSERT_EQ("", err); - - BuildLog::LogEntry* e = log.LookupByOutput("out"); - ASSERT_TRUE(e); - ASSERT_NO_FATAL_FAILURE(AssertHash("command def", e->command_hash)); -} - -TEST_F(BuildLogTest, Truncate) { - AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n"); - - { - BuildLog log1; - string err; - EXPECT_TRUE(log1.OpenForWrite(kTestFilename, *this, &err)); - ASSERT_EQ("", err); - log1.RecordCommand(state_.edges_[0], 15, 18); - log1.RecordCommand(state_.edges_[1], 20, 25); - log1.Close(); - } - - struct stat statbuf; - ASSERT_EQ(0, stat(kTestFilename, &statbuf)); - ASSERT_GT(statbuf.st_size, 0); - - // For all possible truncations of the input file, assert that we don't - // crash when parsing. 
- for (off_t size = statbuf.st_size; size > 0; --size) { - BuildLog log2; - string err; - EXPECT_TRUE(log2.OpenForWrite(kTestFilename, *this, &err)); - ASSERT_EQ("", err); - log2.RecordCommand(state_.edges_[0], 15, 18); - log2.RecordCommand(state_.edges_[1], 20, 25); - log2.Close(); - - ASSERT_TRUE(Truncate(kTestFilename, size, &err)); - - BuildLog log3; - err.clear(); - ASSERT_TRUE(log3.Load(kTestFilename, &err) || !err.empty()); - } -} - -TEST_F(BuildLogTest, ObsoleteOldVersion) { - FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v3\n"); - fprintf(f, "123 456 0 out command\n"); - fclose(f); - - string err; - BuildLog log; - EXPECT_TRUE(log.Load(kTestFilename, &err)); - ASSERT_NE(err.find("version"), string::npos); -} - -TEST_F(BuildLogTest, SpacesInOutputV4) { - FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "123\t456\t456\tout with space\tcommand\n"); - fclose(f); - - string err; - BuildLog log; - EXPECT_TRUE(log.Load(kTestFilename, &err)); - ASSERT_EQ("", err); - - BuildLog::LogEntry* e = log.LookupByOutput("out with space"); - ASSERT_TRUE(e); - ASSERT_EQ(123, e->start_time); - ASSERT_EQ(456, e->end_time); - ASSERT_EQ(456, e->mtime); - ASSERT_NO_FATAL_FAILURE(AssertHash("command", e->command_hash)); -} - -TEST_F(BuildLogTest, DuplicateVersionHeader) { - // Old versions of ninja accidentally wrote multiple version headers to the - // build log on Windows. This shouldn't crash, and the second version header - // should be ignored. 
- FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "123\t456\t456\tout\tcommand\n"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "456\t789\t789\tout2\tcommand2\n"); - fclose(f); - - string err; - BuildLog log; - EXPECT_TRUE(log.Load(kTestFilename, &err)); - ASSERT_EQ("", err); - - BuildLog::LogEntry* e = log.LookupByOutput("out"); - ASSERT_TRUE(e); - ASSERT_EQ(123, e->start_time); - ASSERT_EQ(456, e->end_time); - ASSERT_EQ(456, e->mtime); - ASSERT_NO_FATAL_FAILURE(AssertHash("command", e->command_hash)); - - e = log.LookupByOutput("out2"); - ASSERT_TRUE(e); - ASSERT_EQ(456, e->start_time); - ASSERT_EQ(789, e->end_time); - ASSERT_EQ(789, e->mtime); - ASSERT_NO_FATAL_FAILURE(AssertHash("command2", e->command_hash)); -} - -TEST_F(BuildLogTest, VeryLongInputLine) { - // Ninja's build log buffer is currently 256kB. Lines longer than that are - // silently ignored, but don't affect parsing of other lines. - FILE* f = fopen(kTestFilename, "wb"); - fprintf(f, "# ninja log v4\n"); - fprintf(f, "123\t456\t456\tout\tcommand start"); - for (size_t i = 0; i < (512 << 10) / strlen(" more_command"); ++i) - fputs(" more_command", f); - fprintf(f, "\n"); - fprintf(f, "456\t789\t789\tout2\tcommand2\n"); - fclose(f); - - string err; - BuildLog log; - EXPECT_TRUE(log.Load(kTestFilename, &err)); - ASSERT_EQ("", err); - - BuildLog::LogEntry* e = log.LookupByOutput("out"); - ASSERT_EQ(NULL, e); - - e = log.LookupByOutput("out2"); - ASSERT_TRUE(e); - ASSERT_EQ(456, e->start_time); - ASSERT_EQ(789, e->end_time); - ASSERT_EQ(789, e->mtime); - ASSERT_NO_FATAL_FAILURE(AssertHash("command2", e->command_hash)); -} - -TEST_F(BuildLogTest, MultiTargetEdge) { - AssertParse(&state_, -"build out out.d: cat\n"); - - BuildLog log; - log.RecordCommand(state_.edges_[0], 21, 22); - - ASSERT_EQ(2u, log.entries().size()); - BuildLog::LogEntry* e1 = log.LookupByOutput("out"); - ASSERT_TRUE(e1); - BuildLog::LogEntry* e2 = log.LookupByOutput("out.d"); - ASSERT_TRUE(e2); 
- ASSERT_EQ("out", e1->output); - ASSERT_EQ("out.d", e2->output); - ASSERT_EQ(21, e1->start_time); - ASSERT_EQ(21, e2->start_time); - ASSERT_EQ(22, e2->end_time); - ASSERT_EQ(22, e2->end_time); -} - -struct BuildLogRecompactTest : public BuildLogTest { - virtual bool IsPathDead(StringPiece s) const { return s == "out2"; } -}; - -TEST_F(BuildLogRecompactTest, Recompact) { - AssertParse(&state_, -"build out: cat in\n" -"build out2: cat in\n"); - - BuildLog log1; - string err; - EXPECT_TRUE(log1.OpenForWrite(kTestFilename, *this, &err)); - ASSERT_EQ("", err); - // Record the same edge several times, to trigger recompaction - // the next time the log is opened. - for (int i = 0; i < 200; ++i) - log1.RecordCommand(state_.edges_[0], 15, 18 + i); - log1.RecordCommand(state_.edges_[1], 21, 22); - log1.Close(); - - // Load... - BuildLog log2; - EXPECT_TRUE(log2.Load(kTestFilename, &err)); - ASSERT_EQ("", err); - ASSERT_EQ(2u, log2.entries().size()); - ASSERT_TRUE(log2.LookupByOutput("out")); - ASSERT_TRUE(log2.LookupByOutput("out2")); - // ...and force a recompaction. - EXPECT_TRUE(log2.OpenForWrite(kTestFilename, *this, &err)); - log2.Close(); - - // "out2" is dead, it should've been removed. - BuildLog log3; - EXPECT_TRUE(log2.Load(kTestFilename, &err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, log2.entries().size()); - ASSERT_TRUE(log2.LookupByOutput("out")); - ASSERT_FALSE(log2.LookupByOutput("out2")); -} - -} // anonymous namespace diff --git a/ninja/src/build_test.cc b/ninja/src/build_test.cc deleted file mode 100644 index b5dbc6c1f62..00000000000 --- a/ninja/src/build_test.cc +++ /dev/null @@ -1,3079 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "build.h" - -#include - -#include "build_log.h" -#include "deps_log.h" -#include "graph.h" -#include "test.h" - -struct CompareEdgesByOutput { - static bool cmp(const Edge* a, const Edge* b) { - return a->outputs_[0]->path() < b->outputs_[0]->path(); - } -}; - -/// Fixture for tests involving Plan. -// Though Plan doesn't use State, it's useful to have one around -// to create Nodes and Edges. -struct PlanTest : public StateTestWithBuiltinRules { - Plan plan_; - - /// Because FindWork does not return Edges in any sort of predictable order, - // provide a means to get available Edges in order and in a format which is - // easy to write tests around. 
- void FindWorkSorted(deque* ret, int count) { - for (int i = 0; i < count; ++i) { - ASSERT_TRUE(plan_.more_to_do()); - Edge* edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ret->push_back(edge); - } - ASSERT_FALSE(plan_.FindWork()); - sort(ret->begin(), ret->end(), CompareEdgesByOutput::cmp); - } - - void TestPoolWithDepthOne(const char *test_case); -}; - -TEST_F(PlanTest, Basic) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n")); - GetNode("mid")->MarkDirty(); - GetNode("out")->MarkDirty(); - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_EQ("in", edge->inputs_[0]->path()); - ASSERT_EQ("mid", edge->outputs_[0]->path()); - - ASSERT_FALSE(plan_.FindWork()); - - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_EQ("mid", edge->inputs_[0]->path()); - ASSERT_EQ("out", edge->outputs_[0]->path()); - - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - ASSERT_FALSE(plan_.more_to_do()); - edge = plan_.FindWork(); - ASSERT_EQ(0, edge); -} - -// Test that two outputs from one rule can be handled as inputs to the next. 
-TEST_F(PlanTest, DoubleOutputDirect) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat mid1 mid2\n" -"build mid1 mid2: cat in\n")); - GetNode("mid1")->MarkDirty(); - GetNode("mid2")->MarkDirty(); - GetNode("out")->MarkDirty(); - - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge; - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat in - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat mid1 mid2 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_FALSE(edge); // done -} - -// Test that two outputs from one rule can eventually be routed to another. -TEST_F(PlanTest, DoubleOutputIndirect) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat b1 b2\n" -"build b1: cat a1\n" -"build b2: cat a2\n" -"build a1 a2: cat in\n")); - GetNode("a1")->MarkDirty(); - GetNode("a2")->MarkDirty(); - GetNode("b1")->MarkDirty(); - GetNode("b2")->MarkDirty(); - GetNode("out")->MarkDirty(); - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge; - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat in - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat a1 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat a2 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat b1 b2 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_FALSE(edge); // done -} - -// Test that two edges from one output can both execute. 
-TEST_F(PlanTest, DoubleDependent) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat a1 a2\n" -"build a1: cat mid\n" -"build a2: cat mid\n" -"build mid: cat in\n")); - GetNode("mid")->MarkDirty(); - GetNode("a1")->MarkDirty(); - GetNode("a2")->MarkDirty(); - GetNode("out")->MarkDirty(); - - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge; - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat in - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat mid - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat mid - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); // cat a1 a2 - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_FALSE(edge); // done -} - -void PlanTest::TestPoolWithDepthOne(const char* test_case) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, test_case)); - GetNode("out1")->MarkDirty(); - GetNode("out2")->MarkDirty(); - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out1"), &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(plan_.AddTarget(GetNode("out2"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_EQ("in", edge->inputs_[0]->path()); - ASSERT_EQ("out1", edge->outputs_[0]->path()); - - // This will be false since poolcat is serialized - ASSERT_FALSE(plan_.FindWork()); - - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_EQ("in", edge->inputs_[0]->path()); - ASSERT_EQ("out2", edge->outputs_[0]->path()); - - ASSERT_FALSE(plan_.FindWork()); - - plan_.EdgeFinished(edge, 
Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - ASSERT_FALSE(plan_.more_to_do()); - edge = plan_.FindWork(); - ASSERT_EQ(0, edge); -} - -TEST_F(PlanTest, PoolWithDepthOne) { - TestPoolWithDepthOne( -"pool foobar\n" -" depth = 1\n" -"rule poolcat\n" -" command = cat $in > $out\n" -" pool = foobar\n" -"build out1: poolcat in\n" -"build out2: poolcat in\n"); -} - -TEST_F(PlanTest, ConsolePool) { - TestPoolWithDepthOne( -"rule poolcat\n" -" command = cat $in > $out\n" -" pool = console\n" -"build out1: poolcat in\n" -"build out2: poolcat in\n"); -} - -TEST_F(PlanTest, PoolsWithDepthTwo) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"pool foobar\n" -" depth = 2\n" -"pool bazbin\n" -" depth = 2\n" -"rule foocat\n" -" command = cat $in > $out\n" -" pool = foobar\n" -"rule bazcat\n" -" command = cat $in > $out\n" -" pool = bazbin\n" -"build out1: foocat in\n" -"build out2: foocat in\n" -"build out3: foocat in\n" -"build outb1: bazcat in\n" -"build outb2: bazcat in\n" -"build outb3: bazcat in\n" -" pool =\n" -"build allTheThings: cat out1 out2 out3 outb1 outb2 outb3\n" -)); - // Mark all the out* nodes dirty - for (int i = 0; i < 3; ++i) { - GetNode("out" + string(1, '1' + static_cast(i)))->MarkDirty(); - GetNode("outb" + string(1, '1' + static_cast(i)))->MarkDirty(); - } - GetNode("allTheThings")->MarkDirty(); - - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("allTheThings"), &err)); - ASSERT_EQ("", err); - - deque edges; - FindWorkSorted(&edges, 5); - - for (int i = 0; i < 4; ++i) { - Edge *edge = edges[i]; - ASSERT_EQ("in", edge->inputs_[0]->path()); - string base_name(i < 2 ? 
"out" : "outb"); - ASSERT_EQ(base_name + string(1, '1' + (i % 2)), edge->outputs_[0]->path()); - } - - // outb3 is exempt because it has an empty pool - Edge* edge = edges[4]; - ASSERT_TRUE(edge); - ASSERT_EQ("in", edge->inputs_[0]->path()); - ASSERT_EQ("outb3", edge->outputs_[0]->path()); - - // finish out1 - plan_.EdgeFinished(edges.front(), Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - edges.pop_front(); - - // out3 should be available - Edge* out3 = plan_.FindWork(); - ASSERT_TRUE(out3); - ASSERT_EQ("in", out3->inputs_[0]->path()); - ASSERT_EQ("out3", out3->outputs_[0]->path()); - - ASSERT_FALSE(plan_.FindWork()); - - plan_.EdgeFinished(out3, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - ASSERT_FALSE(plan_.FindWork()); - - for (deque::iterator it = edges.begin(); it != edges.end(); ++it) { - plan_.EdgeFinished(*it, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - } - - Edge* last = plan_.FindWork(); - ASSERT_TRUE(last); - ASSERT_EQ("allTheThings", last->outputs_[0]->path()); - - plan_.EdgeFinished(last, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - ASSERT_FALSE(plan_.more_to_do()); - ASSERT_FALSE(plan_.FindWork()); -} - -TEST_F(PlanTest, PoolWithRedundantEdges) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "pool compile\n" - " depth = 1\n" - "rule gen_foo\n" - " command = touch foo.cpp\n" - "rule gen_bar\n" - " command = touch bar.cpp\n" - "rule echo\n" - " command = echo $out > $out\n" - "build foo.cpp.obj: echo foo.cpp || foo.cpp\n" - " pool = compile\n" - "build bar.cpp.obj: echo bar.cpp || bar.cpp\n" - " pool = compile\n" - "build libfoo.a: echo foo.cpp.obj bar.cpp.obj\n" - "build foo.cpp: gen_foo\n" - "build bar.cpp: gen_bar\n" - "build all: phony libfoo.a\n")); - GetNode("foo.cpp")->MarkDirty(); - GetNode("foo.cpp.obj")->MarkDirty(); - GetNode("bar.cpp")->MarkDirty(); - GetNode("bar.cpp.obj")->MarkDirty(); - GetNode("libfoo.a")->MarkDirty(); - GetNode("all")->MarkDirty(); - string err; - 
EXPECT_TRUE(plan_.AddTarget(GetNode("all"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge = NULL; - - deque initial_edges; - FindWorkSorted(&initial_edges, 2); - - edge = initial_edges[1]; // Foo first - ASSERT_EQ("foo.cpp", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_FALSE(plan_.FindWork()); - ASSERT_EQ("foo.cpp", edge->inputs_[0]->path()); - ASSERT_EQ("foo.cpp", edge->inputs_[1]->path()); - ASSERT_EQ("foo.cpp.obj", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = initial_edges[0]; // Now for bar - ASSERT_EQ("bar.cpp", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_FALSE(plan_.FindWork()); - ASSERT_EQ("bar.cpp", edge->inputs_[0]->path()); - ASSERT_EQ("bar.cpp", edge->inputs_[1]->path()); - ASSERT_EQ("bar.cpp.obj", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_FALSE(plan_.FindWork()); - ASSERT_EQ("foo.cpp.obj", edge->inputs_[0]->path()); - ASSERT_EQ("bar.cpp.obj", edge->inputs_[1]->path()); - ASSERT_EQ("libfoo.a", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_FALSE(plan_.FindWork()); - ASSERT_EQ("libfoo.a", edge->inputs_[0]->path()); - ASSERT_EQ("all", edge->outputs_[0]->path()); - plan_.EdgeFinished(edge, Plan::kEdgeSucceeded, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_FALSE(edge); - ASSERT_FALSE(plan_.more_to_do()); -} - -TEST_F(PlanTest, PoolWithFailingEdge) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "pool foobar\n" - " depth = 1\n" - "rule poolcat\n" - " command 
= cat $in > $out\n" - " pool = foobar\n" - "build out1: poolcat in\n" - "build out2: poolcat in\n")); - GetNode("out1")->MarkDirty(); - GetNode("out2")->MarkDirty(); - string err; - EXPECT_TRUE(plan_.AddTarget(GetNode("out1"), &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(plan_.AddTarget(GetNode("out2"), &err)); - ASSERT_EQ("", err); - ASSERT_TRUE(plan_.more_to_do()); - - Edge* edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_EQ("in", edge->inputs_[0]->path()); - ASSERT_EQ("out1", edge->outputs_[0]->path()); - - // This will be false since poolcat is serialized - ASSERT_FALSE(plan_.FindWork()); - - plan_.EdgeFinished(edge, Plan::kEdgeFailed, &err); - ASSERT_EQ("", err); - - edge = plan_.FindWork(); - ASSERT_TRUE(edge); - ASSERT_EQ("in", edge->inputs_[0]->path()); - ASSERT_EQ("out2", edge->outputs_[0]->path()); - - ASSERT_FALSE(plan_.FindWork()); - - plan_.EdgeFinished(edge, Plan::kEdgeFailed, &err); - ASSERT_EQ("", err); - - ASSERT_TRUE(plan_.more_to_do()); // Jobs have failed - edge = plan_.FindWork(); - ASSERT_EQ(0, edge); -} - -/// Fake implementation of CommandRunner, useful for tests. 
-struct FakeCommandRunner : public CommandRunner { - explicit FakeCommandRunner(VirtualFileSystem* fs) : - max_active_edges_(1), fs_(fs) {} - - // CommandRunner impl - virtual bool CanRunMore(); - virtual bool StartCommand(Edge* edge); - virtual bool WaitForCommand(Result* result); - virtual vector GetActiveEdges(); - virtual void Abort(); - - vector commands_ran_; - vector active_edges_; - size_t max_active_edges_; - VirtualFileSystem* fs_; -}; - -struct BuildTest : public StateTestWithBuiltinRules, public BuildLogUser { - BuildTest() : config_(MakeConfig()), command_runner_(&fs_), - builder_(&state_, config_, NULL, NULL, &fs_), - status_(config_) { - } - - virtual void SetUp() { - StateTestWithBuiltinRules::SetUp(); - - builder_.command_runner_.reset(&command_runner_); - AssertParse(&state_, -"build cat1: cat in1\n" -"build cat2: cat in1 in2\n" -"build cat12: cat cat1 cat2\n"); - - fs_.Create("in1", ""); - fs_.Create("in2", ""); - } - - ~BuildTest() { - builder_.command_runner_.release(); - } - - virtual bool IsPathDead(StringPiece s) const { return false; } - - /// Rebuild target in the 'working tree' (fs_). - /// State of command_runner_ and logs contents (if specified) ARE MODIFIED. - /// Handy to check for NOOP builds, and higher-level rebuild tests. - void RebuildTarget(const string& target, const char* manifest, - const char* log_path = NULL, const char* deps_path = NULL, - State* state = NULL); - - // Mark a path dirty. 
- void Dirty(const string& path); - - BuildConfig MakeConfig() { - BuildConfig config; - config.verbosity = BuildConfig::QUIET; - return config; - } - - BuildConfig config_; - FakeCommandRunner command_runner_; - VirtualFileSystem fs_; - Builder builder_; - - BuildStatus status_; -}; - -void BuildTest::RebuildTarget(const string& target, const char* manifest, - const char* log_path, const char* deps_path, - State* state) { - State local_state, *pstate = &local_state; - if (state) - pstate = state; - ASSERT_NO_FATAL_FAILURE(AddCatRule(pstate)); - AssertParse(pstate, manifest); - - string err; - BuildLog build_log, *pbuild_log = NULL; - if (log_path) { - ASSERT_TRUE(build_log.Load(log_path, &err)); - ASSERT_TRUE(build_log.OpenForWrite(log_path, *this, &err)); - ASSERT_EQ("", err); - pbuild_log = &build_log; - } - - DepsLog deps_log, *pdeps_log = NULL; - if (deps_path) { - ASSERT_TRUE(deps_log.Load(deps_path, pstate, &err)); - ASSERT_TRUE(deps_log.OpenForWrite(deps_path, &err)); - ASSERT_EQ("", err); - pdeps_log = &deps_log; - } - - Builder builder(pstate, config_, pbuild_log, pdeps_log, &fs_); - EXPECT_TRUE(builder.AddTarget(target, &err)); - - command_runner_.commands_ran_.clear(); - builder.command_runner_.reset(&command_runner_); - if (!builder.AlreadyUpToDate()) { - bool build_res = builder.Build(&err); - EXPECT_TRUE(build_res); - } - builder.command_runner_.release(); -} - -bool FakeCommandRunner::CanRunMore() { - return active_edges_.size() < max_active_edges_; -} - -bool FakeCommandRunner::StartCommand(Edge* edge) { - assert(active_edges_.size() < max_active_edges_); - assert(find(active_edges_.begin(), active_edges_.end(), edge) - == active_edges_.end()); - commands_ran_.push_back(edge->EvaluateCommand()); - if (edge->rule().name() == "cat" || - edge->rule().name() == "cat_rsp" || - edge->rule().name() == "cat_rsp_out" || - edge->rule().name() == "cc" || - edge->rule().name() == "touch" || - edge->rule().name() == "touch-interrupt" || - edge->rule().name() == 
"touch-fail-tick2") { - for (vector::iterator out = edge->outputs_.begin(); - out != edge->outputs_.end(); ++out) { - fs_->Create((*out)->path(), ""); - } - } else if (edge->rule().name() == "true" || - edge->rule().name() == "fail" || - edge->rule().name() == "interrupt" || - edge->rule().name() == "console") { - // Don't do anything. - } else if (edge->rule().name() == "cp") { - assert(!edge->inputs_.empty()); - assert(edge->outputs_.size() == 1); - string content; - string err; - if (fs_->ReadFile(edge->inputs_[0]->path(), &content, &err) == - DiskInterface::Okay) - fs_->WriteFile(edge->outputs_[0]->path(), content); - } else { - printf("unknown command\n"); - return false; - } - - active_edges_.push_back(edge); - - // Allow tests to control the order by the name of the first output. - sort(active_edges_.begin(), active_edges_.end(), - CompareEdgesByOutput::cmp); - - return true; -} - -bool FakeCommandRunner::WaitForCommand(Result* result) { - if (active_edges_.empty()) - return false; - - // All active edges were already completed immediately when started, - // so we can pick any edge here. Pick the last edge. Tests can - // control the order of edges by the name of the first output. - vector::iterator edge_iter = active_edges_.end() - 1; - - Edge* edge = *edge_iter; - result->edge = edge; - - if (edge->rule().name() == "interrupt" || - edge->rule().name() == "touch-interrupt") { - result->status = ExitInterrupted; - return true; - } - - if (edge->rule().name() == "console") { - if (edge->use_console()) - result->status = ExitSuccess; - else - result->status = ExitFailure; - active_edges_.erase(edge_iter); - return true; - } - - if (edge->rule().name() == "fail" || - (edge->rule().name() == "touch-fail-tick2" && fs_->now_ == 2)) - result->status = ExitFailure; - else - result->status = ExitSuccess; - - // Provide a way for test cases to verify when an edge finishes that - // some other edge is still active. 
This is useful for test cases - // covering behavior involving multiple active edges. - const string& verify_active_edge = edge->GetBinding("verify_active_edge"); - if (!verify_active_edge.empty()) { - bool verify_active_edge_found = false; - for (vector::iterator i = active_edges_.begin(); - i != active_edges_.end(); ++i) { - if ((*i)->outputs_.size() >= 1 && - (*i)->outputs_[0]->path() == verify_active_edge) { - verify_active_edge_found = true; - } - } - EXPECT_TRUE(verify_active_edge_found); - } - - active_edges_.erase(edge_iter); - return true; -} - -vector FakeCommandRunner::GetActiveEdges() { - return active_edges_; -} - -void FakeCommandRunner::Abort() { - active_edges_.clear(); -} - -void BuildTest::Dirty(const string& path) { - Node* node = GetNode(path); - node->MarkDirty(); - - // If it's an input file, mark that we've already stat()ed it and - // it's missing. - if (!node->in_edge()) - node->MarkMissing(); -} - -TEST_F(BuildTest, NoWork) { - string err; - EXPECT_TRUE(builder_.AlreadyUpToDate()); -} - -TEST_F(BuildTest, OneStep) { - // Given a dirty target with one ready input, - // we should rebuild the target. - Dirty("cat1"); - string err; - EXPECT_TRUE(builder_.AddTarget("cat1", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cat in1 > cat1", command_runner_.commands_ran_[0]); -} - -TEST_F(BuildTest, OneStep2) { - // Given a target with one dirty input, - // we should rebuild the target. 
- Dirty("cat1"); - string err; - EXPECT_TRUE(builder_.AddTarget("cat1", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cat in1 > cat1", command_runner_.commands_ran_[0]); -} - -TEST_F(BuildTest, TwoStep) { - string err; - EXPECT_TRUE(builder_.AddTarget("cat12", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - // Depending on how the pointers work out, we could've ran - // the first two commands in either order. - EXPECT_TRUE((command_runner_.commands_ran_[0] == "cat in1 > cat1" && - command_runner_.commands_ran_[1] == "cat in1 in2 > cat2") || - (command_runner_.commands_ran_[1] == "cat in1 > cat1" && - command_runner_.commands_ran_[0] == "cat in1 in2 > cat2")); - - EXPECT_EQ("cat cat1 cat2 > cat12", command_runner_.commands_ran_[2]); - - fs_.Tick(); - - // Modifying in2 requires rebuilding one intermediate file - // and the final file. 
- fs_.Create("in2", ""); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("cat12", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(5u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cat in1 in2 > cat2", command_runner_.commands_ran_[3]); - EXPECT_EQ("cat cat1 cat2 > cat12", command_runner_.commands_ran_[4]); -} - -TEST_F(BuildTest, TwoOutputs) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"build out1 out2: touch in.txt\n")); - - fs_.Create("in.txt", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - EXPECT_EQ("touch out1 out2", command_runner_.commands_ran_[0]); -} - -TEST_F(BuildTest, ImplicitOutput) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"build out | out.imp: touch in.txt\n")); - fs_.Create("in.txt", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out.imp", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[0]); -} - -// Test case from -// https://github.com/ninja-build/ninja/issues/148 -TEST_F(BuildTest, MultiOutIn) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"build in1 otherfile: touch in\n" -"build out: touch in | in1\n")); - - fs_.Create("in", ""); - fs_.Tick(); - fs_.Create("in1", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); -} - -TEST_F(BuildTest, Chain) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build c2: cat c1\n" -"build c3: cat c2\n" -"build c4: cat c3\n" -"build c5: cat c4\n")); - - fs_.Create("c1", ""); - - 
string err; - EXPECT_TRUE(builder_.AddTarget("c5", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(4u, command_runner_.commands_ran_.size()); - - err.clear(); - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("c5", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.AlreadyUpToDate()); - - fs_.Tick(); - - fs_.Create("c3", ""); - err.clear(); - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("c5", &err)); - ASSERT_EQ("", err); - EXPECT_FALSE(builder_.AlreadyUpToDate()); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); // 3->4, 4->5 -} - -TEST_F(BuildTest, MissingInput) { - // Input is referenced by build file, but no rule for it. - string err; - Dirty("in1"); - EXPECT_FALSE(builder_.AddTarget("cat1", &err)); - EXPECT_EQ("'in1', needed by 'cat1', missing and no known rule to make it", - err); -} - -TEST_F(BuildTest, MissingTarget) { - // Target is not referenced by build file. 
- string err; - EXPECT_FALSE(builder_.AddTarget("meow", &err)); - EXPECT_EQ("unknown target: 'meow'", err); -} - -TEST_F(BuildTest, MakeDirs) { - string err; - -#ifdef _WIN32 - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "build subdir\\dir2\\file: cat in1\n")); -#else - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "build subdir/dir2/file: cat in1\n")); -#endif - EXPECT_TRUE(builder_.AddTarget("subdir/dir2/file", &err)); - - EXPECT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(2u, fs_.directories_made_.size()); - EXPECT_EQ("subdir", fs_.directories_made_[0]); - EXPECT_EQ("subdir/dir2", fs_.directories_made_[1]); -} - -TEST_F(BuildTest, DepFileMissing) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build fo$ o.o: cc foo.c\n")); - fs_.Create("foo.c", ""); - - EXPECT_TRUE(builder_.AddTarget("fo o.o", &err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, fs_.files_read_.size()); - EXPECT_EQ("fo o.o.d", fs_.files_read_[0]); -} - -TEST_F(BuildTest, DepFileOK) { - string err; - int orig_edges = state_.edges_.size(); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build foo.o: cc foo.c\n")); - Edge* edge = state_.edges_.back(); - - fs_.Create("foo.c", ""); - GetNode("bar.h")->MarkDirty(); // Mark bar.h as missing. - fs_.Create("foo.o.d", "foo.o: blah.h bar.h\n"); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, fs_.files_read_.size()); - EXPECT_EQ("foo.o.d", fs_.files_read_[0]); - - // Expect three new edges: one generating foo.o, and two more from - // loading the depfile. - ASSERT_EQ(orig_edges + 3, (int)state_.edges_.size()); - // Expect our edge to now have three inputs: foo.c and two headers. - ASSERT_EQ(3u, edge->inputs_.size()); - - // Expect the command line we generate to only use the original input. 
- ASSERT_EQ("cc foo.c", edge->EvaluateCommand()); -} - -TEST_F(BuildTest, DepFileParseError) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build foo.o: cc foo.c\n")); - fs_.Create("foo.c", ""); - fs_.Create("foo.o.d", "randomtext\n"); - EXPECT_FALSE(builder_.AddTarget("foo.o", &err)); - EXPECT_EQ("foo.o.d: expected ':' in depfile", err); -} - -TEST_F(BuildTest, EncounterReadyTwice) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"build c: touch\n" -"build b: touch || c\n" -"build a: touch | b || c\n")); - - vector c_out = GetNode("c")->out_edges(); - ASSERT_EQ(2u, c_out.size()); - EXPECT_EQ("b", c_out[0]->outputs_[0]->path()); - EXPECT_EQ("a", c_out[1]->outputs_[0]->path()); - - fs_.Create("b", ""); - EXPECT_TRUE(builder_.AddTarget("a", &err)); - ASSERT_EQ("", err); - - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, OrderOnlyDeps) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build foo.o: cc foo.c || otherfile\n")); - Edge* edge = state_.edges_.back(); - - fs_.Create("foo.c", ""); - fs_.Create("otherfile", ""); - fs_.Create("foo.o.d", "foo.o: blah.h bar.h\n"); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - ASSERT_EQ("", err); - - // One explicit, two implicit, one order only. - ASSERT_EQ(4u, edge->inputs_.size()); - EXPECT_EQ(2, edge->implicit_deps_); - EXPECT_EQ(1, edge->order_only_deps_); - // Verify the inputs are in the order we expect - // (explicit then implicit then orderonly). - EXPECT_EQ("foo.c", edge->inputs_[0]->path()); - EXPECT_EQ("blah.h", edge->inputs_[1]->path()); - EXPECT_EQ("bar.h", edge->inputs_[2]->path()); - EXPECT_EQ("otherfile", edge->inputs_[3]->path()); - - // Expect the command line we generate to only use the original input. 
- ASSERT_EQ("cc foo.c", edge->EvaluateCommand()); - - // explicit dep dirty, expect a rebuild. - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - - fs_.Tick(); - - // Recreate the depfile, as it should have been deleted by the build. - fs_.Create("foo.o.d", "foo.o: blah.h bar.h\n"); - - // implicit dep dirty, expect a rebuild. - fs_.Create("blah.h", ""); - fs_.Create("bar.h", ""); - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - - fs_.Tick(); - - // Recreate the depfile, as it should have been deleted by the build. - fs_.Create("foo.o.d", "foo.o: blah.h bar.h\n"); - - // order only dep dirty, no rebuild. - fs_.Create("otherfile", ""); - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - EXPECT_EQ("", err); - EXPECT_TRUE(builder_.AlreadyUpToDate()); - - // implicit dep missing, expect rebuild. - fs_.RemoveFile("bar.h"); - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, RebuildOrderOnlyDeps) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n" -"rule true\n command = true\n" -"build oo.h: cc oo.h.in\n" -"build foo.o: cc foo.c || oo.h\n")); - - fs_.Create("foo.c", ""); - fs_.Create("oo.h.in", ""); - - // foo.o and order-only dep dirty, build both. - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - - // all clean, no rebuild. 
- command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - EXPECT_EQ("", err); - EXPECT_TRUE(builder_.AlreadyUpToDate()); - - // order-only dep missing, build it only. - fs_.RemoveFile("oo.h"); - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - ASSERT_EQ("cc oo.h.in", command_runner_.commands_ran_[0]); - - fs_.Tick(); - - // order-only dep dirty, build it only. - fs_.Create("oo.h.in", ""); - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("foo.o", &err)); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - ASSERT_EQ("cc oo.h.in", command_runner_.commands_ran_[0]); -} - -#ifdef _WIN32 -TEST_F(BuildTest, DepFileCanonicalize) { - string err; - int orig_edges = state_.edges_.size(); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n command = cc $in\n depfile = $out.d\n" -"build gen/stuff\\things/foo.o: cc x\\y/z\\foo.c\n")); - Edge* edge = state_.edges_.back(); - - fs_.Create("x/y/z/foo.c", ""); - GetNode("bar.h")->MarkDirty(); // Mark bar.h as missing. - // Note, different slashes from manifest. - fs_.Create("gen/stuff\\things/foo.o.d", - "gen\\stuff\\things\\foo.o: blah.h bar.h\n"); - EXPECT_TRUE(builder_.AddTarget("gen/stuff/things/foo.o", &err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, fs_.files_read_.size()); - // The depfile path does not get Canonicalize as it seems unnecessary. - EXPECT_EQ("gen/stuff\\things/foo.o.d", fs_.files_read_[0]); - - // Expect three new edges: one generating foo.o, and two more from - // loading the depfile. - ASSERT_EQ(orig_edges + 3, (int)state_.edges_.size()); - // Expect our edge to now have three inputs: foo.c and two headers. 
- ASSERT_EQ(3u, edge->inputs_.size()); - - // Expect the command line we generate to only use the original input, and - // using the slashes from the manifest. - ASSERT_EQ("cc x\\y/z\\foo.c", edge->EvaluateCommand()); -} -#endif - -TEST_F(BuildTest, Phony) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat bar.cc\n" -"build all: phony out\n")); - fs_.Create("bar.cc", ""); - - EXPECT_TRUE(builder_.AddTarget("all", &err)); - ASSERT_EQ("", err); - - // Only one command to run, because phony runs no command. - EXPECT_FALSE(builder_.AlreadyUpToDate()); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, PhonyNoWork) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat bar.cc\n" -"build all: phony out\n")); - fs_.Create("bar.cc", ""); - fs_.Create("out", ""); - - EXPECT_TRUE(builder_.AddTarget("all", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.AlreadyUpToDate()); -} - -// Test a self-referencing phony. Ideally this should not work, but -// ninja 1.7 and below tolerated and CMake 2.8.12.x and 3.0.x both -// incorrectly produce it. We tolerate it for compatibility. 
-TEST_F(BuildTest, PhonySelfReference) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build a: phony a\n")); - - EXPECT_TRUE(builder_.AddTarget("a", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.AlreadyUpToDate()); -} - -TEST_F(BuildTest, Fail) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule fail\n" -" command = fail\n" -"build out1: fail\n")); - - string err; - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(builder_.Build(&err)); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - ASSERT_EQ("subcommand failed", err); -} - -TEST_F(BuildTest, SwallowFailures) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule fail\n" -" command = fail\n" -"build out1: fail\n" -"build out2: fail\n" -"build out3: fail\n" -"build all: phony out1 out2 out3\n")); - - // Swallow two failures, die on the third. - config_.failures_allowed = 3; - - string err; - EXPECT_TRUE(builder_.AddTarget("all", &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(builder_.Build(&err)); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - ASSERT_EQ("subcommands failed", err); -} - -TEST_F(BuildTest, SwallowFailuresLimit) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule fail\n" -" command = fail\n" -"build out1: fail\n" -"build out2: fail\n" -"build out3: fail\n" -"build final: cat out1 out2 out3\n")); - - // Swallow ten failures; we should stop before building final. 
- config_.failures_allowed = 11; - - string err; - EXPECT_TRUE(builder_.AddTarget("final", &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(builder_.Build(&err)); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - ASSERT_EQ("cannot make progress due to previous errors", err); -} - -TEST_F(BuildTest, SwallowFailuresPool) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"pool failpool\n" -" depth = 1\n" -"rule fail\n" -" command = fail\n" -" pool = failpool\n" -"build out1: fail\n" -"build out2: fail\n" -"build out3: fail\n" -"build final: cat out1 out2 out3\n")); - - // Swallow ten failures; we should stop before building final. - config_.failures_allowed = 11; - - string err; - EXPECT_TRUE(builder_.AddTarget("final", &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(builder_.Build(&err)); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - ASSERT_EQ("cannot make progress due to previous errors", err); -} - -TEST_F(BuildTest, PoolEdgesReadyButNotWanted) { - fs_.Create("x", ""); - - const char* manifest = - "pool some_pool\n" - " depth = 4\n" - "rule touch\n" - " command = touch $out\n" - " pool = some_pool\n" - "rule cc\n" - " command = touch grit\n" - "\n" - "build B.d.stamp: cc | x\n" - "build C.stamp: touch B.d.stamp\n" - "build final.stamp: touch || C.stamp\n"; - - RebuildTarget("final.stamp", manifest); - - fs_.RemoveFile("B.d.stamp"); - - State save_state; - RebuildTarget("final.stamp", manifest, NULL, NULL, &save_state); - EXPECT_GE(save_state.LookupPool("some_pool")->current_use(), 0); -} - -struct BuildWithLogTest : public BuildTest { - BuildWithLogTest() { - builder_.SetBuildLog(&build_log_); - } - - BuildLog build_log_; -}; - -TEST_F(BuildWithLogTest, NotInLogButOnDisk) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n" -" command = cc\n" -"build out1: cc in\n")); - - // Create input/output that would be considered up to date when - // not considering the command line hash. 
- fs_.Create("in", ""); - fs_.Create("out1", ""); - string err; - - // Because it's not in the log, it should not be up-to-date until - // we build again. - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_FALSE(builder_.AlreadyUpToDate()); - - command_runner_.commands_ran_.clear(); - state_.Reset(); - - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_TRUE(builder_.AlreadyUpToDate()); -} - -TEST_F(BuildWithLogTest, RebuildAfterFailure) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch-fail-tick2\n" -" command = touch-fail-tick2\n" -"build out1: touch-fail-tick2 in\n")); - - string err; - - fs_.Create("in", ""); - - // Run once successfully to get out1 in the log - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - EXPECT_EQ(1u, command_runner_.commands_ran_.size()); - - command_runner_.commands_ran_.clear(); - state_.Reset(); - builder_.Cleanup(); - builder_.plan_.Reset(); - - fs_.Tick(); - fs_.Create("in", ""); - - // Run again with a failure that updates the output file timestamp - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_FALSE(builder_.Build(&err)); - EXPECT_EQ("subcommand failed", err); - EXPECT_EQ(1u, command_runner_.commands_ran_.size()); - - command_runner_.commands_ran_.clear(); - state_.Reset(); - builder_.Cleanup(); - builder_.plan_.Reset(); - - fs_.Tick(); - - // Run again, should rerun even though the output file is up to date on disk - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_FALSE(builder_.AlreadyUpToDate()); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ(1u, command_runner_.commands_ran_.size()); - EXPECT_EQ("", err); -} - -TEST_F(BuildWithLogTest, RebuildWithNoInputs) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch\n" -"build out1: touch\n" -"build out2: touch in\n")); - - string err; - - fs_.Create("in", ""); - - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - 
EXPECT_TRUE(builder_.AddTarget("out2", &err)); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - EXPECT_EQ(2u, command_runner_.commands_ran_.size()); - - command_runner_.commands_ran_.clear(); - state_.Reset(); - - fs_.Tick(); - - fs_.Create("in", ""); - - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - EXPECT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildWithLogTest, RestatTest) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule true\n" -" command = true\n" -" restat = 1\n" -"rule cc\n" -" command = cc\n" -" restat = 1\n" -"build out1: cc in\n" -"build out2: true out1\n" -"build out3: cat out2\n")); - - fs_.Create("out1", ""); - fs_.Create("out2", ""); - fs_.Create("out3", ""); - - fs_.Tick(); - - fs_.Create("in", ""); - - // Do a pre-build so that there's commands in the log for the outputs, - // otherwise, the lack of an entry in the build log will cause out3 to rebuild - // regardless of restat. - string err; - EXPECT_TRUE(builder_.AddTarget("out3", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - EXPECT_EQ("[3/3]", builder_.status_->FormatProgressStatus("[%s/%t]", - BuildStatus::kEdgeStarted)); - command_runner_.commands_ran_.clear(); - state_.Reset(); - - fs_.Tick(); - - fs_.Create("in", ""); - // "cc" touches out1, so we should build out2. But because "true" does not - // touch out2, we should cancel the build of out3. - EXPECT_TRUE(builder_.AddTarget("out3", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - - // If we run again, it should be a no-op, because the build log has recorded - // that we've already built out2 with an input timestamp of 2 (from out1). 
- command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("out3", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.AlreadyUpToDate()); - - fs_.Tick(); - - fs_.Create("in", ""); - - // The build log entry should not, however, prevent us from rebuilding out2 - // if out1 changes. - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("out3", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildWithLogTest, RestatMissingFile) { - // If a restat rule doesn't create its output, and the output didn't - // exist before the rule was run, consider that behavior equivalent - // to a rule that doesn't modify its existent output file. - - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule true\n" -" command = true\n" -" restat = 1\n" -"rule cc\n" -" command = cc\n" -"build out1: true in\n" -"build out2: cc out1\n")); - - fs_.Create("in", ""); - fs_.Create("out2", ""); - - // Do a pre-build so that there's commands in the log for the outputs, - // otherwise, the lack of an entry in the build log will cause out2 to rebuild - // regardless of restat. - string err; - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - command_runner_.commands_ran_.clear(); - state_.Reset(); - - fs_.Tick(); - fs_.Create("in", ""); - fs_.Create("out2", ""); - - // Run a build, expect only the first command to run. - // It doesn't touch its output (due to being the "true" command), so - // we shouldn't run the dependent build. 
- EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildWithLogTest, RestatSingleDependentOutputDirty) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "rule true\n" - " command = true\n" - " restat = 1\n" - "rule touch\n" - " command = touch\n" - "build out1: true in\n" - "build out2 out3: touch out1\n" - "build out4: touch out2\n" - )); - - // Create the necessary files - fs_.Create("in", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out4", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - - fs_.Tick(); - fs_.Create("in", ""); - fs_.RemoveFile("out3"); - - // Since "in" is missing, out1 will be built. Since "out3" is missing, - // out2 and out3 will be built even though "in" is not touched when built. - // Then, since out2 is rebuilt, out4 should be rebuilt -- the restat on the - // "true" rule should not lead to the "touch" edge writing out2 and out3 being - // cleard. 
- command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("out4", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); -} - -// Test scenario, in which an input file is removed, but output isn't changed -// https://github.com/ninja-build/ninja/issues/295 -TEST_F(BuildWithLogTest, RestatMissingInput) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "rule true\n" - " command = true\n" - " depfile = $out.d\n" - " restat = 1\n" - "rule cc\n" - " command = cc\n" - "build out1: true in\n" - "build out2: cc out1\n")); - - // Create all necessary files - fs_.Create("in", ""); - - // The implicit dependencies and the depfile itself - // are newer than the output - TimeStamp restat_mtime = fs_.Tick(); - fs_.Create("out1.d", "out1: will.be.deleted restat.file\n"); - fs_.Create("will.be.deleted", ""); - fs_.Create("restat.file", ""); - - // Run the build, out1 and out2 get built - string err; - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - - // See that an entry in the logfile is created, capturing - // the right mtime - BuildLog::LogEntry* log_entry = build_log_.LookupByOutput("out1"); - ASSERT_TRUE(NULL != log_entry); - ASSERT_EQ(restat_mtime, log_entry->mtime); - - // Now remove a file, referenced from depfile, so that target becomes - // dirty, but the output does not change - fs_.RemoveFile("will.be.deleted"); - - // Trigger the build again - only out1 gets built - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - - // Check that the logfile entry remains correctly set - log_entry = build_log_.LookupByOutput("out1"); - ASSERT_TRUE(NULL != log_entry); - 
ASSERT_EQ(restat_mtime, log_entry->mtime); -} - -struct BuildDryRun : public BuildWithLogTest { - BuildDryRun() { - config_.dry_run = true; - } -}; - -TEST_F(BuildDryRun, AllCommandsShown) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule true\n" -" command = true\n" -" restat = 1\n" -"rule cc\n" -" command = cc\n" -" restat = 1\n" -"build out1: cc in\n" -"build out2: true out1\n" -"build out3: cat out2\n")); - - fs_.Create("out1", ""); - fs_.Create("out2", ""); - fs_.Create("out3", ""); - - fs_.Tick(); - - fs_.Create("in", ""); - - // "cc" touches out1, so we should build out2. But because "true" does not - // touch out2, we should cancel the build of out3. - string err; - EXPECT_TRUE(builder_.AddTarget("out3", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); -} - -// Test that RSP files are created when & where appropriate and deleted after -// successful execution. -TEST_F(BuildTest, RspFileSuccess) -{ - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "rule cat_rsp\n" - " command = cat $rspfile > $out\n" - " rspfile = $rspfile\n" - " rspfile_content = $long_command\n" - "rule cat_rsp_out\n" - " command = cat $rspfile > $out\n" - " rspfile = $out.rsp\n" - " rspfile_content = $long_command\n" - "build out1: cat in\n" - "build out2: cat_rsp in\n" - " rspfile = out 2.rsp\n" - " long_command = Some very long command\n" - "build out$ 3: cat_rsp_out in\n" - " long_command = Some very long command\n")); - - fs_.Create("out1", ""); - fs_.Create("out2", ""); - fs_.Create("out 3", ""); - - fs_.Tick(); - - fs_.Create("in", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.AddTarget("out 3", &err)); - ASSERT_EQ("", err); - - size_t files_created = fs_.files_created_.size(); - size_t files_removed = fs_.files_removed_.size(); - - EXPECT_TRUE(builder_.Build(&err)); - 
ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - - // The RSP files were created - ASSERT_EQ(files_created + 2, fs_.files_created_.size()); - ASSERT_EQ(1u, fs_.files_created_.count("out 2.rsp")); - ASSERT_EQ(1u, fs_.files_created_.count("out 3.rsp")); - - // The RSP files were removed - ASSERT_EQ(files_removed + 2, fs_.files_removed_.size()); - ASSERT_EQ(1u, fs_.files_removed_.count("out 2.rsp")); - ASSERT_EQ(1u, fs_.files_removed_.count("out 3.rsp")); -} - -// Test that RSP file is created but not removed for commands, which fail -TEST_F(BuildTest, RspFileFailure) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "rule fail\n" - " command = fail\n" - " rspfile = $rspfile\n" - " rspfile_content = $long_command\n" - "build out: fail in\n" - " rspfile = out.rsp\n" - " long_command = Another very long command\n")); - - fs_.Create("out", ""); - fs_.Tick(); - fs_.Create("in", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - - size_t files_created = fs_.files_created_.size(); - size_t files_removed = fs_.files_removed_.size(); - - EXPECT_FALSE(builder_.Build(&err)); - ASSERT_EQ("subcommand failed", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - - // The RSP file was created - ASSERT_EQ(files_created + 1, fs_.files_created_.size()); - ASSERT_EQ(1u, fs_.files_created_.count("out.rsp")); - - // The RSP file was NOT removed - ASSERT_EQ(files_removed, fs_.files_removed_.size()); - ASSERT_EQ(0u, fs_.files_removed_.count("out.rsp")); - - // The RSP file contains what it should - ASSERT_EQ("Another very long command", fs_.files_["out.rsp"].contents); -} - -// Test that contents of the RSP file behaves like a regular part of -// command line, i.e. 
triggers a rebuild if changed -TEST_F(BuildWithLogTest, RspFileCmdLineChange) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "rule cat_rsp\n" - " command = cat $rspfile > $out\n" - " rspfile = $rspfile\n" - " rspfile_content = $long_command\n" - "build out: cat_rsp in\n" - " rspfile = out.rsp\n" - " long_command = Original very long command\n")); - - fs_.Create("out", ""); - fs_.Tick(); - fs_.Create("in", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - - // 1. Build for the 1st time (-> populate log) - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - - // 2. Build again (no change) - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - ASSERT_TRUE(builder_.AlreadyUpToDate()); - - // 3. Alter the entry in the logfile - // (to simulate a change in the command line between 2 builds) - BuildLog::LogEntry* log_entry = build_log_.LookupByOutput("out"); - ASSERT_TRUE(NULL != log_entry); - ASSERT_NO_FATAL_FAILURE(AssertHash( - "cat out.rsp > out;rspfile=Original very long command", - log_entry->command_hash)); - log_entry->command_hash++; // Change the command hash to something else. - // Now expect the target to be rebuilt - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, InterruptCleanup) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule interrupt\n" -" command = interrupt\n" -"rule touch-interrupt\n" -" command = touch-interrupt\n" -"build out1: interrupt in1\n" -"build out2: touch-interrupt in2\n")); - - fs_.Create("out1", ""); - fs_.Create("out2", ""); - fs_.Tick(); - fs_.Create("in1", ""); - fs_.Create("in2", ""); - - // An untouched output of an interrupted command should be retained. 
- string err; - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_EQ("", err); - EXPECT_FALSE(builder_.Build(&err)); - EXPECT_EQ("interrupted by user", err); - builder_.Cleanup(); - EXPECT_GT(fs_.Stat("out1", &err), 0); - err = ""; - - // A touched output of an interrupted command should be deleted. - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - EXPECT_EQ("", err); - EXPECT_FALSE(builder_.Build(&err)); - EXPECT_EQ("interrupted by user", err); - builder_.Cleanup(); - EXPECT_EQ(0, fs_.Stat("out2", &err)); -} - -TEST_F(BuildTest, StatFailureAbortsBuild) { - const string kTooLongToStat(400, 'i'); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -("build " + kTooLongToStat + ": cat in\n").c_str())); - fs_.Create("in", ""); - - // This simulates a stat failure: - fs_.files_[kTooLongToStat].mtime = -1; - fs_.files_[kTooLongToStat].stat_error = "stat failed"; - - string err; - EXPECT_FALSE(builder_.AddTarget(kTooLongToStat, &err)); - EXPECT_EQ("stat failed", err); -} - -TEST_F(BuildTest, PhonyWithNoInputs) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build nonexistent: phony\n" -"build out1: cat || nonexistent\n" -"build out2: cat nonexistent\n")); - fs_.Create("out1", ""); - fs_.Create("out2", ""); - - // out1 should be up to date even though its input is dirty, because its - // order-only dependency has nothing to do. - string err; - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.AlreadyUpToDate()); - - // out2 should still be out of date though, because its input is dirty. 
- err.clear(); - command_runner_.commands_ran_.clear(); - state_.Reset(); - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, DepsGccWithEmptyDepfileErrorsOut) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n" -" command = cc\n" -" deps = gcc\n" -"build out: cc\n")); - Dirty("out"); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_FALSE(builder_.AlreadyUpToDate()); - - EXPECT_FALSE(builder_.Build(&err)); - ASSERT_EQ("subcommand failed", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, StatusFormatElapsed) { - status_.BuildStarted(); - // Before any task is done, the elapsed time must be zero. - EXPECT_EQ("[%/e0.000]", - status_.FormatProgressStatus("[%%/e%e]", - BuildStatus::kEdgeStarted)); -} - -TEST_F(BuildTest, StatusFormatReplacePlaceholder) { - EXPECT_EQ("[%/s0/t0/r0/u0/f0]", - status_.FormatProgressStatus("[%%/s%s/t%t/r%r/u%u/f%f]", - BuildStatus::kEdgeStarted)); -} - -TEST_F(BuildTest, FailedDepsParse) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build bad_deps.o: cat in1\n" -" deps = gcc\n" -" depfile = in1.d\n")); - - string err; - EXPECT_TRUE(builder_.AddTarget("bad_deps.o", &err)); - ASSERT_EQ("", err); - - // These deps will fail to parse, as they should only have one - // path to the left of the colon. - fs_.Create("in1.d", "AAA BBB"); - - EXPECT_FALSE(builder_.Build(&err)); - EXPECT_EQ("subcommand failed", err); -} - -/// Tests of builds involving deps logs necessarily must span -/// multiple builds. We reuse methods on BuildTest but not the -/// builder_ it sets up, because we want pristine objects for -/// each build. 
-struct BuildWithDepsLogTest : public BuildTest { - BuildWithDepsLogTest() {} - - virtual void SetUp() { - BuildTest::SetUp(); - - temp_dir_.CreateAndEnter("BuildWithDepsLogTest"); - } - - virtual void TearDown() { - temp_dir_.Cleanup(); - } - - ScopedTempDir temp_dir_; - - /// Shadow parent class builder_ so we don't accidentally use it. - void* builder_; -}; - -/// Run a straightforwad build where the deps log is used. -TEST_F(BuildWithDepsLogTest, Straightforward) { - string err; - // Note: in1 was created by the superclass SetUp(). - const char* manifest = - "build out: cat in1\n" - " deps = gcc\n" - " depfile = in1.d\n"; - { - State state; - ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - // Run the build once, everything should be ok. - DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - ASSERT_EQ("", err); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - EXPECT_TRUE(builder.AddTarget("out", &err)); - ASSERT_EQ("", err); - fs_.Create("in1.d", "out: in2"); - EXPECT_TRUE(builder.Build(&err)); - EXPECT_EQ("", err); - - // The deps file should have been removed. - EXPECT_EQ(0, fs_.Stat("in1.d", &err)); - // Recreate it for the next step. - fs_.Create("in1.d", "out: in2"); - deps_log.Close(); - builder.command_runner_.release(); - } - - { - State state; - ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - // Touch the file only mentioned in the deps. - fs_.Tick(); - fs_.Create("in2", ""); - - // Run the build again. 
- DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - command_runner_.commands_ran_.clear(); - EXPECT_TRUE(builder.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder.Build(&err)); - EXPECT_EQ("", err); - - // We should have rebuilt the output due to in2 being - // out of date. - EXPECT_EQ(1u, command_runner_.commands_ran_.size()); - - builder.command_runner_.release(); - } -} - -/// Verify that obsolete dependency info causes a rebuild. -/// 1) Run a successful build where everything has time t, record deps. -/// 2) Move input/output to time t+1 -- despite files in alignment, -/// should still need to rebuild due to deps at older time. -TEST_F(BuildWithDepsLogTest, ObsoleteDeps) { - string err; - // Note: in1 was created by the superclass SetUp(). - const char* manifest = - "build out: cat in1\n" - " deps = gcc\n" - " depfile = in1.d\n"; - { - // Run an ordinary build that gathers dependencies. - fs_.Create("in1", ""); - fs_.Create("in1.d", "out: "); - - State state; - ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - // Run the build once, everything should be ok. - DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - ASSERT_EQ("", err); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - EXPECT_TRUE(builder.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder.Build(&err)); - EXPECT_EQ("", err); - - deps_log.Close(); - builder.command_runner_.release(); - } - - // Push all files one tick forward so that only the deps are out - // of date. - fs_.Tick(); - fs_.Create("in1", ""); - fs_.Create("out", ""); - - // The deps file should have been removed, so no need to timestamp it. 
- EXPECT_EQ(0, fs_.Stat("in1.d", &err)); - - { - State state; - ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - command_runner_.commands_ran_.clear(); - EXPECT_TRUE(builder.AddTarget("out", &err)); - ASSERT_EQ("", err); - - // Recreate the deps file here because the build expects them to exist. - fs_.Create("in1.d", "out: "); - - EXPECT_TRUE(builder.Build(&err)); - EXPECT_EQ("", err); - - // We should have rebuilt the output due to the deps being - // out of date. - EXPECT_EQ(1u, command_runner_.commands_ran_.size()); - - builder.command_runner_.release(); - } -} - -TEST_F(BuildWithDepsLogTest, DepsIgnoredInDryRun) { - const char* manifest = - "build out: cat in1\n" - " deps = gcc\n" - " depfile = in1.d\n"; - - fs_.Create("out", ""); - fs_.Tick(); - fs_.Create("in1", ""); - - State state; - ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - // The deps log is NULL in dry runs. - config_.dry_run = true; - Builder builder(&state, config_, NULL, NULL, &fs_); - builder.command_runner_.reset(&command_runner_); - command_runner_.commands_ran_.clear(); - - string err; - EXPECT_TRUE(builder.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder.Build(&err)); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - - builder.command_runner_.release(); -} - -/// Check that a restat rule generating a header cancels compilations correctly. -TEST_F(BuildTest, RestatDepfileDependency) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule true\n" -" command = true\n" // Would be "write if out-of-date" in reality. 
-" restat = 1\n" -"build header.h: true header.in\n" -"build out: cat in1\n" -" depfile = in1.d\n")); - - fs_.Create("header.h", ""); - fs_.Create("in1.d", "out: header.h"); - fs_.Tick(); - fs_.Create("header.in", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); -} - -/// Check that a restat rule generating a header cancels compilations correctly, -/// depslog case. -TEST_F(BuildWithDepsLogTest, RestatDepfileDependencyDepsLog) { - string err; - // Note: in1 was created by the superclass SetUp(). - const char* manifest = - "rule true\n" - " command = true\n" // Would be "write if out-of-date" in reality. - " restat = 1\n" - "build header.h: true header.in\n" - "build out: cat in1\n" - " deps = gcc\n" - " depfile = in1.d\n"; - { - State state; - ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - // Run the build once, everything should be ok. - DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - ASSERT_EQ("", err); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - EXPECT_TRUE(builder.AddTarget("out", &err)); - ASSERT_EQ("", err); - fs_.Create("in1.d", "out: header.h"); - EXPECT_TRUE(builder.Build(&err)); - EXPECT_EQ("", err); - - deps_log.Close(); - builder.command_runner_.release(); - } - - { - State state; - ASSERT_NO_FATAL_FAILURE(AddCatRule(&state)); - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - // Touch the input of the restat rule. - fs_.Tick(); - fs_.Create("header.in", ""); - - // Run the build again. 
- DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - command_runner_.commands_ran_.clear(); - EXPECT_TRUE(builder.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder.Build(&err)); - EXPECT_EQ("", err); - - // Rule "true" should have run again, but the build of "out" should have - // been cancelled due to restat propagating through the depfile header. - EXPECT_EQ(1u, command_runner_.commands_ran_.size()); - - builder.command_runner_.release(); - } -} - -TEST_F(BuildWithDepsLogTest, DepFileOKDepsLog) { - string err; - const char* manifest = - "rule cc\n command = cc $in\n depfile = $out.d\n deps = gcc\n" - "build fo$ o.o: cc foo.c\n"; - - fs_.Create("foo.c", ""); - - { - State state; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - // Run the build once, everything should be ok. - DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - ASSERT_EQ("", err); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - EXPECT_TRUE(builder.AddTarget("fo o.o", &err)); - ASSERT_EQ("", err); - fs_.Create("fo o.o.d", "fo\\ o.o: blah.h bar.h\n"); - EXPECT_TRUE(builder.Build(&err)); - EXPECT_EQ("", err); - - deps_log.Close(); - builder.command_runner_.release(); - } - - { - State state; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - ASSERT_EQ("", err); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - - Edge* edge = state.edges_.back(); - - state.GetNode("bar.h", 0)->MarkDirty(); // Mark bar.h as missing. 
- EXPECT_TRUE(builder.AddTarget("fo o.o", &err)); - ASSERT_EQ("", err); - - // Expect three new edges: one generating fo o.o, and two more from - // loading the depfile. - ASSERT_EQ(3u, state.edges_.size()); - // Expect our edge to now have three inputs: foo.c and two headers. - ASSERT_EQ(3u, edge->inputs_.size()); - - // Expect the command line we generate to only use the original input. - ASSERT_EQ("cc foo.c", edge->EvaluateCommand()); - - deps_log.Close(); - builder.command_runner_.release(); - } -} - -#ifdef _WIN32 -TEST_F(BuildWithDepsLogTest, DepFileDepsLogCanonicalize) { - string err; - const char* manifest = - "rule cc\n command = cc $in\n depfile = $out.d\n deps = gcc\n" - "build a/b\\c\\d/e/fo$ o.o: cc x\\y/z\\foo.c\n"; - - fs_.Create("x/y/z/foo.c", ""); - - { - State state; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - // Run the build once, everything should be ok. - DepsLog deps_log; - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - ASSERT_EQ("", err); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - EXPECT_TRUE(builder.AddTarget("a/b/c/d/e/fo o.o", &err)); - ASSERT_EQ("", err); - // Note, different slashes from manifest. - fs_.Create("a/b\\c\\d/e/fo o.o.d", - "a\\b\\c\\d\\e\\fo\\ o.o: blah.h bar.h\n"); - EXPECT_TRUE(builder.Build(&err)); - EXPECT_EQ("", err); - - deps_log.Close(); - builder.command_runner_.release(); - } - - { - State state; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, manifest)); - - DepsLog deps_log; - ASSERT_TRUE(deps_log.Load("ninja_deps", &state, &err)); - ASSERT_TRUE(deps_log.OpenForWrite("ninja_deps", &err)); - ASSERT_EQ("", err); - - Builder builder(&state, config_, NULL, &deps_log, &fs_); - builder.command_runner_.reset(&command_runner_); - - Edge* edge = state.edges_.back(); - - state.GetNode("bar.h", 0)->MarkDirty(); // Mark bar.h as missing. 
- EXPECT_TRUE(builder.AddTarget("a/b/c/d/e/fo o.o", &err)); - ASSERT_EQ("", err); - - // Expect three new edges: one generating fo o.o, and two more from - // loading the depfile. - ASSERT_EQ(3u, state.edges_.size()); - // Expect our edge to now have three inputs: foo.c and two headers. - ASSERT_EQ(3u, edge->inputs_.size()); - - // Expect the command line we generate to only use the original input. - // Note, slashes from manifest, not .d. - ASSERT_EQ("cc x\\y/z\\foo.c", edge->EvaluateCommand()); - - deps_log.Close(); - builder.command_runner_.release(); - } -} -#endif - -/// Check that a restat rule doesn't clear an edge if the depfile is missing. -/// Follows from: https://github.com/ninja-build/ninja/issues/603 -TEST_F(BuildTest, RestatMissingDepfile) { -const char* manifest = -"rule true\n" -" command = true\n" // Would be "write if out-of-date" in reality. -" restat = 1\n" -"build header.h: true header.in\n" -"build out: cat header.h\n" -" depfile = out.d\n"; - - fs_.Create("header.h", ""); - fs_.Tick(); - fs_.Create("out", ""); - fs_.Create("header.in", ""); - - // Normally, only 'header.h' would be rebuilt, as - // its rule doesn't touch the output and has 'restat=1' set. - // But we are also missing the depfile for 'out', - // which should force its command to run anyway! - RebuildTarget("out", manifest); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); -} - -/// Check that a restat rule doesn't clear an edge if the deps are missing. -/// https://github.com/ninja-build/ninja/issues/603 -TEST_F(BuildWithDepsLogTest, RestatMissingDepfileDepslog) { - string err; - const char* manifest = -"rule true\n" -" command = true\n" // Would be "write if out-of-date" in reality. 
-" restat = 1\n" -"build header.h: true header.in\n" -"build out: cat header.h\n" -" deps = gcc\n" -" depfile = out.d\n"; - - // Build once to populate ninja deps logs from out.d - fs_.Create("header.in", ""); - fs_.Create("out.d", "out: header.h"); - fs_.Create("header.h", ""); - - RebuildTarget("out", manifest, "build_log", "ninja_deps"); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - - // Sanity: this rebuild should be NOOP - RebuildTarget("out", manifest, "build_log", "ninja_deps"); - ASSERT_EQ(0u, command_runner_.commands_ran_.size()); - - // Touch 'header.in', blank dependencies log (create a different one). - // Building header.h triggers 'restat' outputs cleanup. - // Validate that out is rebuilt netherless, as deps are missing. - fs_.Tick(); - fs_.Create("header.in", ""); - - // (switch to a new blank deps_log "ninja_deps2") - RebuildTarget("out", manifest, "build_log", "ninja_deps2"); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - - // Sanity: this build should be NOOP - RebuildTarget("out", manifest, "build_log", "ninja_deps2"); - ASSERT_EQ(0u, command_runner_.commands_ran_.size()); - - // Check that invalidating deps by target timestamp also works here - // Repeat the test but touch target instead of blanking the log. 
- fs_.Tick(); - fs_.Create("header.in", ""); - fs_.Create("out", ""); - RebuildTarget("out", manifest, "build_log", "ninja_deps2"); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - - // And this build should be NOOP again - RebuildTarget("out", manifest, "build_log", "ninja_deps2"); - ASSERT_EQ(0u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, WrongOutputInDepfileCausesRebuild) { - string err; - const char* manifest = -"rule cc\n" -" command = cc $in\n" -" depfile = $out.d\n" -"build foo.o: cc foo.c\n"; - - fs_.Create("foo.c", ""); - fs_.Create("foo.o", ""); - fs_.Create("header.h", ""); - fs_.Create("foo.o.d", "bar.o.d: header.h\n"); - - RebuildTarget("foo.o", manifest, "build_log", "ninja_deps"); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, Console) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule console\n" -" command = console\n" -" pool = console\n" -"build cons: console in.txt\n")); - - fs_.Create("in.txt", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("cons", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); -} - -TEST_F(BuildTest, DyndepMissingAndNoRule) { - // Verify that we can diagnose when a dyndep file is missing and - // has no rule to build it. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"build out: touch || dd\n" -" dyndep = dd\n" -)); - - string err; - EXPECT_FALSE(builder_.AddTarget("out", &err)); - EXPECT_EQ("loading 'dd': No such file or directory", err); -} - -TEST_F(BuildTest, DyndepReadyImplicitConnection) { - // Verify that a dyndep file can be loaded immediately to discover - // that one edge has an implicit output that is also an implicit - // input of another edge. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"build tmp: touch || dd\n" -" dyndep = dd\n" -"build out: touch || dd\n" -" dyndep = dd\n" -)); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out | out.imp: dyndep | tmp.imp\n" -"build tmp | tmp.imp: dyndep\n" -); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - EXPECT_EQ("touch tmp tmp.imp", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[1]); -} - -TEST_F(BuildTest, DyndepReadySyntaxError) { - // Verify that a dyndep file can be loaded immediately to discover - // and reject a syntax error in it. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"build out: touch || dd\n" -" dyndep = dd\n" -)); - fs_.Create("dd", -"build out: dyndep\n" -); - - string err; - EXPECT_FALSE(builder_.AddTarget("out", &err)); - EXPECT_EQ("dd:1: expected 'ninja_dyndep_version = ...'\n", err); -} - -TEST_F(BuildTest, DyndepReadyCircular) { - // Verify that a dyndep file can be loaded immediately to discover - // and reject a circular dependency. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out: r in || dd\n" -" dyndep = dd\n" -"build in: r circ\n" - )); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out | circ: dyndep\n" - ); - fs_.Create("out", ""); - - string err; - EXPECT_FALSE(builder_.AddTarget("out", &err)); - EXPECT_EQ("dependency cycle: circ -> in -> circ", err); -} - -TEST_F(BuildTest, DyndepBuild) { - // Verify that a dyndep file can be built and loaded to discover nothing. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build out: touch || dd\n" -" dyndep = dd\n" -)); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - - size_t files_created = fs_.files_created_.size(); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch out", command_runner_.commands_ran_[1]); - ASSERT_EQ(2u, fs_.files_read_.size()); - EXPECT_EQ("dd-in", fs_.files_read_[0]); - EXPECT_EQ("dd", fs_.files_read_[1]); - ASSERT_EQ(2u + files_created, fs_.files_created_.size()); - EXPECT_EQ(1u, fs_.files_created_.count("dd")); - EXPECT_EQ(1u, fs_.files_created_.count("out")); -} - -TEST_F(BuildTest, DyndepBuildSyntaxError) { - // Verify that a dyndep file can be built and loaded to discover - // and reject a syntax error in it. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build out: touch || dd\n" -" dyndep = dd\n" -)); - fs_.Create("dd-in", -"build out: dyndep\n" -); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - - EXPECT_FALSE(builder_.Build(&err)); - EXPECT_EQ("dd:1: expected 'ninja_dyndep_version = ...'\n", err); -} - -TEST_F(BuildTest, DyndepBuildUnrelatedOutput) { - // Verify that a dyndep file can have dependents that do not specify - // it as their dyndep binding. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build unrelated: touch || dd\n" -"build out: touch unrelated || dd\n" -" dyndep = dd\n" - )); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -); - fs_.Tick(); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch unrelated", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch out", command_runner_.commands_ran_[2]); -} - -TEST_F(BuildTest, DyndepBuildDiscoverNewOutput) { - // Verify that a dyndep file can be built and loaded to discover - // a new output of an edge. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build out: touch in || dd\n" -" dyndep = dd\n" - )); - fs_.Create("in", ""); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out | out.imp: dyndep\n" -); - fs_.Tick(); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(2u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[1]); -} - -TEST_F(BuildTest, DyndepBuildDiscoverNewOutputWithMultipleRules1) { - // Verify that a dyndep file can be built and loaded to discover - // a new output of an edge that is already the output of another edge. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build out1 | out-twice.imp: touch in\n" -"build out2: touch in || dd\n" -" dyndep = dd\n" - )); - fs_.Create("in", ""); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out2 | out-twice.imp: dyndep\n" -); - fs_.Tick(); - fs_.Create("out1", ""); - fs_.Create("out2", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - EXPECT_EQ("", err); - - EXPECT_FALSE(builder_.Build(&err)); - EXPECT_EQ("multiple rules generate out-twice.imp", err); -} - -TEST_F(BuildTest, DyndepBuildDiscoverNewOutputWithMultipleRules2) { - // Verify that a dyndep file can be built and loaded to discover - // a new output of an edge that is already the output of another - // edge also discovered by dyndep. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd1: cp dd1-in\n" -"build out1: touch || dd1\n" -" dyndep = dd1\n" -"build dd2: cp dd2-in || dd1\n" // make order predictable for test -"build out2: touch || dd2\n" -" dyndep = dd2\n" -)); - fs_.Create("out1", ""); - fs_.Create("out2", ""); - fs_.Create("dd1-in", -"ninja_dyndep_version = 1\n" -"build out1 | out-twice.imp: dyndep\n" -); - fs_.Create("dd2-in", ""); - fs_.Create("dd2", -"ninja_dyndep_version = 1\n" -"build out2 | out-twice.imp: dyndep\n" -); - fs_.Tick(); - fs_.Create("out1", ""); - fs_.Create("out2", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - EXPECT_EQ("", err); - - EXPECT_FALSE(builder_.Build(&err)); - EXPECT_EQ("multiple rules generate out-twice.imp", err); -} - -TEST_F(BuildTest, DyndepBuildDiscoverNewInput) { - // Verify that a dyndep file can be built and loaded to discover - // a new input to an edge. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build in: touch\n" -"build out: touch || dd\n" -" dyndep = dd\n" - )); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out: dyndep | in\n" -); - fs_.Tick(); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch in", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch out", command_runner_.commands_ran_[2]); -} - -TEST_F(BuildTest, DyndepBuildDiscoverImplicitConnection) { - // Verify that a dyndep file can be built and loaded to discover - // that one edge has an implicit output that is also an implicit - // input of another edge. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build tmp: touch || dd\n" -" dyndep = dd\n" -"build out: touch || dd\n" -" dyndep = dd\n" -)); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out | out.imp: dyndep | tmp.imp\n" -"build tmp | tmp.imp: dyndep\n" -); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch tmp tmp.imp", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[2]); -} - -TEST_F(BuildTest, DyndepBuildDiscoverNowWantEdge) { - // Verify that a dyndep file can be built and loaded to discover - // that an edge is actually wanted due to a missing implicit output. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build tmp: touch || dd\n" -" dyndep = dd\n" -"build out: touch tmp || dd\n" -" dyndep = dd\n" -)); - fs_.Create("tmp", ""); - fs_.Create("out", ""); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -"build tmp | tmp.imp: dyndep\n" -); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch tmp tmp.imp", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[2]); -} - -TEST_F(BuildTest, DyndepBuildDiscoverNowWantEdgeAndDependent) { - // Verify that a dyndep file can be built and loaded to discover - // that an edge and a dependent are actually wanted. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build tmp: touch || dd\n" -" dyndep = dd\n" -"build out: touch tmp\n" -)); - fs_.Create("tmp", ""); - fs_.Create("out", ""); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build tmp | tmp.imp: dyndep\n" -); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch tmp tmp.imp", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch out out.imp", command_runner_.commands_ran_[2]); -} - -TEST_F(BuildTest, DyndepBuildDiscoverCircular) { - // Verify that a dyndep file can be built and loaded to discover - // and reject a circular dependency. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule r\n" -" command = unused\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build out: r in || dd\n" -" depfile = out.d\n" -" dyndep = dd\n" -"build in: r || dd\n" -" dyndep = dd\n" - )); - fs_.Create("out.d", "out: inimp\n"); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out | circ: dyndep\n" -"build in: dyndep | circ\n" - ); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - - EXPECT_FALSE(builder_.Build(&err)); - // Depending on how the pointers in Plan::ready_ work out, we could have - // discovered the cycle from either starting point. - EXPECT_TRUE(err == "dependency cycle: circ -> in -> circ" || - err == "dependency cycle: in -> circ -> in"); -} - -TEST_F(BuildWithLogTest, DyndepBuildDiscoverRestat) { - // Verify that a dyndep file can be built and loaded to discover - // that an edge has a restat binding. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule true\n" -" command = true\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd: cp dd-in\n" -"build out1: true in || dd\n" -" dyndep = dd\n" -"build out2: cat out1\n")); - - fs_.Create("out1", ""); - fs_.Create("out2", ""); - fs_.Create("dd-in", -"ninja_dyndep_version = 1\n" -"build out1: dyndep\n" -" restat = 1\n" -); - fs_.Tick(); - fs_.Create("in", ""); - - // Do a pre-build so that there's commands in the log for the outputs, - // otherwise, the lack of an entry in the build log will cause "out2" to - // rebuild regardless of restat. 
- string err; - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd-in dd", command_runner_.commands_ran_[0]); - EXPECT_EQ("true", command_runner_.commands_ran_[1]); - EXPECT_EQ("cat out1 > out2", command_runner_.commands_ran_[2]); - - command_runner_.commands_ran_.clear(); - state_.Reset(); - fs_.Tick(); - fs_.Create("in", ""); - - // We touched "in", so we should build "out1". But because "true" does not - // touch "out1", we should cancel the build of "out2". - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - ASSERT_EQ(1u, command_runner_.commands_ran_.size()); - EXPECT_EQ("true", command_runner_.commands_ran_[0]); -} - -TEST_F(BuildTest, DyndepBuildDiscoverScheduledEdge) { - // Verify that a dyndep file can be built and loaded to discover a - // new input that itself is an output from an edge that has already - // been scheduled but not finished. We should not re-schedule it. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build out1 | out1.imp: touch\n" -"build zdd: cp zdd-in\n" -" verify_active_edge = out1\n" // verify out1 is active when zdd is finished -"build out2: cp out1 || zdd\n" -" dyndep = zdd\n" -)); - fs_.Create("zdd-in", -"ninja_dyndep_version = 1\n" -"build out2: dyndep | out1.imp\n" -); - - // Enable concurrent builds so that we can load the dyndep file - // while another edge is still active. - command_runner_.max_active_edges_ = 2; - - // During the build "out1" and "zdd" should be built concurrently. - // The fake command runner will finish these in reverse order - // of the names of the first outputs, so "zdd" will finish first - // and we will load the dyndep file while the edge for "out1" is - // still active. 
This will add a new dependency on "out1.imp", - // also produced by the active edge. The builder should not - // re-schedule the already-active edge. - - string err; - EXPECT_TRUE(builder_.AddTarget("out1", &err)); - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - // Depending on how the pointers in Plan::ready_ work out, the first - // two commands may have run in either order. - EXPECT_TRUE((command_runner_.commands_ran_[0] == "touch out1 out1.imp" && - command_runner_.commands_ran_[1] == "cp zdd-in zdd") || - (command_runner_.commands_ran_[1] == "touch out1 out1.imp" && - command_runner_.commands_ran_[0] == "cp zdd-in zdd")); - EXPECT_EQ("cp out1 out2", command_runner_.commands_ran_[2]); -} - -TEST_F(BuildTest, DyndepTwoLevelDirect) { - // Verify that a clean dyndep file can depend on a dirty dyndep file - // and be loaded properly after the dirty one is built and loaded. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd1: cp dd1-in\n" -"build out1 | out1.imp: touch || dd1\n" -" dyndep = dd1\n" -"build dd2: cp dd2-in || dd1\n" // direct order-only dep on dd1 -"build out2: touch || dd2\n" -" dyndep = dd2\n" -)); - fs_.Create("out1.imp", ""); - fs_.Create("out2", ""); - fs_.Create("out2.imp", ""); - fs_.Create("dd1-in", -"ninja_dyndep_version = 1\n" -"build out1: dyndep\n" -); - fs_.Create("dd2-in", ""); - fs_.Create("dd2", -"ninja_dyndep_version = 1\n" -"build out2 | out2.imp: dyndep | out1.imp\n" -); - - // During the build dd1 should be built and loaded. The RecomputeDirty - // called as a result of loading dd1 should not cause dd2 to be loaded - // because the builder will never get a chance to update the build plan - // to account for dd2. 
Instead dd2 should only be later loaded once the - // builder recognizes that it is now ready (as its order-only dependency - // on dd1 has been satisfied). This test case verifies that each dyndep - // file is loaded to update the build graph independently. - - string err; - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd1-in dd1", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch out1 out1.imp", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch out2 out2.imp", command_runner_.commands_ran_[2]); -} - -TEST_F(BuildTest, DyndepTwoLevelIndirect) { - // Verify that dyndep files can add to an edge new implicit inputs that - // correspond to implicit outputs added to other edges by other dyndep - // files on which they (order-only) depend. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out $out.imp\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd1: cp dd1-in\n" -"build out1: touch || dd1\n" -" dyndep = dd1\n" -"build dd2: cp dd2-in || out1\n" // indirect order-only dep on dd1 -"build out2: touch || dd2\n" -" dyndep = dd2\n" -)); - fs_.Create("out1.imp", ""); - fs_.Create("out2", ""); - fs_.Create("out2.imp", ""); - fs_.Create("dd1-in", -"ninja_dyndep_version = 1\n" -"build out1 | out1.imp: dyndep\n" -); - fs_.Create("dd2-in", ""); - fs_.Create("dd2", -"ninja_dyndep_version = 1\n" -"build out2 | out2.imp: dyndep | out1.imp\n" -); - - // During the build dd1 should be built and loaded. Then dd2 should - // be built and loaded. Loading dd2 should cause the builder to - // recognize that out2 needs to be built even though it was originally - // clean without dyndep info. 
- - string err; - EXPECT_TRUE(builder_.AddTarget("out2", &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(3u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd1-in dd1", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch out1 out1.imp", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch out2 out2.imp", command_runner_.commands_ran_[2]); -} - -TEST_F(BuildTest, DyndepTwoLevelDiscoveredReady) { - // Verify that a dyndep file can discover a new input whose - // edge also has a dyndep file that is ready to load immediately. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd0: cp dd0-in\n" -"build dd1: cp dd1-in\n" -"build in: touch\n" -"build tmp: touch || dd0\n" -" dyndep = dd0\n" -"build out: touch || dd1\n" -" dyndep = dd1\n" - )); - fs_.Create("dd1-in", -"ninja_dyndep_version = 1\n" -"build out: dyndep | tmp\n" -); - fs_.Create("dd0-in", ""); - fs_.Create("dd0", -"ninja_dyndep_version = 1\n" -"build tmp: dyndep | in\n" -); - fs_.Tick(); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(4u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd1-in dd1", command_runner_.commands_ran_[0]); - EXPECT_EQ("touch in", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch tmp", command_runner_.commands_ran_[2]); - EXPECT_EQ("touch out", command_runner_.commands_ran_[3]); -} - -TEST_F(BuildTest, DyndepTwoLevelDiscoveredDirty) { - // Verify that a dyndep file can discover a new input whose - // edge also has a dyndep file that needs to be built. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"rule cp\n" -" command = cp $in $out\n" -"build dd0: cp dd0-in\n" -"build dd1: cp dd1-in\n" -"build in: touch\n" -"build tmp: touch || dd0\n" -" dyndep = dd0\n" -"build out: touch || dd1\n" -" dyndep = dd1\n" - )); - fs_.Create("dd1-in", -"ninja_dyndep_version = 1\n" -"build out: dyndep | tmp\n" -); - fs_.Create("dd0-in", -"ninja_dyndep_version = 1\n" -"build tmp: dyndep | in\n" -); - fs_.Tick(); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(builder_.AddTarget("out", &err)); - EXPECT_EQ("", err); - - EXPECT_TRUE(builder_.Build(&err)); - EXPECT_EQ("", err); - ASSERT_EQ(5u, command_runner_.commands_ran_.size()); - EXPECT_EQ("cp dd1-in dd1", command_runner_.commands_ran_[0]); - EXPECT_EQ("cp dd0-in dd0", command_runner_.commands_ran_[1]); - EXPECT_EQ("touch in", command_runner_.commands_ran_[2]); - EXPECT_EQ("touch tmp", command_runner_.commands_ran_[3]); - EXPECT_EQ("touch out", command_runner_.commands_ran_[4]); -} diff --git a/ninja/src/canon_perftest.cc b/ninja/src/canon_perftest.cc deleted file mode 100644 index 03f4a2f1f95..00000000000 --- a/ninja/src/canon_perftest.cc +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include -#include - -#include "util.h" -#include "metrics.h" - -const char kPath[] = - "../../third_party/WebKit/Source/WebCore/" - "platform/leveldb/LevelDBWriteBatch.cpp"; - -int main() { - vector times; - string err; - - char buf[200]; - size_t len = strlen(kPath); - strcpy(buf, kPath); - - for (int j = 0; j < 5; ++j) { - const int kNumRepetitions = 2000000; - int64_t start = GetTimeMillis(); - uint64_t slash_bits; - for (int i = 0; i < kNumRepetitions; ++i) { - CanonicalizePath(buf, &len, &slash_bits, &err); - } - int delta = (int)(GetTimeMillis() - start); - times.push_back(delta); - } - - int min = times[0]; - int max = times[0]; - float total = 0; - for (size_t i = 0; i < times.size(); ++i) { - total += times[i]; - if (times[i] < min) - min = times[i]; - else if (times[i] > max) - max = times[i]; - } - - printf("min %dms max %dms avg %.1fms\n", - min, max, total / times.size()); -} diff --git a/ninja/src/clean.cc b/ninja/src/clean.cc deleted file mode 100644 index ca5665c9a50..00000000000 --- a/ninja/src/clean.cc +++ /dev/null @@ -1,321 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "clean.h" - -#include -#include - -#include "disk_interface.h" -#include "graph.h" -#include "state.h" -#include "util.h" -#include - -Cleaner::Cleaner(State* state, - const BuildConfig& config, - DiskInterface* disk_interface) - : state_(state), - config_(config), - dyndep_loader_(state, disk_interface), - removed_(), - cleaned_(), - cleaned_files_count_(0), - disk_interface_(disk_interface), - status_(0) { -} - -int Cleaner::RemoveFile(const string& path) { - return disk_interface_->RemoveFile(path); -} - -bool Cleaner::FileExists(const string& path) { - string err; - TimeStamp mtime = disk_interface_->Stat(path, &err); - if (mtime == -1) - Error("%s", err.c_str()); - return mtime > 0; // Treat Stat() errors as "file does not exist". -} - -void Cleaner::Report(const string& path) { - ++cleaned_files_count_; - if (IsVerbose()) - printf("Remove %s\n", path.c_str()); -} - -void Cleaner::Remove(const string& path) { - if (!IsAlreadyRemoved(path)) { - removed_.insert(path); - if (config_.dry_run) { - if (FileExists(path)) - Report(path); - } else { - int ret = RemoveFile(path); - if (ret == 0) - Report(path); - else if (ret == -1) - status_ = 1; - } - } -} - -bool Cleaner::IsAlreadyRemoved(const string& path) { - set::iterator i = removed_.find(path); - return (i != removed_.end()); -} - -void Cleaner::RemoveEdgeFiles(Edge* edge) { - string depfile = edge->GetUnescapedDepfile(); - if (!depfile.empty()) - Remove(depfile); - - string rspfile = edge->GetUnescapedRspfile(); - if (!rspfile.empty()) - Remove(rspfile); -} - -void Cleaner::PrintHeader() { - if (config_.verbosity == BuildConfig::QUIET) - return; - printf("Cleaning..."); - if (IsVerbose()) - printf("\n"); - else - printf(" "); - fflush(stdout); -} - -void Cleaner::PrintFooter() { - if (config_.verbosity == BuildConfig::QUIET) - return; - printf("%d files.\n", cleaned_files_count_); -} - -int Cleaner::CleanAll(bool generator) { - Reset(); - PrintHeader(); - LoadDyndeps(); - for (vector::iterator e = 
state_->edges_.begin(); - e != state_->edges_.end(); ++e) { - // Do not try to remove phony targets - if ((*e)->is_phony()) - continue; - // Do not remove generator's files unless generator specified. - if (!generator && (*e)->GetBindingBool("generator")) - continue; - for (vector::iterator out_node = (*e)->outputs_.begin(); - out_node != (*e)->outputs_.end(); ++out_node) { - Remove((*out_node)->path()); - } - - RemoveEdgeFiles(*e); - } - PrintFooter(); - return status_; -} - -void Cleaner::CleanDead(const BuildLog::Entries& entries) { - // Reset(); - set staleFiles ; - - for (BuildLog::Entries::const_iterator i = entries.begin(); - i != entries.end(); ++i) { - Node* n = state_->LookupNode(i->first); - if (!n || !n->in_edge()) { - string toDelete = i->first.AsString(); - int ret = RemoveFile(toDelete); - if (ret == 0) { - staleFiles.insert(toDelete); - } - } - } - - if (!staleFiles.empty()) { - printf("Stale output removed\n"); - if (!state_->cleaner.empty()) { - vector stale_cm; - for (set::const_iterator i = staleFiles.begin(); - i != staleFiles.end(); ++i) { - if (StringPiece::getCmjSuffix().IsSuffix(*i)) { - stale_cm.push_back(*i); - } - } - for (size_t i = 0; i < stale_cm.size(); ++i) { - string& cmj = stale_cm[i]; - cmj[cmj.size() - 1] = 't'; - RemoveFile(cmj); - string command = state_->cleaner + " -cmt-rm " + cmj; - printf("%s\n", command.c_str()); - system(command.c_str()); - cmj.push_back('i'); - RemoveFile(cmj); - } - } - } -} - -void Cleaner::DoCleanTarget(Node* target) { - if (Edge* e = target->in_edge()) { - // Do not try to remove phony targets - if (!e->is_phony()) { - Remove(target->path()); - RemoveEdgeFiles(e); - } - for (vector::iterator n = e->inputs_.begin(); n != e->inputs_.end(); - ++n) { - Node* next = *n; - // call DoCleanTarget recursively if this node has not been visited - if (cleaned_.count(next) == 0) { - DoCleanTarget(next); - } - } - } - - // mark this target to be cleaned already - cleaned_.insert(target); -} - -int 
Cleaner::CleanTarget(Node* target) { - assert(target); - - Reset(); - PrintHeader(); - LoadDyndeps(); - DoCleanTarget(target); - PrintFooter(); - return status_; -} - -int Cleaner::CleanTarget(const char* target) { - assert(target); - - Reset(); - Node* node = state_->LookupNode(target); - if (node) { - CleanTarget(node); - } else { - Error("unknown target '%s'", target); - status_ = 1; - } - return status_; -} - -int Cleaner::CleanTargets(int target_count, char* targets[]) { - Reset(); - PrintHeader(); - LoadDyndeps(); - for (int i = 0; i < target_count; ++i) { - string target_name = targets[i]; - uint64_t slash_bits; - string err; - if (!CanonicalizePath(&target_name, &slash_bits, &err)) { - Error("failed to canonicalize '%s': %s", target_name.c_str(), err.c_str()); - status_ = 1; - } else { - Node* target = state_->LookupNode(target_name); - if (target) { - if (IsVerbose()) - printf("Target %s\n", target_name.c_str()); - DoCleanTarget(target); - } else { - Error("unknown target '%s'", target_name.c_str()); - status_ = 1; - } - } - } - PrintFooter(); - return status_; -} - -void Cleaner::DoCleanRule(const Rule* rule) { - assert(rule); - - for (vector::iterator e = state_->edges_.begin(); - e != state_->edges_.end(); ++e) { - if ((*e)->rule().name() == rule->name()) { - for (vector::iterator out_node = (*e)->outputs_.begin(); - out_node != (*e)->outputs_.end(); ++out_node) { - Remove((*out_node)->path()); - RemoveEdgeFiles(*e); - } - } - } -} - -int Cleaner::CleanRule(const Rule* rule) { - assert(rule); - - Reset(); - PrintHeader(); - LoadDyndeps(); - DoCleanRule(rule); - PrintFooter(); - return status_; -} - -int Cleaner::CleanRule(const char* rule) { - assert(rule); - - Reset(); - const Rule* r = state_->bindings_.LookupRule(rule); - if (r) { - CleanRule(r); - } else { - Error("unknown rule '%s'", rule); - status_ = 1; - } - return status_; -} - -int Cleaner::CleanRules(int rule_count, char* rules[]) { - assert(rules); - - Reset(); - PrintHeader(); - 
LoadDyndeps(); - for (int i = 0; i < rule_count; ++i) { - const char* rule_name = rules[i]; - const Rule* rule = state_->bindings_.LookupRule(rule_name); - if (rule) { - if (IsVerbose()) - printf("Rule %s\n", rule_name); - DoCleanRule(rule); - } else { - Error("unknown rule '%s'", rule_name); - status_ = 1; - } - } - PrintFooter(); - return status_; -} - -void Cleaner::Reset() { - status_ = 0; - cleaned_files_count_ = 0; - removed_.clear(); - cleaned_.clear(); -} - -void Cleaner::LoadDyndeps() { - // Load dyndep files that exist, before they are cleaned. - for (vector::iterator e = state_->edges_.begin(); - e != state_->edges_.end(); ++e) { - if (Node* dyndep = (*e)->dyndep_) { - // Capture and ignore errors loading the dyndep file. - // We clean as much of the graph as we know. - std::string err; - dyndep_loader_.LoadDyndeps(dyndep, &err); - } - } -} diff --git a/ninja/src/clean.h b/ninja/src/clean.h deleted file mode 100644 index 9ff5f973459..00000000000 --- a/ninja/src/clean.h +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef NINJA_CLEAN_H_ -#define NINJA_CLEAN_H_ - -#include -#include - -#include "build.h" -#include "dyndep.h" -#include "build_log.h" - -using namespace std; - -struct State; -struct Node; -struct Rule; -struct DiskInterface; - -struct Cleaner { - /// Build a cleaner object with the given @a disk_interface - Cleaner(State* state, - const BuildConfig& config, - DiskInterface* disk_interface); - - /// Clean the given @a target and all the file built for it. - /// @return non-zero if an error occurs. - int CleanTarget(Node* target); - /// Clean the given target @a target. - /// @return non-zero if an error occurs. - int CleanTarget(const char* target); - /// Clean the given target @a targets. - /// @return non-zero if an error occurs. - int CleanTargets(int target_count, char* targets[]); - - /// Clean all built files, except for files created by generator rules. - /// @param generator If set, also clean files created by generator rules. - /// @return non-zero if an error occurs. - int CleanAll(bool generator = false); - - /// Clean all the file built with the given rule @a rule. - /// @return non-zero if an error occurs. - int CleanRule(const Rule* rule); - /// Clean the file produced by the given @a rule. - /// @return non-zero if an error occurs. - int CleanRule(const char* rule); - /// Clean the file produced by the given @a rules. - /// @return non-zero if an error occurs. - int CleanRules(int rule_count, char* rules[]); - /// Clean the files produced by previous builds that are no longer in the - /// manifest. - /// @return non-zero if an error occurs. - void CleanDead(const BuildLog::Entries& entries); - - /// @return the number of file cleaned. - int cleaned_files_count() const { - return cleaned_files_count_; - } - - /// @return whether the cleaner is in verbose mode. 
- bool IsVerbose() const { - return (config_.verbosity != BuildConfig::QUIET - && (config_.verbosity == BuildConfig::VERBOSE || config_.dry_run)); - } - - private: - /// Remove the file @a path. - /// @return whether the file has been removed. - int RemoveFile(const string& path); - /// @returns whether the file @a path exists. - bool FileExists(const string& path); - void Report(const string& path); - - /// Remove the given @a path file only if it has not been already removed. - void Remove(const string& path); - /// @return whether the given @a path has already been removed. - bool IsAlreadyRemoved(const string& path); - /// Remove the depfile and rspfile for an Edge. - void RemoveEdgeFiles(Edge* edge); - - /// Helper recursive method for CleanTarget(). - void DoCleanTarget(Node* target); - void PrintHeader(); - void PrintFooter(); - void DoCleanRule(const Rule* rule); - void Reset(); - - /// Load dependencies from dyndep bindings. - void LoadDyndeps(); - - State* state_; - const BuildConfig& config_; - DyndepLoader dyndep_loader_; - set removed_; - set cleaned_; - int cleaned_files_count_; - DiskInterface* disk_interface_; - int status_; -}; - -#endif // NINJA_CLEAN_H_ diff --git a/ninja/src/clean_test.cc b/ninja/src/clean_test.cc deleted file mode 100644 index 45187f4fa46..00000000000 --- a/ninja/src/clean_test.cc +++ /dev/null @@ -1,456 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "clean.h" -#include "build.h" - -#include "test.h" - -struct CleanTest : public StateTestWithBuiltinRules { - VirtualFileSystem fs_; - BuildConfig config_; - virtual void SetUp() { - config_.verbosity = BuildConfig::QUIET; - } -}; - -TEST_F(CleanTest, CleanAll) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build in1: cat src1\n" -"build out1: cat in1\n" -"build in2: cat src2\n" -"build out2: cat in2\n")); - fs_.Create("in1", ""); - fs_.Create("out1", ""); - fs_.Create("in2", ""); - fs_.Create("out2", ""); - - Cleaner cleaner(&state_, config_, &fs_); - - ASSERT_EQ(0, cleaner.cleaned_files_count()); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(4, cleaner.cleaned_files_count()); - EXPECT_EQ(4u, fs_.files_removed_.size()); - - // Check they are removed. - string err; - EXPECT_EQ(0, fs_.Stat("in1", &err)); - EXPECT_EQ(0, fs_.Stat("out1", &err)); - EXPECT_EQ(0, fs_.Stat("in2", &err)); - EXPECT_EQ(0, fs_.Stat("out2", &err)); - fs_.files_removed_.clear(); - - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(0, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanAllDryRun) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build in1: cat src1\n" -"build out1: cat in1\n" -"build in2: cat src2\n" -"build out2: cat in2\n")); - fs_.Create("in1", ""); - fs_.Create("out1", ""); - fs_.Create("in2", ""); - fs_.Create("out2", ""); - - config_.dry_run = true; - Cleaner cleaner(&state_, config_, &fs_); - - ASSERT_EQ(0, cleaner.cleaned_files_count()); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(4, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); - - // Check they are not removed. 
- string err; - EXPECT_LT(0, fs_.Stat("in1", &err)); - EXPECT_LT(0, fs_.Stat("out1", &err)); - EXPECT_LT(0, fs_.Stat("in2", &err)); - EXPECT_LT(0, fs_.Stat("out2", &err)); - fs_.files_removed_.clear(); - - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(4, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanTarget) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build in1: cat src1\n" -"build out1: cat in1\n" -"build in2: cat src2\n" -"build out2: cat in2\n")); - fs_.Create("in1", ""); - fs_.Create("out1", ""); - fs_.Create("in2", ""); - fs_.Create("out2", ""); - - Cleaner cleaner(&state_, config_, &fs_); - - ASSERT_EQ(0, cleaner.cleaned_files_count()); - ASSERT_EQ(0, cleaner.CleanTarget("out1")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(2u, fs_.files_removed_.size()); - - // Check they are removed. - string err; - EXPECT_EQ(0, fs_.Stat("in1", &err)); - EXPECT_EQ(0, fs_.Stat("out1", &err)); - EXPECT_LT(0, fs_.Stat("in2", &err)); - EXPECT_LT(0, fs_.Stat("out2", &err)); - fs_.files_removed_.clear(); - - ASSERT_EQ(0, cleaner.CleanTarget("out1")); - EXPECT_EQ(0, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanTargetDryRun) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build in1: cat src1\n" -"build out1: cat in1\n" -"build in2: cat src2\n" -"build out2: cat in2\n")); - fs_.Create("in1", ""); - fs_.Create("out1", ""); - fs_.Create("in2", ""); - fs_.Create("out2", ""); - - config_.dry_run = true; - Cleaner cleaner(&state_, config_, &fs_); - - ASSERT_EQ(0, cleaner.cleaned_files_count()); - ASSERT_EQ(0, cleaner.CleanTarget("out1")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); - - // Check they are not removed. 
- string err; - EXPECT_LT(0, fs_.Stat("in1", &err)); - EXPECT_LT(0, fs_.Stat("out1", &err)); - EXPECT_LT(0, fs_.Stat("in2", &err)); - EXPECT_LT(0, fs_.Stat("out2", &err)); - fs_.files_removed_.clear(); - - ASSERT_EQ(0, cleaner.CleanTarget("out1")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanRule) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cat_e\n" -" command = cat -e $in > $out\n" -"build in1: cat_e src1\n" -"build out1: cat in1\n" -"build in2: cat_e src2\n" -"build out2: cat in2\n")); - fs_.Create("in1", ""); - fs_.Create("out1", ""); - fs_.Create("in2", ""); - fs_.Create("out2", ""); - - Cleaner cleaner(&state_, config_, &fs_); - - ASSERT_EQ(0, cleaner.cleaned_files_count()); - ASSERT_EQ(0, cleaner.CleanRule("cat_e")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(2u, fs_.files_removed_.size()); - - // Check they are removed. - string err; - EXPECT_EQ(0, fs_.Stat("in1", &err)); - EXPECT_LT(0, fs_.Stat("out1", &err)); - EXPECT_EQ(0, fs_.Stat("in2", &err)); - EXPECT_LT(0, fs_.Stat("out2", &err)); - fs_.files_removed_.clear(); - - ASSERT_EQ(0, cleaner.CleanRule("cat_e")); - EXPECT_EQ(0, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanRuleDryRun) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cat_e\n" -" command = cat -e $in > $out\n" -"build in1: cat_e src1\n" -"build out1: cat in1\n" -"build in2: cat_e src2\n" -"build out2: cat in2\n")); - fs_.Create("in1", ""); - fs_.Create("out1", ""); - fs_.Create("in2", ""); - fs_.Create("out2", ""); - - config_.dry_run = true; - Cleaner cleaner(&state_, config_, &fs_); - - ASSERT_EQ(0, cleaner.cleaned_files_count()); - ASSERT_EQ(0, cleaner.CleanRule("cat_e")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); - - // Check they are not removed. 
- string err; - EXPECT_LT(0, fs_.Stat("in1", &err)); - EXPECT_LT(0, fs_.Stat("out1", &err)); - EXPECT_LT(0, fs_.Stat("in2", &err)); - EXPECT_LT(0, fs_.Stat("out2", &err)); - fs_.files_removed_.clear(); - - ASSERT_EQ(0, cleaner.CleanRule("cat_e")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(0u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanRuleGenerator) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule regen\n" -" command = cat $in > $out\n" -" generator = 1\n" -"build out1: cat in1\n" -"build out2: regen in2\n")); - fs_.Create("out1", ""); - fs_.Create("out2", ""); - - Cleaner cleaner(&state_, config_, &fs_); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(1, cleaner.cleaned_files_count()); - EXPECT_EQ(1u, fs_.files_removed_.size()); - - fs_.Create("out1", ""); - - EXPECT_EQ(0, cleaner.CleanAll(/*generator=*/true)); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(2u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanDepFile) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n" -" command = cc $in > $out\n" -" depfile = $out.d\n" -"build out1: cc in1\n")); - fs_.Create("out1", ""); - fs_.Create("out1.d", ""); - - Cleaner cleaner(&state_, config_, &fs_); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(2u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanDepFileOnCleanTarget) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n" -" command = cc $in > $out\n" -" depfile = $out.d\n" -"build out1: cc in1\n")); - fs_.Create("out1", ""); - fs_.Create("out1.d", ""); - - Cleaner cleaner(&state_, config_, &fs_); - EXPECT_EQ(0, cleaner.CleanTarget("out1")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(2u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanDepFileOnCleanRule) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n" -" command = cc $in > $out\n" -" depfile = $out.d\n" -"build out1: cc in1\n")); - fs_.Create("out1", 
""); - fs_.Create("out1.d", ""); - - Cleaner cleaner(&state_, config_, &fs_); - EXPECT_EQ(0, cleaner.CleanRule("cc")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(2u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanDyndep) { - // Verify that a dyndep file can be loaded to discover a new output - // to be cleaned. - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat in || dd\n" -" dyndep = dd\n" - )); - fs_.Create("in", ""); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out | out.imp: dyndep\n" -); - fs_.Create("out", ""); - fs_.Create("out.imp", ""); - - Cleaner cleaner(&state_, config_, &fs_); - - ASSERT_EQ(0, cleaner.cleaned_files_count()); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(2u, fs_.files_removed_.size()); - - string err; - EXPECT_EQ(0, fs_.Stat("out", &err)); - EXPECT_EQ(0, fs_.Stat("out.imp", &err)); -} - -TEST_F(CleanTest, CleanDyndepMissing) { - // Verify that a missing dyndep file is tolerated. 
- ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat in || dd\n" -" dyndep = dd\n" - )); - fs_.Create("in", ""); - fs_.Create("out", ""); - fs_.Create("out.imp", ""); - - Cleaner cleaner(&state_, config_, &fs_); - - ASSERT_EQ(0, cleaner.cleaned_files_count()); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(1, cleaner.cleaned_files_count()); - EXPECT_EQ(1u, fs_.files_removed_.size()); - - string err; - EXPECT_EQ(0, fs_.Stat("out", &err)); - EXPECT_EQ(1, fs_.Stat("out.imp", &err)); -} - -TEST_F(CleanTest, CleanRspFile) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc\n" -" command = cc $in > $out\n" -" rspfile = $rspfile\n" -" rspfile_content=$in\n" -"build out1: cc in1\n" -" rspfile = cc1.rsp\n")); - fs_.Create("out1", ""); - fs_.Create("cc1.rsp", ""); - - Cleaner cleaner(&state_, config_, &fs_); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_EQ(2u, fs_.files_removed_.size()); -} - -TEST_F(CleanTest, CleanRsp) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cat_rsp \n" -" command = cat $rspfile > $out\n" -" rspfile = $rspfile\n" -" rspfile_content = $in\n" -"build in1: cat src1\n" -"build out1: cat in1\n" -"build in2: cat_rsp src2\n" -" rspfile=in2.rsp\n" -"build out2: cat_rsp in2\n" -" rspfile=out2.rsp\n" -)); - fs_.Create("in1", ""); - fs_.Create("out1", ""); - fs_.Create("in2.rsp", ""); - fs_.Create("out2.rsp", ""); - fs_.Create("in2", ""); - fs_.Create("out2", ""); - - Cleaner cleaner(&state_, config_, &fs_); - ASSERT_EQ(0, cleaner.cleaned_files_count()); - ASSERT_EQ(0, cleaner.CleanTarget("out1")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - ASSERT_EQ(0, cleaner.CleanTarget("in2")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - ASSERT_EQ(0, cleaner.CleanRule("cat_rsp")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - - EXPECT_EQ(6u, fs_.files_removed_.size()); - - // Check they are removed. 
- string err; - EXPECT_EQ(0, fs_.Stat("in1", &err)); - EXPECT_EQ(0, fs_.Stat("out1", &err)); - EXPECT_EQ(0, fs_.Stat("in2", &err)); - EXPECT_EQ(0, fs_.Stat("out2", &err)); - EXPECT_EQ(0, fs_.Stat("in2.rsp", &err)); - EXPECT_EQ(0, fs_.Stat("out2.rsp", &err)); -} - -TEST_F(CleanTest, CleanFailure) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, - "build dir: cat src1\n")); - fs_.MakeDir("dir"); - Cleaner cleaner(&state_, config_, &fs_); - EXPECT_NE(0, cleaner.CleanAll()); -} - -TEST_F(CleanTest, CleanPhony) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build phony: phony t1 t2\n" -"build t1: cat\n" -"build t2: cat\n")); - - fs_.Create("phony", ""); - fs_.Create("t1", ""); - fs_.Create("t2", ""); - - // Check that CleanAll does not remove "phony". - Cleaner cleaner(&state_, config_, &fs_); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_LT(0, fs_.Stat("phony", &err)); - - fs_.Create("t1", ""); - fs_.Create("t2", ""); - - // Check that CleanTarget does not remove "phony". 
- EXPECT_EQ(0, cleaner.CleanTarget("phony")); - EXPECT_EQ(2, cleaner.cleaned_files_count()); - EXPECT_LT(0, fs_.Stat("phony", &err)); -} - -TEST_F(CleanTest, CleanDepFileAndRspFileWithSpaces) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule cc_dep\n" -" command = cc $in > $out\n" -" depfile = $out.d\n" -"rule cc_rsp\n" -" command = cc $in > $out\n" -" rspfile = $out.rsp\n" -" rspfile_content = $in\n" -"build out$ 1: cc_dep in$ 1\n" -"build out$ 2: cc_rsp in$ 1\n" -)); - fs_.Create("out 1", ""); - fs_.Create("out 2", ""); - fs_.Create("out 1.d", ""); - fs_.Create("out 2.rsp", ""); - - Cleaner cleaner(&state_, config_, &fs_); - EXPECT_EQ(0, cleaner.CleanAll()); - EXPECT_EQ(4, cleaner.cleaned_files_count()); - EXPECT_EQ(4u, fs_.files_removed_.size()); - - string err; - EXPECT_EQ(0, fs_.Stat("out 1", &err)); - EXPECT_EQ(0, fs_.Stat("out 2", &err)); - EXPECT_EQ(0, fs_.Stat("out 1.d", &err)); - EXPECT_EQ(0, fs_.Stat("out 2.rsp", &err)); -} diff --git a/ninja/src/clparser.cc b/ninja/src/clparser.cc deleted file mode 100644 index 7994c06f4ee..00000000000 --- a/ninja/src/clparser.cc +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "clparser.h" - -#include -#include -#include - -#include "metrics.h" -#include "string_piece_util.h" - -#ifdef _WIN32 -#include "includes_normalize.h" -#include "string_piece.h" -#else -#include "util.h" -#endif - -namespace { - -/// Return true if \a input ends with \a needle. -bool EndsWith(const string& input, const string& needle) { - return (input.size() >= needle.size() && - input.substr(input.size() - needle.size()) == needle); -} - -} // anonymous namespace - -// static -string CLParser::FilterShowIncludes(const string& line, - const string& deps_prefix) { - const string kDepsPrefixEnglish = "Note: including file: "; - const char* in = line.c_str(); - const char* end = in + line.size(); - const string& prefix = deps_prefix.empty() ? kDepsPrefixEnglish : deps_prefix; - if (end - in > (int)prefix.size() && - memcmp(in, prefix.c_str(), (int)prefix.size()) == 0) { - in += prefix.size(); - while (*in == ' ') - ++in; - return line.substr(in - line.c_str()); - } - return ""; -} - -// static -bool CLParser::IsSystemInclude(string path) { - transform(path.begin(), path.end(), path.begin(), ToLowerASCII); - // TODO: this is a heuristic, perhaps there's a better way? - return (path.find("program files") != string::npos || - path.find("microsoft visual studio") != string::npos); -} - -// static -bool CLParser::FilterInputFilename(string line) { - transform(line.begin(), line.end(), line.begin(), ToLowerASCII); - // TODO: other extensions, like .asm? - return EndsWith(line, ".c") || - EndsWith(line, ".cc") || - EndsWith(line, ".cxx") || - EndsWith(line, ".cpp"); -} - -// static -bool CLParser::Parse(const string& output, const string& deps_prefix, - string* filtered_output, string* err) { - METRIC_RECORD("CLParser::Parse"); - - // Loop over all lines in the output to process them. 
- assert(&output != filtered_output); - size_t start = 0; -#ifdef _WIN32 - IncludesNormalize normalizer("."); -#endif - - while (start < output.size()) { - size_t end = output.find_first_of("\r\n", start); - if (end == string::npos) - end = output.size(); - string line = output.substr(start, end - start); - - string include = FilterShowIncludes(line, deps_prefix); - if (!include.empty()) { - string normalized; -#ifdef _WIN32 - if (!normalizer.Normalize(include, &normalized, err)) - return false; -#else - // TODO: should this make the path relative to cwd? - normalized = include; - uint64_t slash_bits; - if (!CanonicalizePath(&normalized, &slash_bits, err)) - return false; -#endif - if (!IsSystemInclude(normalized)) - includes_.insert(normalized); - } else if (FilterInputFilename(line)) { - // Drop it. - // TODO: if we support compiling multiple output files in a single - // cl.exe invocation, we should stash the filename. - } else { - filtered_output->append(line); - filtered_output->append("\n"); - } - - if (end < output.size() && output[end] == '\r') - ++end; - if (end < output.size() && output[end] == '\n') - ++end; - start = end; - } - - return true; -} diff --git a/ninja/src/clparser.h b/ninja/src/clparser.h deleted file mode 100644 index e597e7ebc2c..00000000000 --- a/ninja/src/clparser.h +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef NINJA_CLPARSER_H_ -#define NINJA_CLPARSER_H_ - -#include -#include -using namespace std; - -/// Visual Studio's cl.exe requires some massaging to work with Ninja; -/// for example, it emits include information on stderr in a funny -/// format when building with /showIncludes. This class parses this -/// output. -struct CLParser { - /// Parse a line of cl.exe output and extract /showIncludes info. - /// If a dependency is extracted, returns a nonempty string. - /// Exposed for testing. - static string FilterShowIncludes(const string& line, - const string& deps_prefix); - - /// Return true if a mentioned include file is a system path. - /// Filtering these out reduces dependency information considerably. - static bool IsSystemInclude(string path); - - /// Parse a line of cl.exe output and return true if it looks like - /// it's printing an input filename. This is a heuristic but it appears - /// to be the best we can do. - /// Exposed for testing. - static bool FilterInputFilename(string line); - - /// Parse the full output of cl, filling filtered_output with the text that - /// should be printed (if any). Returns true on success, or false with err - /// filled. output must not be the same object as filtered_object. - bool Parse(const string& output, const string& deps_prefix, - string* filtered_output, string* err); - - set includes_; -}; - -#endif // NINJA_CLPARSER_H_ diff --git a/ninja/src/clparser_perftest.cc b/ninja/src/clparser_perftest.cc deleted file mode 100644 index 7ac52302b1c..00000000000 --- a/ninja/src/clparser_perftest.cc +++ /dev/null @@ -1,157 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include -#include - -#include "clparser.h" -#include "metrics.h" - -int main(int argc, char* argv[]) { - // Output of /showIncludes from #include - string perf_testdata = - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\iostream\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\istream\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\ostream\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\ios\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xlocnum\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\climits\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\yvals.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xkeycheck.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\crtdefs.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\sal.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\ConcurrencySal.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vadefs.h\r\n" - 
"Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\use_ansi.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\limits.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cmath\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\math.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xtgmath.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xtr1common\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cstdlib\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\stdlib.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_malloc.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_search.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\stddef.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wstdlib.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cstdio\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\stdio.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wstdio.h\r\n" - 
"Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_stdio_config.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\streambuf\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xiosbase\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xlocale\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cstring\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\string.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_memory.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_memcpy_s.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\errno.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime_string.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wstring.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\stdexcept\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\exception\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\type_traits\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xstddef\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cstddef\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 
14.0\\VC\\INCLUDE\\initializer_list\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\malloc.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime_exception.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\eh.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_terminate.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xstring\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xmemory0\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cstdint\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\stdint.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\limits\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\ymath.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cfloat\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\float.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cwchar\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\wchar.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wconio.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wctype.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows 
Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wdirect.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wio.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_share.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wprocess.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\corecrt_wtime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\sys/stat.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\sys/types.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\new\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime_new.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xutility\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\utility\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\iosfwd\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\crtdbg.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime_new_debug.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xatomic0.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\intrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program 
Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\setjmp.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\immintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\wmmintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\nmmintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\smmintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\tmmintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\pmmintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\emmintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xmmintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\mmintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\ammintrin.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\mm3dnow.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\typeinfo\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime_typeinfo.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\vcruntime.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xlocinfo\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft 
Visual Studio 14.0\\VC\\INCLUDE\\xlocinfo.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\ctype.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\locale.h\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\xfacet\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\system_error\r\n" - "Note: including file: C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE\\cerrno\r\n" - "Note: including file: C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt\\share.h\r\n"; - - for (int limit = 1 << 10; limit < (1<<20); limit *= 2) { - int64_t start = GetTimeMillis(); - for (int rep = 0; rep < limit; ++rep) { - string output; - string err; - - CLParser parser; - if (!parser.Parse(perf_testdata, "", &output, &err)) { - printf("%s\n", err.c_str()); - return 1; - } - } - int64_t end = GetTimeMillis(); - - if (end - start > 2000) { - int delta_ms = (int)(end - start); - printf("Parse %d times in %dms avg %.1fus\n", - limit, delta_ms, float(delta_ms * 1000) / limit); - break; - } - } - - return 0; -} diff --git a/ninja/src/clparser_test.cc b/ninja/src/clparser_test.cc deleted file mode 100644 index 1549ab1cb95..00000000000 --- a/ninja/src/clparser_test.cc +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -#include "clparser.h" - -#include "test.h" -#include "util.h" - -TEST(CLParserTest, ShowIncludes) { - ASSERT_EQ("", CLParser::FilterShowIncludes("", "")); - - ASSERT_EQ("", CLParser::FilterShowIncludes("Sample compiler output", "")); - ASSERT_EQ("c:\\Some Files\\foobar.h", - CLParser::FilterShowIncludes("Note: including file: " - "c:\\Some Files\\foobar.h", "")); - ASSERT_EQ("c:\\initspaces.h", - CLParser::FilterShowIncludes("Note: including file: " - "c:\\initspaces.h", "")); - ASSERT_EQ("c:\\initspaces.h", - CLParser::FilterShowIncludes("Non-default prefix: inc file: " - "c:\\initspaces.h", - "Non-default prefix: inc file:")); -} - -TEST(CLParserTest, FilterInputFilename) { - ASSERT_TRUE(CLParser::FilterInputFilename("foobar.cc")); - ASSERT_TRUE(CLParser::FilterInputFilename("foo bar.cc")); - ASSERT_TRUE(CLParser::FilterInputFilename("baz.c")); - ASSERT_TRUE(CLParser::FilterInputFilename("FOOBAR.CC")); - - ASSERT_FALSE(CLParser::FilterInputFilename( - "src\\cl_helper.cc(166) : fatal error C1075: end " - "of file found ...")); -} - -TEST(CLParserTest, ParseSimple) { - CLParser parser; - string output, err; - ASSERT_TRUE(parser.Parse( - "foo\r\n" - "Note: inc file prefix: foo.h\r\n" - "bar\r\n", - "Note: inc file prefix:", &output, &err)); - - ASSERT_EQ("foo\nbar\n", output); - ASSERT_EQ(1u, parser.includes_.size()); - ASSERT_EQ("foo.h", *parser.includes_.begin()); -} - -TEST(CLParserTest, ParseFilenameFilter) { - CLParser parser; - string output, err; - ASSERT_TRUE(parser.Parse( - "foo.cc\r\n" - "cl: warning\r\n", - "", &output, &err)); - ASSERT_EQ("cl: warning\n", output); -} - -TEST(CLParserTest, ParseSystemInclude) { - CLParser parser; - string output, err; - ASSERT_TRUE(parser.Parse( - "Note: including file: c:\\Program Files\\foo.h\r\n" - "Note: including file: d:\\Microsoft Visual Studio\\bar.h\r\n" - "Note: including file: path.h\r\n", - "", 
&output, &err)); - // We should have dropped the first two includes because they look like - // system headers. - ASSERT_EQ("", output); - ASSERT_EQ(1u, parser.includes_.size()); - ASSERT_EQ("path.h", *parser.includes_.begin()); -} - -TEST(CLParserTest, DuplicatedHeader) { - CLParser parser; - string output, err; - ASSERT_TRUE(parser.Parse( - "Note: including file: foo.h\r\n" - "Note: including file: bar.h\r\n" - "Note: including file: foo.h\r\n", - "", &output, &err)); - // We should have dropped one copy of foo.h. - ASSERT_EQ("", output); - ASSERT_EQ(2u, parser.includes_.size()); -} - -TEST(CLParserTest, DuplicatedHeaderPathConverted) { - CLParser parser; - string output, err; - - // This isn't inline in the Parse() call below because the #ifdef in - // a macro expansion would confuse MSVC2013's preprocessor. - const char kInput[] = - "Note: including file: sub/./foo.h\r\n" - "Note: including file: bar.h\r\n" -#ifdef _WIN32 - "Note: including file: sub\\foo.h\r\n"; -#else - "Note: including file: sub/foo.h\r\n"; -#endif - ASSERT_TRUE(parser.Parse(kInput, "", &output, &err)); - // We should have dropped one copy of foo.h. - ASSERT_EQ("", output); - ASSERT_EQ(2u, parser.includes_.size()); -} diff --git a/ninja/src/debug_flags.cc b/ninja/src/debug_flags.cc deleted file mode 100644 index 44b14c483b5..00000000000 --- a/ninja/src/debug_flags.cc +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -bool g_explaining = false; - -bool g_keep_depfile = false; - -bool g_keep_rsp = false; - -bool g_experimental_statcache = true; diff --git a/ninja/src/debug_flags.h b/ninja/src/debug_flags.h deleted file mode 100644 index e08a43b438d..00000000000 --- a/ninja/src/debug_flags.h +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_EXPLAIN_H_ -#define NINJA_EXPLAIN_H_ - -#include - -#define EXPLAIN(fmt, ...) { \ - if (g_explaining) \ - fprintf(stderr, "ninja explain: " fmt "\n", __VA_ARGS__); \ -} - -extern bool g_explaining; - -extern bool g_keep_depfile; - -extern bool g_keep_rsp; - -extern bool g_experimental_statcache; - -#endif // NINJA_EXPLAIN_H_ diff --git a/ninja/src/depfile_parser.cc b/ninja/src/depfile_parser.cc deleted file mode 100644 index f774f7e21ee..00000000000 --- a/ninja/src/depfile_parser.cc +++ /dev/null @@ -1,339 +0,0 @@ -/* Generated by re2c 2.0.3 */ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "depfile_parser.h" -#include "util.h" - -DepfileParser::DepfileParser(DepfileParserOptions options) - : options_(options) -{ -} - -// A note on backslashes in Makefiles, from reading the docs: -// Backslash-newline is the line continuation character. -// Backslash-# escapes a # (otherwise meaningful as a comment start). -// Backslash-% escapes a % (otherwise meaningful as a special). -// Finally, quoting the GNU manual, "Backslashes that are not in danger -// of quoting ‘%’ characters go unmolested." -// How do you end a line with a backslash? The netbsd Make docs suggest -// reading the result of a shell command echoing a backslash! -// -// Rather than implement all of above, we follow what GCC/Clang produces: -// Backslashes escape a space or hash sign. -// When a space is preceded by 2N+1 backslashes, it is represents N backslashes -// followed by space. -// When a space is preceded by 2N backslashes, it represents 2N backslashes at -// the end of a filename. -// A hash sign is escaped by a single backslash. All other backslashes remain -// unchanged. -// -// If anyone actually has depfiles that rely on the more complicated -// behavior we can adjust this. -bool DepfileParser::Parse(string* content, string* err) { - // in: current parser input point. - // end: end of input. - // parsing_targets: whether we are parsing targets or dependencies. 
- char* in = &(*content)[0]; - char* end = in + content->size(); - bool have_target = false; - bool have_secondary_target_on_this_rule = false; - bool have_newline_since_primary_target = false; - bool warned_distinct_target_lines = false; - bool parsing_targets = true; - while (in < end) { - bool have_newline = false; - // out: current output point (typically same as in, but can fall behind - // as we de-escape backslashes). - char* out = in; - // filename: start of the current parsed filename. - char* filename = out; - for (;;) { - // start: beginning of the current parsed span. - const char* start = in; - char* yymarker = NULL; - - { - unsigned char yych; - static const unsigned char yybm[] = { - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 128, 0, 0, 0, 128, 0, 0, - 128, 128, 0, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 0, 0, 128, 0, 0, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 0, 128, 0, 128, - 0, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 0, 128, 128, 0, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - }; - yych = *in; - if (yybm[0+yych] & 128) 
{ - goto yy9; - } - if (yych <= '\r') { - if (yych <= '\t') { - if (yych >= 0x01) goto yy4; - } else { - if (yych <= '\n') goto yy6; - if (yych <= '\f') goto yy4; - goto yy8; - } - } else { - if (yych <= '$') { - if (yych <= '#') goto yy4; - goto yy12; - } else { - if (yych <= '?') goto yy4; - if (yych <= '\\') goto yy13; - goto yy4; - } - } - ++in; - { - break; - } -yy4: - ++in; -yy5: - { - // For any other character (e.g. whitespace), swallow it here, - // allowing the outer logic to loop around again. - break; - } -yy6: - ++in; - { - // A newline ends the current file name and the current rule. - have_newline = true; - break; - } -yy8: - yych = *++in; - if (yych == '\n') goto yy6; - goto yy5; -yy9: - yych = *++in; - if (yybm[0+yych] & 128) { - goto yy9; - } -yy11: - { - // Got a span of plain text. - int len = (int)(in - start); - // Need to shift it over if we're overwriting backslashes. - if (out < start) - memmove(out, start, len); - out += len; - continue; - } -yy12: - yych = *++in; - if (yych == '$') goto yy14; - goto yy5; -yy13: - yych = *(yymarker = ++in); - if (yych <= 0x1F) { - if (yych <= '\n') { - if (yych <= 0x00) goto yy5; - if (yych <= '\t') goto yy16; - goto yy17; - } else { - if (yych == '\r') goto yy19; - goto yy16; - } - } else { - if (yych <= '#') { - if (yych <= ' ') goto yy21; - if (yych <= '"') goto yy16; - goto yy23; - } else { - if (yych == '\\') goto yy25; - goto yy16; - } - } -yy14: - ++in; - { - // De-escape dollar character. - *out++ = '$'; - continue; - } -yy16: - ++in; - goto yy11; -yy17: - ++in; - { - // A line continuation ends the current file name. - break; - } -yy19: - yych = *++in; - if (yych == '\n') goto yy17; - in = yymarker; - goto yy5; -yy21: - ++in; - { - // 2N+1 backslashes plus space -> N backslashes plus space. 
- int len = (int)(in - start); - int n = len / 2 - 1; - if (out < start) - memset(out, '\\', n); - out += n; - *out++ = ' '; - continue; - } -yy23: - ++in; - { - // De-escape hash sign, but preserve other leading backslashes. - int len = (int)(in - start); - if (len > 2 && out < start) - memset(out, '\\', len - 2); - out += len - 2; - *out++ = '#'; - continue; - } -yy25: - yych = *++in; - if (yych <= 0x1F) { - if (yych <= '\n') { - if (yych <= 0x00) goto yy11; - if (yych <= '\t') goto yy16; - goto yy11; - } else { - if (yych == '\r') goto yy11; - goto yy16; - } - } else { - if (yych <= '#') { - if (yych <= ' ') goto yy26; - if (yych <= '"') goto yy16; - goto yy23; - } else { - if (yych == '\\') goto yy28; - goto yy16; - } - } -yy26: - ++in; - { - // 2N backslashes plus space -> 2N backslashes, end of filename. - int len = (int)(in - start); - if (out < start) - memset(out, '\\', len - 1); - out += len - 1; - break; - } -yy28: - yych = *++in; - if (yych <= 0x1F) { - if (yych <= '\n') { - if (yych <= 0x00) goto yy11; - if (yych <= '\t') goto yy16; - goto yy11; - } else { - if (yych == '\r') goto yy11; - goto yy16; - } - } else { - if (yych <= '#') { - if (yych <= ' ') goto yy21; - if (yych <= '"') goto yy16; - goto yy23; - } else { - if (yych == '\\') goto yy25; - goto yy16; - } - } - } - - } - - int len = (int)(out - filename); - const bool is_dependency = !parsing_targets; - if (len > 0 && filename[len - 1] == ':') { - len--; // Strip off trailing colon, if any. 
- parsing_targets = false; - have_target = true; - } - - if (len > 0) { - if (is_dependency) { - if (have_secondary_target_on_this_rule) { - if (!have_newline_since_primary_target) { - *err = "depfile has multiple output paths"; - return false; - } else if (options_.depfile_distinct_target_lines_action_ == - kDepfileDistinctTargetLinesActionError) { - *err = - "depfile has multiple output paths (on separate lines)" - " [-w depfilemulti=err]"; - return false; - } else { - if (!warned_distinct_target_lines) { - warned_distinct_target_lines = true; - Warning("depfile has multiple output paths (on separate lines); " - "continuing anyway [-w depfilemulti=warn]"); - } - continue; - } - } - ins_.push_back(StringPiece(filename, len)); - } else if (!out_.str_) { - out_ = StringPiece(filename, len); - } else if (out_ != StringPiece(filename, len)) { - have_secondary_target_on_this_rule = true; - } - } - - if (have_newline) { - // A newline ends a rule so the next filename will be a new target. - parsing_targets = true; - have_secondary_target_on_this_rule = false; - if (have_target) { - have_newline_since_primary_target = true; - } - } - } - if (!have_target) { - *err = "expected ':' in depfile"; - return false; - } - return true; -} diff --git a/ninja/src/depfile_parser.h b/ninja/src/depfile_parser.h deleted file mode 100644 index be203746d6d..00000000000 --- a/ninja/src/depfile_parser.h +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_DEPFILE_PARSER_H_ -#define NINJA_DEPFILE_PARSER_H_ - -#include -#include -using namespace std; - -#include "string_piece.h" - -enum DepfileDistinctTargetLinesAction { - kDepfileDistinctTargetLinesActionWarn, - kDepfileDistinctTargetLinesActionError, -}; - -struct DepfileParserOptions { - DepfileParserOptions() - : depfile_distinct_target_lines_action_( - kDepfileDistinctTargetLinesActionWarn) {} - DepfileDistinctTargetLinesAction - depfile_distinct_target_lines_action_; -}; - -/// Parser for the dependency information emitted by gcc's -M flags. -struct DepfileParser { - explicit DepfileParser(DepfileParserOptions options = - DepfileParserOptions()); - - /// Parse an input file. Input must be NUL-terminated. - /// Warning: may mutate the content in-place and parsed StringPieces are - /// pointers within it. - bool Parse(string* content, string* err); - - StringPiece out_; - vector ins_; - DepfileParserOptions options_; -}; - -#endif // NINJA_DEPFILE_PARSER_H_ diff --git a/ninja/src/depfile_parser.in.cc b/ninja/src/depfile_parser.in.cc deleted file mode 100644 index 735a0c3a0e3..00000000000 --- a/ninja/src/depfile_parser.in.cc +++ /dev/null @@ -1,191 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "depfile_parser.h" -#include "util.h" - -DepfileParser::DepfileParser(DepfileParserOptions options) - : options_(options) -{ -} - -// A note on backslashes in Makefiles, from reading the docs: -// Backslash-newline is the line continuation character. -// Backslash-# escapes a # (otherwise meaningful as a comment start). -// Backslash-% escapes a % (otherwise meaningful as a special). -// Finally, quoting the GNU manual, "Backslashes that are not in danger -// of quoting ‘%’ characters go unmolested." -// How do you end a line with a backslash? The netbsd Make docs suggest -// reading the result of a shell command echoing a backslash! -// -// Rather than implement all of above, we follow what GCC/Clang produces: -// Backslashes escape a space or hash sign. -// When a space is preceded by 2N+1 backslashes, it is represents N backslashes -// followed by space. -// When a space is preceded by 2N backslashes, it represents 2N backslashes at -// the end of a filename. -// A hash sign is escaped by a single backslash. All other backslashes remain -// unchanged. -// -// If anyone actually has depfiles that rely on the more complicated -// behavior we can adjust this. -bool DepfileParser::Parse(string* content, string* err) { - // in: current parser input point. - // end: end of input. - // parsing_targets: whether we are parsing targets or dependencies. - char* in = &(*content)[0]; - char* end = in + content->size(); - bool have_target = false; - bool have_secondary_target_on_this_rule = false; - bool have_newline_since_primary_target = false; - bool warned_distinct_target_lines = false; - bool parsing_targets = true; - while (in < end) { - bool have_newline = false; - // out: current output point (typically same as in, but can fall behind - // as we de-escape backslashes). - char* out = in; - // filename: start of the current parsed filename. - char* filename = out; - for (;;) { - // start: beginning of the current parsed span. 
- const char* start = in; - char* yymarker = NULL; - /*!re2c - re2c:define:YYCTYPE = "unsigned char"; - re2c:define:YYCURSOR = in; - re2c:define:YYLIMIT = end; - re2c:define:YYMARKER = yymarker; - - re2c:yyfill:enable = 0; - - re2c:indent:top = 2; - re2c:indent:string = " "; - - nul = "\000"; - newline = '\r'?'\n'; - - '\\\\'* '\\ ' { - // 2N+1 backslashes plus space -> N backslashes plus space. - int len = (int)(in - start); - int n = len / 2 - 1; - if (out < start) - memset(out, '\\', n); - out += n; - *out++ = ' '; - continue; - } - '\\\\'+ ' ' { - // 2N backslashes plus space -> 2N backslashes, end of filename. - int len = (int)(in - start); - if (out < start) - memset(out, '\\', len - 1); - out += len - 1; - break; - } - '\\'+ '#' { - // De-escape hash sign, but preserve other leading backslashes. - int len = (int)(in - start); - if (len > 2 && out < start) - memset(out, '\\', len - 2); - out += len - 2; - *out++ = '#'; - continue; - } - '$$' { - // De-escape dollar character. - *out++ = '$'; - continue; - } - '\\'+ [^\000\r\n] | [a-zA-Z0-9+,/_:.~()}{%=@\x5B\x5D!\x80-\xFF-]+ { - // Got a span of plain text. - int len = (int)(in - start); - // Need to shift it over if we're overwriting backslashes. - if (out < start) - memmove(out, start, len); - out += len; - continue; - } - nul { - break; - } - '\\' newline { - // A line continuation ends the current file name. - break; - } - newline { - // A newline ends the current file name and the current rule. - have_newline = true; - break; - } - [^] { - // For any other character (e.g. whitespace), swallow it here, - // allowing the outer logic to loop around again. - break; - } - */ - } - - int len = (int)(out - filename); - const bool is_dependency = !parsing_targets; - if (len > 0 && filename[len - 1] == ':') { - len--; // Strip off trailing colon, if any. 
- parsing_targets = false; - have_target = true; - } - - if (len > 0) { - if (is_dependency) { - if (have_secondary_target_on_this_rule) { - if (!have_newline_since_primary_target) { - *err = "depfile has multiple output paths"; - return false; - } else if (options_.depfile_distinct_target_lines_action_ == - kDepfileDistinctTargetLinesActionError) { - *err = - "depfile has multiple output paths (on separate lines)" - " [-w depfilemulti=err]"; - return false; - } else { - if (!warned_distinct_target_lines) { - warned_distinct_target_lines = true; - Warning("depfile has multiple output paths (on separate lines); " - "continuing anyway [-w depfilemulti=warn]"); - } - continue; - } - } - ins_.push_back(StringPiece(filename, len)); - } else if (!out_.str_) { - out_ = StringPiece(filename, len); - } else if (out_ != StringPiece(filename, len)) { - have_secondary_target_on_this_rule = true; - } - } - - if (have_newline) { - // A newline ends a rule so the next filename will be a new target. - parsing_targets = true; - have_secondary_target_on_this_rule = false; - if (have_target) { - have_newline_since_primary_target = true; - } - } - } - if (!have_target) { - *err = "expected ':' in depfile"; - return false; - } - return true; -} diff --git a/ninja/src/depfile_parser_perftest.cc b/ninja/src/depfile_parser_perftest.cc deleted file mode 100644 index b21522168df..00000000000 --- a/ninja/src/depfile_parser_perftest.cc +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -#include -#include - -#include "depfile_parser.h" -#include "util.h" -#include "metrics.h" - -int main(int argc, char* argv[]) { - if (argc < 2) { - printf("usage: %s \n", argv[0]); - return 1; - } - - vector times; - for (int i = 1; i < argc; ++i) { - const char* filename = argv[i]; - - for (int limit = 1 << 10; limit < (1<<20); limit *= 2) { - int64_t start = GetTimeMillis(); - for (int rep = 0; rep < limit; ++rep) { - string buf; - string err; - if (ReadFile(filename, &buf, &err) < 0) { - printf("%s: %s\n", filename, err.c_str()); - return 1; - } - - DepfileParser parser; - if (!parser.Parse(&buf, &err)) { - printf("%s: %s\n", filename, err.c_str()); - return 1; - } - } - int64_t end = GetTimeMillis(); - - if (end - start > 100) { - int delta = (int)(end - start); - float time = delta*1000 / (float)limit; - printf("%s: %.1fus\n", filename, time); - times.push_back(time); - break; - } - } - } - - if (!times.empty()) { - float min = times[0]; - float max = times[0]; - float total = 0; - for (size_t i = 0; i < times.size(); ++i) { - total += times[i]; - if (times[i] < min) - min = times[i]; - else if (times[i] > max) - max = times[i]; - } - - printf("min %.1fus max %.1fus avg %.1fus\n", - min, max, total / times.size()); - } - - return 0; -} diff --git a/ninja/src/depfile_parser_test.cc b/ninja/src/depfile_parser_test.cc deleted file mode 100644 index 19224f35bab..00000000000 --- a/ninja/src/depfile_parser_test.cc +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "depfile_parser.h" - -#include "test.h" - -struct DepfileParserTest : public testing::Test { - bool Parse(const char* input, string* err); - - DepfileParser parser_; - string input_; -}; - -bool DepfileParserTest::Parse(const char* input, string* err) { - input_ = input; - return parser_.Parse(&input_, err); -} - -TEST_F(DepfileParserTest, Basic) { - string err; - EXPECT_TRUE(Parse( -"build/ninja.o: ninja.cc ninja.h eval_env.h manifest_parser.h\n", - &err)); - ASSERT_EQ("", err); - EXPECT_EQ("build/ninja.o", parser_.out_.AsString()); - EXPECT_EQ(4u, parser_.ins_.size()); -} - -TEST_F(DepfileParserTest, EarlyNewlineAndWhitespace) { - string err; - EXPECT_TRUE(Parse( -" \\\n" -" out: in\n", - &err)); - ASSERT_EQ("", err); -} - -TEST_F(DepfileParserTest, Continuation) { - string err; - EXPECT_TRUE(Parse( -"foo.o: \\\n" -" bar.h baz.h\n", - &err)); - ASSERT_EQ("", err); - EXPECT_EQ("foo.o", parser_.out_.AsString()); - EXPECT_EQ(2u, parser_.ins_.size()); -} - -TEST_F(DepfileParserTest, CarriageReturnContinuation) { - string err; - EXPECT_TRUE(Parse( -"foo.o: \\\r\n" -" bar.h baz.h\r\n", - &err)); - ASSERT_EQ("", err); - EXPECT_EQ("foo.o", parser_.out_.AsString()); - EXPECT_EQ(2u, parser_.ins_.size()); -} - -TEST_F(DepfileParserTest, BackSlashes) { - string err; - EXPECT_TRUE(Parse( -"Project\\Dir\\Build\\Release8\\Foo\\Foo.res : \\\n" -" Dir\\Library\\Foo.rc \\\n" -" Dir\\Library\\Version\\Bar.h \\\n" -" Dir\\Library\\Foo.ico \\\n" -" Project\\Thing\\Bar.tlb \\\n", - &err)); - ASSERT_EQ("", err); - 
EXPECT_EQ("Project\\Dir\\Build\\Release8\\Foo\\Foo.res", - parser_.out_.AsString()); - EXPECT_EQ(4u, parser_.ins_.size()); -} - -TEST_F(DepfileParserTest, Spaces) { - string err; - EXPECT_TRUE(Parse( -"a\\ bc\\ def: a\\ b c d", - &err)); - ASSERT_EQ("", err); - EXPECT_EQ("a bc def", - parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("a b", - parser_.ins_[0].AsString()); - EXPECT_EQ("c", - parser_.ins_[1].AsString()); - EXPECT_EQ("d", - parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, MultipleBackslashes) { - // Successive 2N+1 backslashes followed by space (' ') are replaced by N >= 0 - // backslashes and the space. A single backslash before hash sign is removed. - // Other backslashes remain untouched (including 2N backslashes followed by - // space). - string err; - EXPECT_TRUE(Parse( -"a\\ b\\#c.h: \\\\\\\\\\ \\\\\\\\ \\\\share\\info\\\\#1", - &err)); - ASSERT_EQ("", err); - EXPECT_EQ("a b#c.h", - parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("\\\\ ", - parser_.ins_[0].AsString()); - EXPECT_EQ("\\\\\\\\", - parser_.ins_[1].AsString()); - EXPECT_EQ("\\\\share\\info\\#1", - parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, Escapes) { - // Put backslashes before a variety of characters, see which ones make - // it through. 
- string err; - EXPECT_TRUE(Parse( -"\\!\\@\\#$$\\%\\^\\&\\[\\]\\\\:", - &err)); - ASSERT_EQ("", err); - EXPECT_EQ("\\!\\@#$\\%\\^\\&\\[\\]\\\\", - parser_.out_.AsString()); - ASSERT_EQ(0u, parser_.ins_.size()); -} - -TEST_F(DepfileParserTest, SpecialChars) { - // See filenames like istreambuf.iterator_op!= in - // https://github.com/google/libcxx/tree/master/test/iterators/stream.iterators/istreambuf.iterator/ - string err; - EXPECT_TRUE(Parse( -"C:/Program\\ Files\\ (x86)/Microsoft\\ crtdefs.h: \\\n" -" en@quot.header~ t+t-x!=1 \\\n" -" openldap/slapd.d/cn=config/cn=schema/cn={0}core.ldif\\\n" -" Fu\303\244ball\\\n" -" a[1]b@2%c", - &err)); - ASSERT_EQ("", err); - EXPECT_EQ("C:/Program Files (x86)/Microsoft crtdefs.h", - parser_.out_.AsString()); - ASSERT_EQ(5u, parser_.ins_.size()); - EXPECT_EQ("en@quot.header~", - parser_.ins_[0].AsString()); - EXPECT_EQ("t+t-x!=1", - parser_.ins_[1].AsString()); - EXPECT_EQ("openldap/slapd.d/cn=config/cn=schema/cn={0}core.ldif", - parser_.ins_[2].AsString()); - EXPECT_EQ("Fu\303\244ball", - parser_.ins_[3].AsString()); - EXPECT_EQ("a[1]b@2%c", - parser_.ins_[4].AsString()); -} - -TEST_F(DepfileParserTest, UnifyMultipleOutputs) { - // check that multiple duplicate targets are properly unified - string err; - EXPECT_TRUE(Parse("foo foo: x y z", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, RejectMultipleDifferentOutputs) { - // check that multiple different outputs are rejected by the parser - string err; - EXPECT_FALSE(Parse("foo bar: x y z", &err)); - ASSERT_EQ("depfile has multiple output paths", err); -} - -TEST_F(DepfileParserTest, MultipleEmptyRules) { - string err; - EXPECT_TRUE(Parse("foo: x\n" - "foo: \n" - "foo:\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(1u, parser_.ins_.size()); - 
EXPECT_EQ("x", parser_.ins_[0].AsString()); -} - -TEST_F(DepfileParserTest, UnifyMultipleRulesLF) { - string err; - EXPECT_TRUE(Parse("foo: x\n" - "foo: y\n" - "foo \\\n" - "foo: z\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, UnifyMultipleRulesCRLF) { - string err; - EXPECT_TRUE(Parse("foo: x\r\n" - "foo: y\r\n" - "foo \\\r\n" - "foo: z\r\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, UnifyMixedRulesLF) { - string err; - EXPECT_TRUE(Parse("foo: x\\\n" - " y\n" - "foo \\\n" - "foo: z\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, UnifyMixedRulesCRLF) { - string err; - EXPECT_TRUE(Parse("foo: x\\\r\n" - " y\r\n" - "foo \\\r\n" - "foo: z\r\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, IndentedRulesLF) { - string err; - EXPECT_TRUE(Parse(" foo: x\n" - " foo: y\n" - " foo: z\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, IndentedRulesCRLF) { - string err; - EXPECT_TRUE(Parse(" foo: x\r\n" - " foo: 
y\r\n" - " foo: z\r\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, TolerateMP) { - string err; - EXPECT_TRUE(Parse("foo: x y z\n" - "x:\n" - "y:\n" - "z:\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, MultipleRulesTolerateMP) { - string err; - EXPECT_TRUE(Parse("foo: x\n" - "x:\n" - "foo: y\n" - "y:\n" - "foo: z\n" - "z:\n", &err)); - ASSERT_EQ("foo", parser_.out_.AsString()); - ASSERT_EQ(3u, parser_.ins_.size()); - EXPECT_EQ("x", parser_.ins_[0].AsString()); - EXPECT_EQ("y", parser_.ins_[1].AsString()); - EXPECT_EQ("z", parser_.ins_[2].AsString()); -} - -TEST_F(DepfileParserTest, MultipleRulesRejectDifferentOutputs) { - // check that multiple different outputs are rejected by the parser - // when spread across multiple rules - DepfileParserOptions parser_opts; - parser_opts.depfile_distinct_target_lines_action_ = - kDepfileDistinctTargetLinesActionError; - DepfileParser parser(parser_opts); - string err; - string input = - "foo: x y\n" - "bar: y z\n"; - EXPECT_FALSE(parser.Parse(&input, &err)); - ASSERT_EQ("depfile has multiple output paths (on separate lines)" - " [-w depfilemulti=err]", err); -} diff --git a/ninja/src/deps_log.cc b/ninja/src/deps_log.cc deleted file mode 100644 index f717b263d8f..00000000000 --- a/ninja/src/deps_log.cc +++ /dev/null @@ -1,419 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "deps_log.h" - -#include -#include -#include -#include -#ifndef _WIN32 -#include -#elif defined(_MSC_VER) && (_MSC_VER < 1900) -typedef __int32 int32_t; -typedef unsigned __int32 uint32_t; -#endif - -#include "graph.h" -#include "metrics.h" -#include "state.h" -#include "util.h" -#if 0 -// The version is stored as 4 bytes after the signature and also serves as a -// byte order mark. Signature and version combined are 16 bytes long. -const char kFileSignature[] = "# ninjadeps\n"; -const int kCurrentVersion = 4; - -// Record size is currently limited to less than the full 32 bit, due to -// internal buffers having to have this size. -const unsigned kMaxRecordSize = (1 << 19) - 1; - -DepsLog::~DepsLog() { - Close(); -} - -bool DepsLog::OpenForWrite(const string& path, string* err) { - if (needs_recompaction_) { - if (!Recompact(path, err)) - return false; - } - - file_ = fopen(path.c_str(), "ab"); - if (!file_) { - *err = strerror(errno); - return false; - } - // Set the buffer size to this and flush the file buffer after every record - // to make sure records aren't written partially. - setvbuf(file_, NULL, _IOFBF, kMaxRecordSize + 1); - SetCloseOnExec(fileno(file_)); - - // Opening a file in append mode doesn't set the file pointer to the file's - // end on Windows. Do that explicitly. 
- fseek(file_, 0, SEEK_END); - - if (ftell(file_) == 0) { - if (fwrite(kFileSignature, sizeof(kFileSignature) - 1, 1, file_) < 1) { - *err = strerror(errno); - return false; - } - if (fwrite(&kCurrentVersion, 4, 1, file_) < 1) { - *err = strerror(errno); - return false; - } - } - if (fflush(file_) != 0) { - *err = strerror(errno); - return false; - } - return true; -} - -bool DepsLog::RecordDeps(Node* node, TimeStamp mtime, - const vector& nodes) { - return RecordDeps(node, mtime, nodes.size(), - nodes.empty() ? NULL : (Node**)&nodes.front()); -} - -bool DepsLog::RecordDeps(Node* node, TimeStamp mtime, - int node_count, Node** nodes) { - // Track whether there's any new data to be recorded. - bool made_change = false; - - // Assign ids to all nodes that are missing one. - if (node->id() < 0) { - if (!RecordId(node)) - return false; - made_change = true; - } - for (int i = 0; i < node_count; ++i) { - if (nodes[i]->id() < 0) { - if (!RecordId(nodes[i])) - return false; - made_change = true; - } - } - - // See if the new data is different than the existing data, if any. - if (!made_change) { - Deps* deps = GetDeps(node); - if (!deps || - deps->mtime != mtime || - deps->node_count != node_count) { - made_change = true; - } else { - for (int i = 0; i < node_count; ++i) { - if (deps->nodes[i] != nodes[i]) { - made_change = true; - break; - } - } - } - } - - // Don't write anything if there's no new info. - if (!made_change) - return true; - - // Update on-disk representation. - unsigned size = 4 * (1 + 2 + node_count); - if (size > kMaxRecordSize) { - errno = ERANGE; - return false; - } - size |= 0x80000000; // Deps record: set high bit. 
- if (fwrite(&size, 4, 1, file_) < 1) - return false; - int id = node->id(); - if (fwrite(&id, 4, 1, file_) < 1) - return false; - uint32_t mtime_part = static_cast(mtime & 0xffffffff); - if (fwrite(&mtime_part, 4, 1, file_) < 1) - return false; - mtime_part = static_cast((mtime >> 32) & 0xffffffff); - if (fwrite(&mtime_part, 4, 1, file_) < 1) - return false; - for (int i = 0; i < node_count; ++i) { - id = nodes[i]->id(); - if (fwrite(&id, 4, 1, file_) < 1) - return false; - } - if (fflush(file_) != 0) - return false; - - // Update in-memory representation. - Deps* deps = new Deps(mtime, node_count); - for (int i = 0; i < node_count; ++i) - deps->nodes[i] = nodes[i]; - UpdateDeps(node->id(), deps); - - return true; -} - -void DepsLog::Close() { - if (file_) - fclose(file_); - file_ = NULL; -} - -bool DepsLog::Load(const string& path, State* state, string* err) { - METRIC_RECORD(".ninja_deps load"); - char buf[kMaxRecordSize + 1]; - FILE* f = fopen(path.c_str(), "rb"); - if (!f) { - if (errno == ENOENT) - return true; - *err = strerror(errno); - return false; - } - - bool valid_header = true; - int version = 0; - if (!fgets(buf, sizeof(buf), f) || fread(&version, 4, 1, f) < 1) - valid_header = false; - // Note: For version differences, this should migrate to the new format. - // But the v1 format could sometimes (rarely) end up with invalid data, so - // don't migrate v1 to v3 to force a rebuild. (v2 only existed for a few days, - // and there was no release with it, so pretend that it never happened.) - if (!valid_header || strcmp(buf, kFileSignature) != 0 || - version != kCurrentVersion) { - if (version == 1) - *err = "deps log version change; rebuilding"; - else - *err = "bad deps log signature or version; starting over"; - fclose(f); - unlink(path.c_str()); - // Don't report this as a failure. An empty deps log will cause - // us to rebuild the outputs anyway. 
- return true; - } - - long offset; - bool read_failed = false; - int unique_dep_record_count = 0; - int total_dep_record_count = 0; - for (;;) { - offset = ftell(f); - - unsigned size; - if (fread(&size, 4, 1, f) < 1) { - if (!feof(f)) - read_failed = true; - break; - } - bool is_deps = (size >> 31) != 0; - size = size & 0x7FFFFFFF; - - if (size > kMaxRecordSize || fread(buf, size, 1, f) < 1) { - read_failed = true; - break; - } - - if (is_deps) { - assert(size % 4 == 0); - int* deps_data = reinterpret_cast(buf); - int out_id = deps_data[0]; - TimeStamp mtime; - mtime = (TimeStamp)(((uint64_t)(unsigned int)deps_data[2] << 32) | - (uint64_t)(unsigned int)deps_data[1]); - deps_data += 3; - int deps_count = (size / 4) - 3; - - Deps* deps = new Deps(mtime, deps_count); - for (int i = 0; i < deps_count; ++i) { - assert(deps_data[i] < (int)nodes_.size()); - assert(nodes_[deps_data[i]]); - deps->nodes[i] = nodes_[deps_data[i]]; - } - - total_dep_record_count++; - if (!UpdateDeps(out_id, deps)) - ++unique_dep_record_count; - } else { - int path_size = size - 4; - assert(path_size > 0); // CanonicalizePath() rejects empty paths. - // There can be up to 3 bytes of padding. - if (buf[path_size - 1] == '\0') --path_size; - if (buf[path_size - 1] == '\0') --path_size; - if (buf[path_size - 1] == '\0') --path_size; - StringPiece subpath(buf, path_size); - // It is not necessary to pass in a correct slash_bits here. It will - // either be a Node that's in the manifest (in which case it will already - // have a correct slash_bits that GetNode will look up), or it is an - // implicit dependency from a .d which does not affect the build command - // (and so need not have its slashes maintained). - Node* node = state->GetNode(subpath, 0); - - // Check that the expected index matches the actual index. This can only - // happen if two ninja processes write to the same deps log concurrently. 
- // (This uses unary complement to make the checksum look less like a - // dependency record entry.) - unsigned checksum = *reinterpret_cast(buf + size - 4); - int expected_id = ~checksum; - int id = nodes_.size(); - if (id != expected_id) { - read_failed = true; - break; - } - - assert(node->id() < 0); - node->set_id(id); - nodes_.push_back(node); - } - } - - if (read_failed) { - // An error occurred while loading; try to recover by truncating the - // file to the last fully-read record. - if (ferror(f)) { - *err = strerror(ferror(f)); - } else { - *err = "premature end of file"; - } - fclose(f); - - if (!Truncate(path, offset, err)) - return false; - - // The truncate succeeded; we'll just report the load error as a - // warning because the build can proceed. - *err += "; recovering"; - return true; - } - - fclose(f); - - // Rebuild the log if there are too many dead records. - int kMinCompactionEntryCount = 1000; - int kCompactionRatio = 3; - if (total_dep_record_count > kMinCompactionEntryCount && - total_dep_record_count > unique_dep_record_count * kCompactionRatio) { - needs_recompaction_ = true; - } - - return true; -} - -DepsLog::Deps* DepsLog::GetDeps(Node* node) { - // Abort if the node has no id (never referenced in the deps) or if - // there's no deps recorded for the node. - if (node->id() < 0 || node->id() >= (int)deps_.size()) - return NULL; - return deps_[node->id()]; -} - -bool DepsLog::Recompact(const string& path, string* err) { - METRIC_RECORD(".ninja_deps recompact"); - - Close(); - string temp_path = path + ".recompact"; - - // OpenForWrite() opens for append. Make sure it's not appending to a - // left-over file from a previous recompaction attempt that crashed somehow. - unlink(temp_path.c_str()); - - DepsLog new_log; - if (!new_log.OpenForWrite(temp_path, err)) - return false; - - // Clear all known ids so that new ones can be reassigned. The new indices - // will refer to the ordering in new_log, not in the current log. 
- for (vector::iterator i = nodes_.begin(); i != nodes_.end(); ++i) - (*i)->set_id(-1); - - // Write out all deps again. - for (int old_id = 0; old_id < (int)deps_.size(); ++old_id) { - Deps* deps = deps_[old_id]; - if (!deps) continue; // If nodes_[old_id] is a leaf, it has no deps. - - if (!IsDepsEntryLiveFor(nodes_[old_id])) - continue; - - if (!new_log.RecordDeps(nodes_[old_id], deps->mtime, - deps->node_count, deps->nodes)) { - new_log.Close(); - return false; - } - } - - new_log.Close(); - - // All nodes now have ids that refer to new_log, so steal its data. - deps_.swap(new_log.deps_); - nodes_.swap(new_log.nodes_); - - if (unlink(path.c_str()) < 0) { - *err = strerror(errno); - return false; - } - - if (rename(temp_path.c_str(), path.c_str()) < 0) { - *err = strerror(errno); - return false; - } - - return true; -} - -bool DepsLog::IsDepsEntryLiveFor(Node* node) { - // Skip entries that don't have in-edges or whose edges don't have a - // "deps" attribute. They were in the deps log from previous builds, but - // the the files they were for were removed from the build and their deps - // entries are no longer needed. - // (Without the check for "deps", a chain of two or more nodes that each - // had deps wouldn't be collected in a single recompaction.) - return node->in_edge() && !node->in_edge()->GetBinding("deps").empty(); -} - -bool DepsLog::UpdateDeps(int out_id, Deps* deps) { - if (out_id >= (int)deps_.size()) - deps_.resize(out_id + 1); - - bool delete_old = deps_[out_id] != NULL; - if (delete_old) - delete deps_[out_id]; - deps_[out_id] = deps; - return delete_old; -} - -bool DepsLog::RecordId(Node* node) { - int path_size = node->path().size(); - int padding = (4 - path_size % 4) % 4; // Pad path to 4 byte boundary. 
- - unsigned size = path_size + padding + 4; - if (size > kMaxRecordSize) { - errno = ERANGE; - return false; - } - if (fwrite(&size, 4, 1, file_) < 1) - return false; - if (fwrite(node->path().data(), path_size, 1, file_) < 1) { - assert(node->path().size() > 0); - return false; - } - if (padding && fwrite("\0\0", padding, 1, file_) < 1) - return false; - int id = nodes_.size(); - unsigned checksum = ~(unsigned)id; - if (fwrite(&checksum, 4, 1, file_) < 1) - return false; - if (fflush(file_) != 0) - return false; - - node->set_id(id); - nodes_.push_back(node); - - return true; -} -#endif \ No newline at end of file diff --git a/ninja/src/deps_log.h b/ninja/src/deps_log.h deleted file mode 100644 index 705dd8867d5..00000000000 --- a/ninja/src/deps_log.h +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_DEPS_LOG_H_ -#define NINJA_DEPS_LOG_H_ - -#include -#include -using namespace std; - -#include - -#include "timestamp.h" - -struct Node; -struct State; -#if 1 -struct DepsLog {}; -#else -/// As build commands run they can output extra dependency information -/// (e.g. header dependencies for C source) dynamically. DepsLog collects -/// that information at build time and uses it for subsequent builds. 
-/// -/// The on-disk format is based on two primary design constraints: -/// - it must be written to as a stream (during the build, which may be -/// interrupted); -/// - it can be read all at once on startup. (Alternative designs, where -/// it contains indexing information, were considered and discarded as -/// too complicated to implement; if the file is small than reading it -/// fully on startup is acceptable.) -/// Here are some stats from the Windows Chrome dependency files, to -/// help guide the design space. The total text in the files sums to -/// 90mb so some compression is warranted to keep load-time fast. -/// There's about 10k files worth of dependencies that reference about -/// 40k total paths totalling 2mb of unique strings. -/// -/// Based on these stats, here's the current design. -/// The file is structured as version header followed by a sequence of records. -/// Each record is either a path string or a dependency list. -/// Numbering the path strings in file order gives them dense integer ids. -/// A dependency list maps an output id to a list of input ids. -/// -/// Concretely, a record is: -/// four bytes record length, high bit indicates record type -/// (but max record sizes are capped at 512kB) -/// path records contain the string name of the path, followed by up to 3 -/// padding bytes to align on 4 byte boundaries, followed by the -/// one's complement of the expected index of the record (to detect -/// concurrent writes of multiple ninja processes to the log). -/// dependency records are an array of 4-byte integers -/// [output path id, -/// output path mtime (lower 4 bytes), output path mtime (upper 4 bytes), -/// input path id, input path id...] -/// (The mtime is compared against the on-disk output path mtime -/// to verify the stored data is up-to-date.) -/// If two records reference the same output the latter one in the file -/// wins, allowing updates to just be appended to the file. 
A separate -/// repacking step can run occasionally to remove dead records. -struct DepsLog { - DepsLog() : needs_recompaction_(false), file_(NULL) {} - ~DepsLog(); - - // Writing (build-time) interface. - bool OpenForWrite(const string& path, string* err); - bool RecordDeps(Node* node, TimeStamp mtime, const vector& nodes); - bool RecordDeps(Node* node, TimeStamp mtime, int node_count, Node** nodes); - void Close(); - - // Reading (startup-time) interface. - struct Deps { - Deps(int64_t mtime, int node_count) - : mtime(mtime), node_count(node_count), nodes(new Node*[node_count]) {} - ~Deps() { delete [] nodes; } - TimeStamp mtime; - int node_count; - Node** nodes; - }; - bool Load(const string& path, State* state, string* err); - Deps* GetDeps(Node* node); - - /// Rewrite the known log entries, throwing away old data. - bool Recompact(const string& path, string* err); - - /// Returns if the deps entry for a node is still reachable from the manifest. - /// - /// The deps log can contain deps entries for files that were built in the - /// past but are no longer part of the manifest. This function returns if - /// this is the case for a given node. This function is slow, don't call - /// it from code that runs on every build. - bool IsDepsEntryLiveFor(Node* node); - - /// Used for tests. - const vector& nodes() const { return nodes_; } - const vector& deps() const { return deps_; } - - private: - // Updates the in-memory representation. Takes ownership of |deps|. - // Returns true if a prior deps record was deleted. - bool UpdateDeps(int out_id, Deps* deps); - // Write a node name record, assigning it an id. - bool RecordId(Node* node); - - bool needs_recompaction_; - FILE* file_; - - /// Maps id -> Node. - vector nodes_; - /// Maps id -> deps of that id. 
- vector deps_; - - friend struct DepsLogTest; -}; -#endif -#endif // NINJA_DEPS_LOG_H_ diff --git a/ninja/src/deps_log_test.cc b/ninja/src/deps_log_test.cc deleted file mode 100644 index 0cdeb45bed2..00000000000 --- a/ninja/src/deps_log_test.cc +++ /dev/null @@ -1,479 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "deps_log.h" - -#include -#ifndef _WIN32 -#include -#endif - -#include "graph.h" -#include "util.h" -#include "test.h" - -namespace { - -const char kTestFilename[] = "DepsLogTest-tempfile"; - -struct DepsLogTest : public testing::Test { - virtual void SetUp() { - // In case a crashing test left a stale file behind. 
- unlink(kTestFilename); - } - virtual void TearDown() { - unlink(kTestFilename); - } -}; - -TEST_F(DepsLogTest, WriteRead) { - State state1; - DepsLog log1; - string err; - EXPECT_TRUE(log1.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - { - vector deps; - deps.push_back(state1.GetNode("foo.h", 0)); - deps.push_back(state1.GetNode("bar.h", 0)); - log1.RecordDeps(state1.GetNode("out.o", 0), 1, deps); - - deps.clear(); - deps.push_back(state1.GetNode("foo.h", 0)); - deps.push_back(state1.GetNode("bar2.h", 0)); - log1.RecordDeps(state1.GetNode("out2.o", 0), 2, deps); - - DepsLog::Deps* log_deps = log1.GetDeps(state1.GetNode("out.o", 0)); - ASSERT_TRUE(log_deps); - ASSERT_EQ(1, log_deps->mtime); - ASSERT_EQ(2, log_deps->node_count); - ASSERT_EQ("foo.h", log_deps->nodes[0]->path()); - ASSERT_EQ("bar.h", log_deps->nodes[1]->path()); - } - - log1.Close(); - - State state2; - DepsLog log2; - EXPECT_TRUE(log2.Load(kTestFilename, &state2, &err)); - ASSERT_EQ("", err); - - ASSERT_EQ(log1.nodes().size(), log2.nodes().size()); - for (int i = 0; i < (int)log1.nodes().size(); ++i) { - Node* node1 = log1.nodes()[i]; - Node* node2 = log2.nodes()[i]; - ASSERT_EQ(i, node1->id()); - ASSERT_EQ(node1->id(), node2->id()); - } - - // Spot-check the entries in log2. - DepsLog::Deps* log_deps = log2.GetDeps(state2.GetNode("out2.o", 0)); - ASSERT_TRUE(log_deps); - ASSERT_EQ(2, log_deps->mtime); - ASSERT_EQ(2, log_deps->node_count); - ASSERT_EQ("foo.h", log_deps->nodes[0]->path()); - ASSERT_EQ("bar2.h", log_deps->nodes[1]->path()); -} - -TEST_F(DepsLogTest, LotsOfDeps) { - const int kNumDeps = 100000; // More than 64k. 
- - State state1; - DepsLog log1; - string err; - EXPECT_TRUE(log1.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - { - vector deps; - for (int i = 0; i < kNumDeps; ++i) { - char buf[32]; - sprintf(buf, "file%d.h", i); - deps.push_back(state1.GetNode(buf, 0)); - } - log1.RecordDeps(state1.GetNode("out.o", 0), 1, deps); - - DepsLog::Deps* log_deps = log1.GetDeps(state1.GetNode("out.o", 0)); - ASSERT_EQ(kNumDeps, log_deps->node_count); - } - - log1.Close(); - - State state2; - DepsLog log2; - EXPECT_TRUE(log2.Load(kTestFilename, &state2, &err)); - ASSERT_EQ("", err); - - DepsLog::Deps* log_deps = log2.GetDeps(state2.GetNode("out.o", 0)); - ASSERT_EQ(kNumDeps, log_deps->node_count); -} - -// Verify that adding the same deps twice doesn't grow the file. -TEST_F(DepsLogTest, DoubleEntry) { - // Write some deps to the file and grab its size. - int file_size; - { - State state; - DepsLog log; - string err; - EXPECT_TRUE(log.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - vector deps; - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("bar.h", 0)); - log.RecordDeps(state.GetNode("out.o", 0), 1, deps); - log.Close(); - - struct stat st; - ASSERT_EQ(0, stat(kTestFilename, &st)); - file_size = (int)st.st_size; - ASSERT_GT(file_size, 0); - } - - // Now reload the file, and read the same deps. - { - State state; - DepsLog log; - string err; - EXPECT_TRUE(log.Load(kTestFilename, &state, &err)); - - EXPECT_TRUE(log.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - vector deps; - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("bar.h", 0)); - log.RecordDeps(state.GetNode("out.o", 0), 1, deps); - log.Close(); - - struct stat st; - ASSERT_EQ(0, stat(kTestFilename, &st)); - int file_size_2 = (int)st.st_size; - ASSERT_EQ(file_size, file_size_2); - } -} - -// Verify that adding the new deps works and can be compacted away. 
-TEST_F(DepsLogTest, Recompact) { - const char kManifest[] = -"rule cc\n" -" command = cc\n" -" deps = gcc\n" -"build out.o: cc\n" -"build other_out.o: cc\n"; - - // Write some deps to the file and grab its size. - int file_size; - { - State state; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, kManifest)); - DepsLog log; - string err; - ASSERT_TRUE(log.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - vector deps; - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("bar.h", 0)); - log.RecordDeps(state.GetNode("out.o", 0), 1, deps); - - deps.clear(); - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("baz.h", 0)); - log.RecordDeps(state.GetNode("other_out.o", 0), 1, deps); - - log.Close(); - - struct stat st; - ASSERT_EQ(0, stat(kTestFilename, &st)); - file_size = (int)st.st_size; - ASSERT_GT(file_size, 0); - } - - // Now reload the file, and add slightly different deps. - int file_size_2; - { - State state; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, kManifest)); - DepsLog log; - string err; - ASSERT_TRUE(log.Load(kTestFilename, &state, &err)); - - ASSERT_TRUE(log.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - vector deps; - deps.push_back(state.GetNode("foo.h", 0)); - log.RecordDeps(state.GetNode("out.o", 0), 1, deps); - log.Close(); - - struct stat st; - ASSERT_EQ(0, stat(kTestFilename, &st)); - file_size_2 = (int)st.st_size; - // The file should grow to record the new deps. - ASSERT_GT(file_size_2, file_size); - } - - // Now reload the file, verify the new deps have replaced the old, then - // recompact. 
- int file_size_3; - { - State state; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state, kManifest)); - DepsLog log; - string err; - ASSERT_TRUE(log.Load(kTestFilename, &state, &err)); - - Node* out = state.GetNode("out.o", 0); - DepsLog::Deps* deps = log.GetDeps(out); - ASSERT_TRUE(deps); - ASSERT_EQ(1, deps->mtime); - ASSERT_EQ(1, deps->node_count); - ASSERT_EQ("foo.h", deps->nodes[0]->path()); - - Node* other_out = state.GetNode("other_out.o", 0); - deps = log.GetDeps(other_out); - ASSERT_TRUE(deps); - ASSERT_EQ(1, deps->mtime); - ASSERT_EQ(2, deps->node_count); - ASSERT_EQ("foo.h", deps->nodes[0]->path()); - ASSERT_EQ("baz.h", deps->nodes[1]->path()); - - ASSERT_TRUE(log.Recompact(kTestFilename, &err)); - - // The in-memory deps graph should still be valid after recompaction. - deps = log.GetDeps(out); - ASSERT_TRUE(deps); - ASSERT_EQ(1, deps->mtime); - ASSERT_EQ(1, deps->node_count); - ASSERT_EQ("foo.h", deps->nodes[0]->path()); - ASSERT_EQ(out, log.nodes()[out->id()]); - - deps = log.GetDeps(other_out); - ASSERT_TRUE(deps); - ASSERT_EQ(1, deps->mtime); - ASSERT_EQ(2, deps->node_count); - ASSERT_EQ("foo.h", deps->nodes[0]->path()); - ASSERT_EQ("baz.h", deps->nodes[1]->path()); - ASSERT_EQ(other_out, log.nodes()[other_out->id()]); - - // The file should have shrunk a bit for the smaller deps. - struct stat st; - ASSERT_EQ(0, stat(kTestFilename, &st)); - file_size_3 = (int)st.st_size; - ASSERT_LT(file_size_3, file_size_2); - } - - // Now reload the file and recompact with an empty manifest. The previous - // entries should be removed. - { - State state; - // Intentionally not parsing kManifest here. 
- DepsLog log; - string err; - ASSERT_TRUE(log.Load(kTestFilename, &state, &err)); - - Node* out = state.GetNode("out.o", 0); - DepsLog::Deps* deps = log.GetDeps(out); - ASSERT_TRUE(deps); - ASSERT_EQ(1, deps->mtime); - ASSERT_EQ(1, deps->node_count); - ASSERT_EQ("foo.h", deps->nodes[0]->path()); - - Node* other_out = state.GetNode("other_out.o", 0); - deps = log.GetDeps(other_out); - ASSERT_TRUE(deps); - ASSERT_EQ(1, deps->mtime); - ASSERT_EQ(2, deps->node_count); - ASSERT_EQ("foo.h", deps->nodes[0]->path()); - ASSERT_EQ("baz.h", deps->nodes[1]->path()); - - ASSERT_TRUE(log.Recompact(kTestFilename, &err)); - - // The previous entries should have been removed. - deps = log.GetDeps(out); - ASSERT_FALSE(deps); - - deps = log.GetDeps(other_out); - ASSERT_FALSE(deps); - - // The .h files pulled in via deps should no longer have ids either. - ASSERT_EQ(-1, state.LookupNode("foo.h")->id()); - ASSERT_EQ(-1, state.LookupNode("baz.h")->id()); - - // The file should have shrunk more. - struct stat st; - ASSERT_EQ(0, stat(kTestFilename, &st)); - int file_size_4 = (int)st.st_size; - ASSERT_LT(file_size_4, file_size_3); - } -} - -// Verify that invalid file headers cause a new build. -TEST_F(DepsLogTest, InvalidHeader) { - const char *kInvalidHeaders[] = { - "", // Empty file. - "# ninjad", // Truncated first line. - "# ninjadeps\n", // No version int. - "# ninjadeps\n\001\002", // Truncated version int. - "# ninjadeps\n\001\002\003\004" // Invalid version int. 
- }; - for (size_t i = 0; i < sizeof(kInvalidHeaders) / sizeof(kInvalidHeaders[0]); - ++i) { - FILE* deps_log = fopen(kTestFilename, "wb"); - ASSERT_TRUE(deps_log != NULL); - ASSERT_EQ( - strlen(kInvalidHeaders[i]), - fwrite(kInvalidHeaders[i], 1, strlen(kInvalidHeaders[i]), deps_log)); - ASSERT_EQ(0 ,fclose(deps_log)); - - string err; - DepsLog log; - State state; - ASSERT_TRUE(log.Load(kTestFilename, &state, &err)); - EXPECT_EQ("bad deps log signature or version; starting over", err); - } -} - -// Simulate what happens when loading a truncated log file. -TEST_F(DepsLogTest, Truncated) { - // Create a file with some entries. - { - State state; - DepsLog log; - string err; - EXPECT_TRUE(log.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - vector deps; - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("bar.h", 0)); - log.RecordDeps(state.GetNode("out.o", 0), 1, deps); - - deps.clear(); - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("bar2.h", 0)); - log.RecordDeps(state.GetNode("out2.o", 0), 2, deps); - - log.Close(); - } - - // Get the file size. - struct stat st; - ASSERT_EQ(0, stat(kTestFilename, &st)); - - // Try reloading at truncated sizes. - // Track how many nodes/deps were found; they should decrease with - // smaller sizes. - int node_count = 5; - int deps_count = 2; - for (int size = (int)st.st_size; size > 0; --size) { - string err; - ASSERT_TRUE(Truncate(kTestFilename, size, &err)); - - State state; - DepsLog log; - EXPECT_TRUE(log.Load(kTestFilename, &state, &err)); - if (!err.empty()) { - // At some point the log will be so short as to be unparseable. - break; - } - - ASSERT_GE(node_count, (int)log.nodes().size()); - node_count = log.nodes().size(); - - // Count how many non-NULL deps entries there are. 
- int new_deps_count = 0; - for (vector::const_iterator i = log.deps().begin(); - i != log.deps().end(); ++i) { - if (*i) - ++new_deps_count; - } - ASSERT_GE(deps_count, new_deps_count); - deps_count = new_deps_count; - } -} - -// Run the truncation-recovery logic. -TEST_F(DepsLogTest, TruncatedRecovery) { - // Create a file with some entries. - { - State state; - DepsLog log; - string err; - EXPECT_TRUE(log.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - vector deps; - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("bar.h", 0)); - log.RecordDeps(state.GetNode("out.o", 0), 1, deps); - - deps.clear(); - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("bar2.h", 0)); - log.RecordDeps(state.GetNode("out2.o", 0), 2, deps); - - log.Close(); - } - - // Shorten the file, corrupting the last record. - { - struct stat st; - ASSERT_EQ(0, stat(kTestFilename, &st)); - string err; - ASSERT_TRUE(Truncate(kTestFilename, st.st_size - 2, &err)); - } - - // Load the file again, add an entry. - { - State state; - DepsLog log; - string err; - EXPECT_TRUE(log.Load(kTestFilename, &state, &err)); - ASSERT_EQ("premature end of file; recovering", err); - err.clear(); - - // The truncated entry should've been discarded. - EXPECT_EQ(NULL, log.GetDeps(state.GetNode("out2.o", 0))); - - EXPECT_TRUE(log.OpenForWrite(kTestFilename, &err)); - ASSERT_EQ("", err); - - // Add a new entry. - vector deps; - deps.push_back(state.GetNode("foo.h", 0)); - deps.push_back(state.GetNode("bar2.h", 0)); - log.RecordDeps(state.GetNode("out2.o", 0), 3, deps); - - log.Close(); - } - - // Load the file a third time to verify appending after a mangled - // entry doesn't break things. - { - State state; - DepsLog log; - string err; - EXPECT_TRUE(log.Load(kTestFilename, &state, &err)); - - // The truncated entry should exist. 
- DepsLog::Deps* deps = log.GetDeps(state.GetNode("out2.o", 0)); - ASSERT_TRUE(deps); - } -} - -} // anonymous namespace diff --git a/ninja/src/disk_interface.cc b/ninja/src/disk_interface.cc deleted file mode 100644 index 8a23af21c0d..00000000000 --- a/ninja/src/disk_interface.cc +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "disk_interface.h" - -#include - -#include -#include -#include -#include -#include - -#ifdef _WIN32 -#include -#include -#include // _mkdir -#endif - -#include "metrics.h" -#include "util.h" - -namespace { - -string DirName(const string& path) { -#ifdef _WIN32 - static const char kPathSeparators[] = "\\/"; -#else - static const char kPathSeparators[] = "/"; -#endif - static const char* const kEnd = kPathSeparators + sizeof(kPathSeparators) - 1; - - string::size_type slash_pos = path.find_last_of(kPathSeparators); - if (slash_pos == string::npos) - return string(); // Nothing to do. - while (slash_pos > 0 && - std::find(kPathSeparators, kEnd, path[slash_pos - 1]) != kEnd) - --slash_pos; - return path.substr(0, slash_pos); -} - -int MakeDir(const string& path) { -#ifdef _WIN32 - return _mkdir(path.c_str()); -#else - return mkdir(path.c_str(), 0777); -#endif -} - -#ifdef _WIN32 -TimeStamp TimeStampFromFileTime(const FILETIME& filetime) { - // FILETIME is in 100-nanosecond increments since the Windows epoch. 
- // We don't much care about epoch correctness but we do want the - // resulting value to fit in a 64-bit integer. - uint64_t mtime = ((uint64_t)filetime.dwHighDateTime << 32) | - ((uint64_t)filetime.dwLowDateTime); - // 1600 epoch -> 2000 epoch (subtract 400 years). - TimeStamp output = (TimeStamp)mtime - 11983680000LL * (1000000000LL / 100); - if(output <= 0){ - return 0; - } else { - return output; - } -} - -TimeStamp StatSingleFile(const string& path, string* err) { - WIN32_FILE_ATTRIBUTE_DATA attrs; - if (!GetFileAttributesExA(path.c_str(), GetFileExInfoStandard, &attrs)) { - DWORD win_err = GetLastError(); - if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) - return 0; - *err = "GetFileAttributesEx(" + path + "): " + GetLastErrorString(); - return -1; - } - return TimeStampFromFileTime(attrs.ftLastWriteTime); -} - -bool IsWindows7OrLater() { - OSVERSIONINFOEX version_info = - { sizeof(OSVERSIONINFOEX), 6, 1, 0, 0, {0}, 0, 0, 0, 0, 0}; - DWORDLONG comparison = 0; - VER_SET_CONDITION(comparison, VER_MAJORVERSION, VER_GREATER_EQUAL); - VER_SET_CONDITION(comparison, VER_MINORVERSION, VER_GREATER_EQUAL); - return VerifyVersionInfo( - &version_info, VER_MAJORVERSION | VER_MINORVERSION, comparison); -} - -bool StatAllFilesInDir(const string& dir, map* stamps, - string* err) { - // FindExInfoBasic is 30% faster than FindExInfoStandard. - static bool can_use_basic_info = IsWindows7OrLater(); - // This is not in earlier SDKs. - const FINDEX_INFO_LEVELS kFindExInfoBasic = - static_cast(1); - FINDEX_INFO_LEVELS level = - can_use_basic_info ? 
kFindExInfoBasic : FindExInfoStandard; - WIN32_FIND_DATAA ffd; - HANDLE find_handle = FindFirstFileExA((dir + "\\*").c_str(), level, &ffd, - FindExSearchNameMatch, NULL, 0); - - if (find_handle == INVALID_HANDLE_VALUE) { - DWORD win_err = GetLastError(); - if (win_err == ERROR_FILE_NOT_FOUND || win_err == ERROR_PATH_NOT_FOUND) - return true; - *err = "FindFirstFileExA(" + dir + "): " + GetLastErrorString(); - return false; - } - do { - string lowername = ffd.cFileName; - if (lowername == "..") { - // Seems to just copy the timestamp for ".." from ".", which is wrong. - // This is the case at least on NTFS under Windows 7. - continue; - } - transform(lowername.begin(), lowername.end(), lowername.begin(), ::tolower); - stamps->insert(make_pair(lowername, - TimeStampFromFileTime(ffd.ftLastWriteTime))); - } while (FindNextFileA(find_handle, &ffd)); - FindClose(find_handle); - return true; -} -#endif // _WIN32 - -} // namespace - -// DiskInterface --------------------------------------------------------------- - -bool DiskInterface::MakeDirs(const string& path) { - string dir = DirName(path); - if (dir.empty()) - return true; // Reached root; assume it's there. - string err; - TimeStamp mtime = Stat(dir, &err); - if (mtime < 0) { - Error("%s", err.c_str()); - return false; - } - if (mtime > 0) - return true; // Exists already; we're done. - - // Directory doesn't exist. Try creating its parent first. 
- bool success = MakeDirs(dir); - if (!success) - return false; - return MakeDir(dir); -} - -// RealDiskInterface ----------------------------------------------------------- - -TimeStamp RealDiskInterface::Stat(const string& path, string* err) const { - METRIC_RECORD("node stat"); -#ifdef _WIN32 - // MSDN: "Naming Files, Paths, and Namespaces" - // http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx - if (!path.empty() && path[0] != '\\' && path.size() > MAX_PATH) { - ostringstream err_stream; - err_stream << "Stat(" << path << "): Filename longer than " << MAX_PATH - << " characters"; - *err = err_stream.str(); - return -1; - } - if (!use_cache_) - return StatSingleFile(path, err); - - string dir = DirName(path); - string base(path.substr(dir.size() ? dir.size() + 1 : 0)); - if (base == "..") { - // StatAllFilesInDir does not report any information for base = "..". - base = "."; - dir = path; - } - - transform(dir.begin(), dir.end(), dir.begin(), ::tolower); - transform(base.begin(), base.end(), base.begin(), ::tolower); - - Cache::iterator ci = cache_.find(dir); - if (ci == cache_.end()) { - ci = cache_.insert(make_pair(dir, DirCache())).first; - if (!StatAllFilesInDir(dir.empty() ? "." : dir, &ci->second, err)) { - cache_.erase(ci); - return -1; - } - } - DirCache::iterator di = ci->second.find(base); - return di != ci->second.end() ? di->second : 0; -#else - struct stat st; - if (stat(path.c_str(), &st) < 0) { - if (errno == ENOENT || errno == ENOTDIR) - return 0; - *err = "stat(" + path + "): " + strerror(errno); - return -1; - } - // Some users (Flatpak) set mtime to 0, this should be harmless - // and avoids conflicting with our return value of 0 meaning - // that it doesn't exist. 
-#if 0 - if (st.st_mtime == 0) - return 1; -#endif -#if defined(_AIX) - return (int64_t)st.st_mtime * 1000000000LL + st.st_mtime_n; -#elif defined(__APPLE__) - return ((int64_t)st.st_mtimespec.tv_sec * 1000000000LL + - st.st_mtimespec.tv_nsec); -#elif defined(st_mtime) // A macro, so we're likely on modern POSIX. - return (int64_t)st.st_mtim.tv_sec * 1000000000LL + st.st_mtim.tv_nsec; -#else - return (int64_t)st.st_mtime * 1000000000LL + st.st_mtimensec; -#endif -#endif -} - -bool RealDiskInterface::WriteFile(const string& path, const string& contents) { - FILE* fp = fopen(path.c_str(), "w"); - if (fp == NULL) { - Error("WriteFile(%s): Unable to create file. %s", - path.c_str(), strerror(errno)); - return false; - } - - if (fwrite(contents.data(), 1, contents.length(), fp) < contents.length()) { - Error("WriteFile(%s): Unable to write to the file. %s", - path.c_str(), strerror(errno)); - fclose(fp); - return false; - } - - if (fclose(fp) == EOF) { - Error("WriteFile(%s): Unable to close the file. 
%s", - path.c_str(), strerror(errno)); - return false; - } - - return true; -} - -bool RealDiskInterface::MakeDir(const string& path) { - if (::MakeDir(path) < 0) { - if (errno == EEXIST) { - return true; - } - Error("mkdir(%s): %s", path.c_str(), strerror(errno)); - return false; - } - return true; -} - -FileReader::Status RealDiskInterface::ReadFile(const string& path, - string* contents, - string* err) { - switch (::ReadFile(path, contents, err)) { - case 0: return Okay; - case -ENOENT: return NotFound; - default: return OtherError; - } -} - -int RealDiskInterface::RemoveFile(const string& path) { - if (remove(path.c_str()) < 0) { - switch (errno) { - case ENOENT: - return 1; - default: - Error("remove(%s): %s", path.c_str(), strerror(errno)); - return -1; - } - } else { - return 0; - } -} - -void RealDiskInterface::AllowStatCache(bool allow) { -#ifdef _WIN32 - use_cache_ = allow; - if (!use_cache_) - cache_.clear(); -#endif -} diff --git a/ninja/src/disk_interface.h b/ninja/src/disk_interface.h deleted file mode 100644 index 145e0892f49..00000000000 --- a/ninja/src/disk_interface.h +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_DISK_INTERFACE_H_ -#define NINJA_DISK_INTERFACE_H_ - -#include -#include -using namespace std; - -#include "timestamp.h" - -/// Interface for reading files from disk. See DiskInterface for details. 
-/// This base offers the minimum interface needed just to read files. -struct FileReader { - virtual ~FileReader() {} - - /// Result of ReadFile. - enum Status { - Okay, - NotFound, - OtherError - }; - - /// Read and store in given string. On success, return Okay. - /// On error, return another Status and fill |err|. - virtual Status ReadFile(const string& path, string* contents, - string* err) = 0; -}; - -/// Interface for accessing the disk. -/// -/// Abstract so it can be mocked out for tests. The real implementation -/// is RealDiskInterface. -struct DiskInterface: public FileReader { - /// stat() a file, returning the mtime, or 0 if missing and -1 on - /// other errors. - virtual TimeStamp Stat(const string& path, string* err) const = 0; - - /// Create a directory, returning false on failure. - virtual bool MakeDir(const string& path) = 0; - - /// Create a file, with the specified name and contents - /// Returns true on success, false on failure - virtual bool WriteFile(const string& path, const string& contents) = 0; - - /// Remove the file named @a path. It behaves like 'rm -f path' so no errors - /// are reported if it does not exists. - /// @returns 0 if the file has been removed, - /// 1 if the file does not exist, and - /// -1 if an error occurs. - virtual int RemoveFile(const string& path) = 0; - - /// Create all the parent directories for path; like mkdir -p - /// `basename path`. - bool MakeDirs(const string& path); -}; - -/// Implementation of DiskInterface that actually hits the disk. 
-struct RealDiskInterface : public DiskInterface { - RealDiskInterface() -#ifdef _WIN32 - : use_cache_(false) -#endif - {} - virtual ~RealDiskInterface() {} - virtual TimeStamp Stat(const string& path, string* err) const; - virtual bool MakeDir(const string& path); - virtual bool WriteFile(const string& path, const string& contents); - virtual Status ReadFile(const string& path, string* contents, string* err); - virtual int RemoveFile(const string& path); - - /// Whether stat information can be cached. Only has an effect on Windows. - void AllowStatCache(bool allow); - - private: -#ifdef _WIN32 - /// Whether stat information can be cached. - bool use_cache_; - - typedef map DirCache; - // TODO: Neither a map nor a hashmap seems ideal here. If the statcache - // works out, come up with a better data structure. - typedef map Cache; - mutable Cache cache_; -#endif -}; - -#endif // NINJA_DISK_INTERFACE_H_ diff --git a/ninja/src/disk_interface_test.cc b/ninja/src/disk_interface_test.cc deleted file mode 100644 index bac515d235a..00000000000 --- a/ninja/src/disk_interface_test.cc +++ /dev/null @@ -1,322 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include -#include -#ifdef _WIN32 -#include -#include -#endif - -#include "disk_interface.h" -#include "graph.h" -#include "test.h" - -namespace { - -struct DiskInterfaceTest : public testing::Test { - virtual void SetUp() { - // These tests do real disk accesses, so create a temp dir. - temp_dir_.CreateAndEnter("Ninja-DiskInterfaceTest"); - } - - virtual void TearDown() { - temp_dir_.Cleanup(); - } - - bool Touch(const char* path) { - FILE *f = fopen(path, "w"); - if (!f) - return false; - return fclose(f) == 0; - } - - ScopedTempDir temp_dir_; - RealDiskInterface disk_; -}; - -TEST_F(DiskInterfaceTest, StatMissingFile) { - string err; - EXPECT_EQ(0, disk_.Stat("nosuchfile", &err)); - EXPECT_EQ("", err); - - // On Windows, the errno for a file in a nonexistent directory - // is different. - EXPECT_EQ(0, disk_.Stat("nosuchdir/nosuchfile", &err)); - EXPECT_EQ("", err); - - // On POSIX systems, the errno is different if a component of the - // path prefix is not a directory. - ASSERT_TRUE(Touch("notadir")); - EXPECT_EQ(0, disk_.Stat("notadir/nosuchfile", &err)); - EXPECT_EQ("", err); -} - -TEST_F(DiskInterfaceTest, StatBadPath) { - string err; -#ifdef _WIN32 - string bad_path("cc:\\foo"); - EXPECT_EQ(-1, disk_.Stat(bad_path, &err)); - EXPECT_NE("", err); -#else - string too_long_name(512, 'x'); - EXPECT_EQ(-1, disk_.Stat(too_long_name, &err)); - EXPECT_NE("", err); -#endif -} - -TEST_F(DiskInterfaceTest, StatExistingFile) { - string err; - ASSERT_TRUE(Touch("file")); - EXPECT_GT(disk_.Stat("file", &err), 1); - EXPECT_EQ("", err); -} - -TEST_F(DiskInterfaceTest, StatExistingDir) { - string err; - ASSERT_TRUE(disk_.MakeDir("subdir")); - ASSERT_TRUE(disk_.MakeDir("subdir/subsubdir")); - EXPECT_GT(disk_.Stat("..", &err), 1); - EXPECT_EQ("", err); - EXPECT_GT(disk_.Stat(".", &err), 1); - EXPECT_EQ("", err); - EXPECT_GT(disk_.Stat("subdir", &err), 1); - EXPECT_EQ("", err); - EXPECT_GT(disk_.Stat("subdir/subsubdir", &err), 1); - EXPECT_EQ("", err); - - 
EXPECT_EQ(disk_.Stat("subdir", &err), - disk_.Stat("subdir/.", &err)); - EXPECT_EQ(disk_.Stat("subdir", &err), - disk_.Stat("subdir/subsubdir/..", &err)); - EXPECT_EQ(disk_.Stat("subdir/subsubdir", &err), - disk_.Stat("subdir/subsubdir/.", &err)); -} - -#ifdef _WIN32 -TEST_F(DiskInterfaceTest, StatCache) { - string err; - - ASSERT_TRUE(Touch("file1")); - ASSERT_TRUE(Touch("fiLE2")); - ASSERT_TRUE(disk_.MakeDir("subdir")); - ASSERT_TRUE(disk_.MakeDir("subdir/subsubdir")); - ASSERT_TRUE(Touch("subdir\\subfile1")); - ASSERT_TRUE(Touch("subdir\\SUBFILE2")); - ASSERT_TRUE(Touch("subdir\\SUBFILE3")); - - disk_.AllowStatCache(false); - TimeStamp parent_stat_uncached = disk_.Stat("..", &err); - disk_.AllowStatCache(true); - - EXPECT_GT(disk_.Stat("FIle1", &err), 1); - EXPECT_EQ("", err); - EXPECT_GT(disk_.Stat("file1", &err), 1); - EXPECT_EQ("", err); - - EXPECT_GT(disk_.Stat("subdir/subfile2", &err), 1); - EXPECT_EQ("", err); - EXPECT_GT(disk_.Stat("sUbdir\\suBFile1", &err), 1); - EXPECT_EQ("", err); - - EXPECT_GT(disk_.Stat("..", &err), 1); - EXPECT_EQ("", err); - EXPECT_GT(disk_.Stat(".", &err), 1); - EXPECT_EQ("", err); - EXPECT_GT(disk_.Stat("subdir", &err), 1); - EXPECT_EQ("", err); - EXPECT_GT(disk_.Stat("subdir/subsubdir", &err), 1); - EXPECT_EQ("", err); - -#ifndef _MSC_VER // TODO: Investigate why. Also see https://github.com/ninja-build/ninja/pull/1423 - EXPECT_EQ(disk_.Stat("subdir", &err), - disk_.Stat("subdir/.", &err)); - EXPECT_EQ("", err); - EXPECT_EQ(disk_.Stat("subdir", &err), - disk_.Stat("subdir/subsubdir/..", &err)); -#endif - EXPECT_EQ("", err); - EXPECT_EQ(disk_.Stat("..", &err), parent_stat_uncached); - EXPECT_EQ("", err); - EXPECT_EQ(disk_.Stat("subdir/subsubdir", &err), - disk_.Stat("subdir/subsubdir/.", &err)); - EXPECT_EQ("", err); - - // Test error cases. 
- string bad_path("cc:\\foo"); - EXPECT_EQ(-1, disk_.Stat(bad_path, &err)); - EXPECT_NE("", err); err.clear(); - EXPECT_EQ(-1, disk_.Stat(bad_path, &err)); - EXPECT_NE("", err); err.clear(); - EXPECT_EQ(0, disk_.Stat("nosuchfile", &err)); - EXPECT_EQ("", err); - EXPECT_EQ(0, disk_.Stat("nosuchdir/nosuchfile", &err)); - EXPECT_EQ("", err); -} -#endif - -TEST_F(DiskInterfaceTest, ReadFile) { - string err; - std::string content; - ASSERT_EQ(DiskInterface::NotFound, - disk_.ReadFile("foobar", &content, &err)); - EXPECT_EQ("", content); - EXPECT_NE("", err); // actual value is platform-specific - err.clear(); - - const char* kTestFile = "testfile"; - FILE* f = fopen(kTestFile, "wb"); - ASSERT_TRUE(f); - const char* kTestContent = "test content\nok"; - fprintf(f, "%s", kTestContent); - ASSERT_EQ(0, fclose(f)); - - ASSERT_EQ(DiskInterface::Okay, - disk_.ReadFile(kTestFile, &content, &err)); - EXPECT_EQ(kTestContent, content); - EXPECT_EQ("", err); -} - -TEST_F(DiskInterfaceTest, MakeDirs) { - string path = "path/with/double//slash/"; - EXPECT_TRUE(disk_.MakeDirs(path.c_str())); - FILE* f = fopen((path + "a_file").c_str(), "w"); - EXPECT_TRUE(f); - EXPECT_EQ(0, fclose(f)); -#ifdef _WIN32 - string path2 = "another\\with\\back\\\\slashes\\"; - EXPECT_TRUE(disk_.MakeDirs(path2.c_str())); - FILE* f2 = fopen((path2 + "a_file").c_str(), "w"); - EXPECT_TRUE(f2); - EXPECT_EQ(0, fclose(f2)); -#endif -} - -TEST_F(DiskInterfaceTest, RemoveFile) { - const char* kFileName = "file-to-remove"; - ASSERT_TRUE(Touch(kFileName)); - EXPECT_EQ(0, disk_.RemoveFile(kFileName)); - EXPECT_EQ(1, disk_.RemoveFile(kFileName)); - EXPECT_EQ(1, disk_.RemoveFile("does not exist")); -} - -struct StatTest : public StateTestWithBuiltinRules, - public DiskInterface { - StatTest() : scan_(&state_, NULL, NULL, this, NULL) {} - - // DiskInterface implementation. 
- virtual TimeStamp Stat(const string& path, string* err) const; - virtual bool WriteFile(const string& path, const string& contents) { - assert(false); - return true; - } - virtual bool MakeDir(const string& path) { - assert(false); - return false; - } - virtual Status ReadFile(const string& path, string* contents, string* err) { - assert(false); - return NotFound; - } - virtual int RemoveFile(const string& path) { - assert(false); - return 0; - } - - DependencyScan scan_; - map mtimes_; - mutable vector stats_; -}; - -TimeStamp StatTest::Stat(const string& path, string* err) const { - stats_.push_back(path); - map::const_iterator i = mtimes_.find(path); - if (i == mtimes_.end()) - return 0; // File not found. - return i->second; -} - -TEST_F(StatTest, Simple) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat in\n")); - - Node* out = GetNode("out"); - string err; - EXPECT_TRUE(out->Stat(this, &err)); - EXPECT_EQ("", err); - ASSERT_EQ(1u, stats_.size()); - scan_.RecomputeDirty(out, NULL); - ASSERT_EQ(2u, stats_.size()); - ASSERT_EQ("out", stats_[0]); - ASSERT_EQ("in", stats_[1]); -} - -TEST_F(StatTest, TwoStep) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n")); - - Node* out = GetNode("out"); - string err; - EXPECT_TRUE(out->Stat(this, &err)); - EXPECT_EQ("", err); - ASSERT_EQ(1u, stats_.size()); - scan_.RecomputeDirty(out, NULL); - ASSERT_EQ(3u, stats_.size()); - ASSERT_EQ("out", stats_[0]); - ASSERT_TRUE(GetNode("out")->dirty()); - ASSERT_EQ("mid", stats_[1]); - ASSERT_TRUE(GetNode("mid")->dirty()); - ASSERT_EQ("in", stats_[2]); -} - -TEST_F(StatTest, Tree) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat mid1 mid2\n" -"build mid1: cat in11 in12\n" -"build mid2: cat in21 in22\n")); - - Node* out = GetNode("out"); - string err; - EXPECT_TRUE(out->Stat(this, &err)); - EXPECT_EQ("", err); - ASSERT_EQ(1u, stats_.size()); - scan_.RecomputeDirty(out, NULL); - ASSERT_EQ(1u + 6u, 
stats_.size()); - ASSERT_EQ("mid1", stats_[1]); - ASSERT_TRUE(GetNode("mid1")->dirty()); - ASSERT_EQ("in11", stats_[2]); -} - -TEST_F(StatTest, Middle) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n")); - - mtimes_["in"] = 1; - mtimes_["mid"] = 0; // missing - mtimes_["out"] = 1; - - Node* out = GetNode("out"); - string err; - EXPECT_TRUE(out->Stat(this, &err)); - EXPECT_EQ("", err); - ASSERT_EQ(1u, stats_.size()); - scan_.RecomputeDirty(out, NULL); - ASSERT_FALSE(GetNode("in")->dirty()); - ASSERT_TRUE(GetNode("mid")->dirty()); - ASSERT_TRUE(GetNode("out")->dirty()); -} - -} // namespace diff --git a/ninja/src/dyndep.cc b/ninja/src/dyndep.cc deleted file mode 100644 index 4a49bf3f6f1..00000000000 --- a/ninja/src/dyndep.cc +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "dyndep.h" - -#include -#include - -#include "debug_flags.h" -#include "disk_interface.h" -#include "dyndep_parser.h" -#include "graph.h" -#include "state.h" -#include "util.h" - -bool DyndepLoader::LoadDyndeps(Node* node, std::string* err) const { - DyndepFile ddf; - return LoadDyndeps(node, &ddf, err); -} - -bool DyndepLoader::LoadDyndeps(Node* node, DyndepFile* ddf, - std::string* err) const { - // We are loading the dyndep file now so it is no longer pending. - node->set_dyndep_pending(false); - - // Load the dyndep information from the file. 
- EXPLAIN("loading dyndep file '%s'", node->path().c_str()); - if (!LoadDyndepFile(node, ddf, err)) - return false; - - // Update each edge that specified this node as its dyndep binding. - for (DyndepFile::const_iterator ddi = ddf->begin(); - ddi != ddf->end(); ++ddi) { - Dyndeps const& dyndeps = ddi->second; - if (!UpdateEdge(ddi->first, &dyndeps, err)) { - return false; - } - } - // Some invariant provided by dep gen: - // - It has to be an input - // - It has to be marked as dyndep for each edge listed - // Reject extra outputs in dyndep file. - - - return true; -} - -bool DyndepLoader::UpdateEdge(Edge* edge, Dyndeps const* dyndeps, - std::string* err) { - // Add dyndep-discovered bindings to the edge. - // We know the edge already has its own binding - // scope because it has a "dyndep" binding. - // Add the dyndep-discovered inputs to the edge. - edge->inputs_.insert(edge->inputs_.end() - edge->order_only_deps_, - dyndeps->implicit_inputs_.begin(), - dyndeps->implicit_inputs_.end()); - edge->implicit_deps_ += dyndeps->implicit_inputs_.size(); - - // Add this edge as outgoing from each new input. - for (std::vector::const_iterator i = - dyndeps->implicit_inputs_.begin(); - i != dyndeps->implicit_inputs_.end(); ++i) - (*i)->AddOutEdge(edge); - - return true; -} - -bool DyndepLoader::LoadDyndepFile(Node* file, DyndepFile* ddf, - std::string* err) const { - DyndepParser parser(state_, disk_interface_, ddf); - return parser.Load(file->path(), err); -} diff --git a/ninja/src/dyndep.h b/ninja/src/dyndep.h deleted file mode 100644 index f2c66040543..00000000000 --- a/ninja/src/dyndep.h +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_DYNDEP_LOADER_H_ -#define NINJA_DYNDEP_LOADER_H_ - -#include -#include -#include - -struct DiskInterface; -struct Edge; -struct Node; -struct State; - -/// Store dynamically-discovered dependency information for one edge. -#if 1 -struct Dyndeps { - std::vector implicit_inputs_; -}; -#else -struct Dyndeps { - Dyndeps() : used_(false), restat_(false) {} - bool used_; - bool restat_; - std::vector implicit_inputs_; - std::vector implicit_outputs_; -}; - -#endif -/// Store data loaded from one dyndep file. Map from an edge -/// to its dynamically-discovered dependency information. -/// This is a struct rather than a typedef so that we can -/// forward-declare it in other headers. -struct DyndepFile: public std::map {}; - -/// DyndepLoader loads dynamically discovered dependencies, as -/// referenced via the "dyndep" attribute in build files. -struct DyndepLoader { - DyndepLoader(State* state, DiskInterface* disk_interface) - : state_(state), disk_interface_(disk_interface) {} - - /// Load a dyndep file from the given node's path and update the - /// build graph with the new information. One overload accepts - /// a caller-owned 'DyndepFile' object in which to store the - /// information loaded from the dyndep file. 
- bool LoadDyndeps(Node* node, std::string* err) const; - bool LoadDyndeps(Node* node, DyndepFile* ddf, std::string* err) const; - static bool UpdateEdge(Edge* edge, Dyndeps const* dyndeps, std::string* err); - private: - bool LoadDyndepFile(Node* file, DyndepFile* ddf, std::string* err) const; - - - - State* state_; - DiskInterface* disk_interface_; -}; - -#endif // NINJA_DYNDEP_LOADER_H_ diff --git a/ninja/src/dyndep_parser.cc b/ninja/src/dyndep_parser.cc deleted file mode 100644 index daf9f9fd9e7..00000000000 --- a/ninja/src/dyndep_parser.cc +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "dyndep_parser.h" - -#include - -#include "dyndep.h" -#include "graph.h" -#include "state.h" -#include "util.h" -#include "version.h" - -DyndepParser::DyndepParser(State* state, DiskInterface* file_reader, - DyndepFile* dyndep_file) - : Parser(state, file_reader) - , dyndep_file_(dyndep_file) { -} - -bool DyndepParser::Parse(const string& filename, const string& input, - string* err) { - lexer_.Start(filename, input); - for (;;) { - if(lexer_.EndAfterEatWhiteSpace()){ - return true; - } - if(!ParseEdge(err)){ - return false; - } - } - return false; // not reached -} - -bool DyndepParser::ParseEdge(string* err) { - // Parse one explicit output. We expect it to already have an edge. - // We will record its dynamically-discovered dependency information. 
- Dyndeps* dyndeps = NULL; - Edge* edge = NULL; - { - string path; - if(!lexer_.ReadSimplePath(&path)){ - return lexer_.Error("expected path",err); - } - string path_err; - uint64_t slash_bits; - if (!CanonicalizePath(&path, &slash_bits, &path_err)) - return lexer_.Error(path_err, err); - Node* node = state_->LookupNode(path); - if (!node || !node->in_edge()) - return lexer_.Error("no build statement exists for '" + path + "'", err); - edge = node->in_edge(); - std::pair res = - dyndep_file_->insert(DyndepFile::value_type(edge, Dyndeps())); - if (!res.second) - return lexer_.Error("multiple statements for '" + path + "'", err); - dyndeps = &res.first->second; - } - if (!ExpectToken(Lexer::COLON, err)) - return false; - - // Parse implicit inputs, if any. - vectorins; - for (;;) { - string in; - if (!lexer_.ReadSimplePath(&in)) { - break; - } - ins.push_back(in); - } - if (!ExpectToken(Lexer::NEWLINE, err)) - return false; - dyndeps->implicit_inputs_.reserve(ins.size()); - string path_err; - for (vector::iterator i = ins.begin(); i != ins.end(); ++i) { - string path = *i; - uint64_t slash_bits; - if (!CanonicalizePath(&path, &slash_bits, &path_err)) - return lexer_.Error(path_err, err); - Node* n = state_->GetNode(path, slash_bits); - dyndeps->implicit_inputs_.push_back(n); - } - - return true; -} diff --git a/ninja/src/dyndep_parser.h b/ninja/src/dyndep_parser.h deleted file mode 100644 index 695c6f17aa0..00000000000 --- a/ninja/src/dyndep_parser.h +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_DYNDEP_PARSER_H_ -#define NINJA_DYNDEP_PARSER_H_ - - -#include "parser.h" - -struct DyndepFile; - - -/// Parses dyndep files. -struct DyndepParser: public Parser { - DyndepParser(State* state, DiskInterface* file_reader, - DyndepFile* dyndep_file); -private: - /// Parse a file, given its contents as a string. - bool Parse(const string& filename, const string& input, string* err); - bool ParseEdge(string* err); - DyndepFile* dyndep_file_; - -}; - -#endif // NINJA_DYNDEP_PARSER_H_ diff --git a/ninja/src/dyndep_parser_test.cc b/ninja/src/dyndep_parser_test.cc deleted file mode 100644 index 39ec6574f2c..00000000000 --- a/ninja/src/dyndep_parser_test.cc +++ /dev/null @@ -1,512 +0,0 @@ -// Copyright 2015 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "dyndep_parser.h" - -#include -#include - -#include "dyndep.h" -#include "graph.h" -#include "state.h" -#include "test.h" - -struct DyndepParserTest : public testing::Test { - void AssertParse(const char* input) { - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_TRUE(parser.ParseTest(input, &err)); - ASSERT_EQ("", err); - } - - virtual void SetUp() { - ::AssertParse(&state_, -"rule touch\n" -" command = touch $out\n" -"build out otherout: touch\n"); - } - - State state_; - VirtualFileSystem fs_; - DyndepFile dyndep_file_; -}; - -TEST_F(DyndepParserTest, Empty) { - const char kInput[] = -""; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:1: expected 'ninja_dyndep_version = ...'\n", err); -} - -TEST_F(DyndepParserTest, Version1) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n")); -} - -TEST_F(DyndepParserTest, Version1Extra) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1-extra\n")); -} - -TEST_F(DyndepParserTest, Version1_0) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1.0\n")); -} - -TEST_F(DyndepParserTest, Version1_0Extra) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1.0-extra\n")); -} - -TEST_F(DyndepParserTest, CommentVersion) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"# comment\n" -"ninja_dyndep_version = 1\n")); -} - -TEST_F(DyndepParserTest, BlankLineVersion) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"\n" -"ninja_dyndep_version = 1\n")); -} - -TEST_F(DyndepParserTest, VersionCRLF) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\r\n")); -} - -TEST_F(DyndepParserTest, CommentVersionCRLF) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"# comment\r\n" -"ninja_dyndep_version = 1\r\n")); -} - -TEST_F(DyndepParserTest, BlankLineVersionCRLF) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"\r\n" -"ninja_dyndep_version = 1\r\n")); -} - 
-TEST_F(DyndepParserTest, VersionUnexpectedEOF) { - const char kInput[] = -"ninja_dyndep_version = 1.0"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:1: unexpected EOF\n" - "ninja_dyndep_version = 1.0\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, UnsupportedVersion0) { - const char kInput[] = -"ninja_dyndep_version = 0\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:1: unsupported 'ninja_dyndep_version = 0'\n" - "ninja_dyndep_version = 0\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, UnsupportedVersion1_1) { - const char kInput[] = -"ninja_dyndep_version = 1.1\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:1: unsupported 'ninja_dyndep_version = 1.1'\n" - "ninja_dyndep_version = 1.1\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, DuplicateVersion) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"ninja_dyndep_version = 1\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: unexpected identifier\n", err); -} - -TEST_F(DyndepParserTest, MissingVersionOtherVar) { - const char kInput[] = -"not_ninja_dyndep_version = 1\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:1: expected 'ninja_dyndep_version = ...'\n" - "not_ninja_dyndep_version = 1\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, MissingVersionBuild) { - const char kInput[] = -"build out: dyndep\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:1: expected 'ninja_dyndep_version = ...'\n", err); -} - -TEST_F(DyndepParserTest, 
UnexpectedEqual) { - const char kInput[] = -"= 1\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:1: unexpected '='\n", err); -} - -TEST_F(DyndepParserTest, UnexpectedIndent) { - const char kInput[] = -" = 1\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:1: unexpected indent\n", err); -} - -TEST_F(DyndepParserTest, OutDuplicate) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -"build out: dyndep\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:3: multiple statements for 'out'\n" - "build out: dyndep\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, OutDuplicateThroughOther) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -"build otherout: dyndep\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:3: multiple statements for 'otherout'\n" - "build otherout: dyndep\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, NoOutEOF) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: unexpected EOF\n" - "build\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, NoOutColon) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build :\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: expected path\n" - "build :\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, OutNoStatement) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build missing: dyndep\n"; - DyndepParser parser(&state_, &fs_, 
&dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: no build statement exists for 'missing'\n" - "build missing: dyndep\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, OutEOF) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: unexpected EOF\n" - "build out\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, OutNoRule) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out:"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: expected build command name 'dyndep'\n" - "build out:\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, OutBadRule) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out: touch"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: expected build command name 'dyndep'\n" - "build out: touch\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, BuildEOF) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out: dyndep"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: unexpected EOF\n" - "build out: dyndep\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, ExplicitOut) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out exp: dyndep\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: explicit outputs not supported\n" - "build out exp: dyndep\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, ExplicitIn) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out: dyndep exp\n"; - DyndepParser 
parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: explicit inputs not supported\n" - "build out: dyndep exp\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, OrderOnlyIn) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out: dyndep ||\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:2: order-only inputs not supported\n" - "build out: dyndep ||\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, BadBinding) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -" not_restat = 1\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:3: binding is not 'restat'\n" - " not_restat = 1\n" - " ^ near here", err); -} - -TEST_F(DyndepParserTest, RestatTwice) { - const char kInput[] = -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -" restat = 1\n" -" restat = 1\n"; - DyndepParser parser(&state_, &fs_, &dyndep_file_); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:4: unexpected indent\n", err); -} - -TEST_F(DyndepParserTest, NoImplicit) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out: dyndep\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - EXPECT_EQ(0u, i->second.implicit_outputs_.size()); - EXPECT_EQ(0u, i->second.implicit_inputs_.size()); -} - -TEST_F(DyndepParserTest, EmptyImplicit) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out | : dyndep |\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - 
EXPECT_EQ(0u, i->second.implicit_outputs_.size()); - EXPECT_EQ(0u, i->second.implicit_inputs_.size()); -} - -TEST_F(DyndepParserTest, ImplicitIn) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out: dyndep | impin\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - EXPECT_EQ(0u, i->second.implicit_outputs_.size()); - ASSERT_EQ(1u, i->second.implicit_inputs_.size()); - EXPECT_EQ("impin", i->second.implicit_inputs_[0]->path()); -} - -TEST_F(DyndepParserTest, ImplicitIns) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out: dyndep | impin1 impin2\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - EXPECT_EQ(0u, i->second.implicit_outputs_.size()); - ASSERT_EQ(2u, i->second.implicit_inputs_.size()); - EXPECT_EQ("impin1", i->second.implicit_inputs_[0]->path()); - EXPECT_EQ("impin2", i->second.implicit_inputs_[1]->path()); -} - -TEST_F(DyndepParserTest, ImplicitOut) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out | impout: dyndep\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - ASSERT_EQ(1u, i->second.implicit_outputs_.size()); - EXPECT_EQ("impout", i->second.implicit_outputs_[0]->path()); - EXPECT_EQ(0u, i->second.implicit_inputs_.size()); -} - -TEST_F(DyndepParserTest, ImplicitOuts) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out | impout1 impout2 : dyndep\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - 
ASSERT_EQ(2u, i->second.implicit_outputs_.size()); - EXPECT_EQ("impout1", i->second.implicit_outputs_[0]->path()); - EXPECT_EQ("impout2", i->second.implicit_outputs_[1]->path()); - EXPECT_EQ(0u, i->second.implicit_inputs_.size()); -} - -TEST_F(DyndepParserTest, ImplicitInsAndOuts) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out | impout1 impout2: dyndep | impin1 impin2\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - ASSERT_EQ(2u, i->second.implicit_outputs_.size()); - EXPECT_EQ("impout1", i->second.implicit_outputs_[0]->path()); - EXPECT_EQ("impout2", i->second.implicit_outputs_[1]->path()); - ASSERT_EQ(2u, i->second.implicit_inputs_.size()); - EXPECT_EQ("impin1", i->second.implicit_inputs_[0]->path()); - EXPECT_EQ("impin2", i->second.implicit_inputs_[1]->path()); -} - -TEST_F(DyndepParserTest, Restat) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -" restat = 1\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(true, i->second.restat_); - EXPECT_EQ(0u, i->second.implicit_outputs_.size()); - EXPECT_EQ(0u, i->second.implicit_inputs_.size()); -} - -TEST_F(DyndepParserTest, OtherOutput) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build otherout: dyndep\n")); - - EXPECT_EQ(1u, dyndep_file_.size()); - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - EXPECT_EQ(0u, i->second.implicit_outputs_.size()); - EXPECT_EQ(0u, i->second.implicit_inputs_.size()); -} - -TEST_F(DyndepParserTest, MultipleEdges) { - ::AssertParse(&state_, -"build out2: touch\n"); - ASSERT_EQ(2u, state_.edges_.size()); - ASSERT_EQ(1u, 
state_.edges_[1]->outputs_.size()); - EXPECT_EQ("out2", state_.edges_[1]->outputs_[0]->path()); - EXPECT_EQ(0u, state_.edges_[0]->inputs_.size()); - - ASSERT_NO_FATAL_FAILURE(AssertParse( -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -"build out2: dyndep\n" -" restat = 1\n")); - - EXPECT_EQ(2u, dyndep_file_.size()); - { - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[0]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(false, i->second.restat_); - EXPECT_EQ(0u, i->second.implicit_outputs_.size()); - EXPECT_EQ(0u, i->second.implicit_inputs_.size()); - } - { - DyndepFile::iterator i = dyndep_file_.find(state_.edges_[1]); - ASSERT_NE(i, dyndep_file_.end()); - EXPECT_EQ(true, i->second.restat_); - EXPECT_EQ(0u, i->second.implicit_outputs_.size()); - EXPECT_EQ(0u, i->second.implicit_inputs_.size()); - } -} diff --git a/ninja/src/edit_distance.cc b/ninja/src/edit_distance.cc deleted file mode 100644 index 3bb62b85899..00000000000 --- a/ninja/src/edit_distance.cc +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "edit_distance.h" - -#include -#include - -int EditDistance(const StringPiece& s1, - const StringPiece& s2, - bool allow_replacements, - int max_edit_distance) { - // The algorithm implemented below is the "classic" - // dynamic-programming algorithm for computing the Levenshtein - // distance, which is described here: - // - // http://en.wikipedia.org/wiki/Levenshtein_distance - // - // Although the algorithm is typically described using an m x n - // array, only one row plus one element are used at a time, so this - // implementation just keeps one vector for the row. To update one entry, - // only the entries to the left, top, and top-left are needed. The left - // entry is in row[x-1], the top entry is what's in row[x] from the last - // iteration, and the top-left entry is stored in previous. - int m = s1.len_; - int n = s2.len_; - - vector row(n + 1); - for (int i = 1; i <= n; ++i) - row[i] = i; - - for (int y = 1; y <= m; ++y) { - row[0] = y; - int best_this_row = row[0]; - - int previous = y - 1; - for (int x = 1; x <= n; ++x) { - int old_row = row[x]; - if (allow_replacements) { - row[x] = min(previous + (s1.str_[y - 1] == s2.str_[x - 1] ? 0 : 1), - min(row[x - 1], row[x]) + 1); - } - else { - if (s1.str_[y - 1] == s2.str_[x - 1]) - row[x] = previous; - else - row[x] = min(row[x - 1], row[x]) + 1; - } - previous = old_row; - best_this_row = min(best_this_row, row[x]); - } - - if (max_edit_distance && best_this_row > max_edit_distance) - return max_edit_distance + 1; - } - - return row[n]; -} diff --git a/ninja/src/edit_distance.h b/ninja/src/edit_distance.h deleted file mode 100644 index 45ae4aecd31..00000000000 --- a/ninja/src/edit_distance.h +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_EDIT_DISTANCE_H_ -#define NINJA_EDIT_DISTANCE_H_ - -#include "string_piece.h" - -int EditDistance(const StringPiece& s1, - const StringPiece& s2, - bool allow_replacements = true, - int max_edit_distance = 0); - -#endif // NINJA_EDIT_DISTANCE_H_ diff --git a/ninja/src/edit_distance_test.cc b/ninja/src/edit_distance_test.cc deleted file mode 100644 index 9dc0f827a75..00000000000 --- a/ninja/src/edit_distance_test.cc +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "edit_distance.h" - -#include "test.h" - -TEST(EditDistanceTest, TestEmpty) { - EXPECT_EQ(5, EditDistance("", "ninja")); - EXPECT_EQ(5, EditDistance("ninja", "")); - EXPECT_EQ(0, EditDistance("", "")); -} - -TEST(EditDistanceTest, TestMaxDistance) { - const bool allow_replacements = true; - for (int max_distance = 1; max_distance < 7; ++max_distance) { - EXPECT_EQ(max_distance + 1, - EditDistance("abcdefghijklmnop", "ponmlkjihgfedcba", - allow_replacements, max_distance)); - } -} - -TEST(EditDistanceTest, TestAllowReplacements) { - bool allow_replacements = true; - EXPECT_EQ(1, EditDistance("ninja", "njnja", allow_replacements)); - EXPECT_EQ(1, EditDistance("njnja", "ninja", allow_replacements)); - - allow_replacements = false; - EXPECT_EQ(2, EditDistance("ninja", "njnja", allow_replacements)); - EXPECT_EQ(2, EditDistance("njnja", "ninja", allow_replacements)); -} - -TEST(EditDistanceTest, TestBasics) { - EXPECT_EQ(0, EditDistance("browser_tests", "browser_tests")); - EXPECT_EQ(1, EditDistance("browser_test", "browser_tests")); - EXPECT_EQ(1, EditDistance("browser_tests", "browser_test")); -} diff --git a/ninja/src/eval_env.cc b/ninja/src/eval_env.cc deleted file mode 100644 index e9b6c437e3b..00000000000 --- a/ninja/src/eval_env.cc +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include - -#include "eval_env.h" - -string BindingEnv::LookupVariable(const string& var) { - map::iterator i = bindings_.find(var); - if (i != bindings_.end()) - return i->second; - if (parent_) - return parent_->LookupVariable(var); - return ""; -} - -void BindingEnv::AddBinding(const string& key, const string& val) { - bindings_[key] = val; -} - -void BindingEnv::AddRule(const Rule* rule) { - assert(LookupRuleCurrentScope(rule->name()) == NULL); - rules_[rule->name()] = rule; -} - -const Rule* BindingEnv::LookupRuleCurrentScope(const string& rule_name) { - map::iterator i = rules_.find(rule_name); - if (i == rules_.end()) - return NULL; - return i->second; -} - -const Rule* BindingEnv::LookupRule(const string& rule_name) { - map::iterator i = rules_.find(rule_name); - if (i != rules_.end()) - return i->second; - if (parent_) - return parent_->LookupRule(rule_name); - return NULL; -} - -void Rule::AddBinding(const string& key, const EvalString& val) { - bindings_[key] = val; -} - -const EvalString* Rule::GetBinding(const string& key) const { - Bindings::const_iterator i = bindings_.find(key); - if (i == bindings_.end()) - return NULL; - return &i->second; -} - -// static -bool Rule::IsReservedBinding(const string& var) { - return var == "command" || - var == "depfile" || - var == "dyndep" || - var == "description" || - var == "deps" || - var == "generator" || - var == "pool" || - var == "restat" || - var == "rspfile" || - var == "rspfile_content" || - var == "msvc_deps_prefix"; -} - -const map& BindingEnv::GetRules() const { - return rules_; -} - -string BindingEnv::LookupWithFallback(const string& var, - const EvalString* eval, - Env* env) { - map::iterator i = bindings_.find(var); - if (i != bindings_.end()) - return i->second; - - if (eval) - return eval->Evaluate(env); - - if (parent_) - return parent_->LookupVariable(var); - - return ""; -} - -string EvalString::Evaluate(Env* env) const { - string result; - for (TokenList::const_iterator i = 
parsed_.begin(); i != parsed_.end(); ++i) { - if (i->second == RAW) - result.append(i->first); - else - result.append(env->LookupVariable(i->first)); - } - return result; -} - -void EvalString::AddText(StringPiece text) { - // Add it to the end of an existing RAW token if possible. - if (!parsed_.empty() && parsed_.back().second == RAW) { - parsed_.back().first.append(text.str_, text.len_); - } else { - parsed_.push_back(make_pair(text.AsString(), RAW)); - } -} -void EvalString::AddSpecial(StringPiece text) { - parsed_.push_back(make_pair(text.AsString(), SPECIAL)); -} - -string EvalString::Serialize() const { - string result; - for (TokenList::const_iterator i = parsed_.begin(); - i != parsed_.end(); ++i) { - result.append("["); - if (i->second == SPECIAL) - result.append("$"); - result.append(i->first); - result.append("]"); - } - return result; -} - -string EvalString::Unparse() const { - string result; - for (TokenList::const_iterator i = parsed_.begin(); - i != parsed_.end(); ++i) { - bool special = (i->second == SPECIAL); - if (special) - result.append("${"); - result.append(i->first); - if (special) - result.append("}"); - } - return result; -} diff --git a/ninja/src/eval_env.h b/ninja/src/eval_env.h deleted file mode 100644 index 8fb9bf4e339..00000000000 --- a/ninja/src/eval_env.h +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef NINJA_EVAL_ENV_H_ -#define NINJA_EVAL_ENV_H_ - -#include -#include -#include -using namespace std; - -#include "string_piece.h" - -struct Rule; - -/// An interface for a scope for variable (e.g. "$foo") lookups. -struct Env { - virtual ~Env() {} - virtual string LookupVariable(const string& var) = 0; -}; - -/// A tokenized string that contains variable references. -/// Can be evaluated relative to an Env. -struct EvalString { - /// @return The evaluated string with variable expanded using value found in - /// environment @a env. - string Evaluate(Env* env) const; - - /// @return The string with variables not expanded. - string Unparse() const; - - void Clear() { parsed_.clear(); } - bool empty() const { return parsed_.empty(); } - - void AddText(StringPiece text); - void AddSpecial(StringPiece text); - - /// Construct a human-readable representation of the parsed state, - /// for use in tests. - string Serialize() const; - -private: - enum TokenType { RAW, SPECIAL }; - typedef vector > TokenList; - TokenList parsed_; -}; - -/// An invokable build command and associated metadata (description, etc.). -struct Rule { - explicit Rule(const string& name) : name_(name) {} - - const string& name() const { return name_; } - - void AddBinding(const string& key, const EvalString& val); - - static bool IsReservedBinding(const string& var); - - const EvalString* GetBinding(const string& key) const; - - private: - // Allow the parsers to reach into this object and fill out its fields. - friend struct ManifestParser; - - string name_; - typedef map Bindings; - Bindings bindings_; -}; - -/// An Env which contains a mapping of variables to values -/// as well as a pointer to a parent scope. 
-struct BindingEnv : public Env { - BindingEnv() : parent_(NULL) {} - explicit BindingEnv(BindingEnv* parent) : parent_(parent) {} - - virtual ~BindingEnv() {} - virtual string LookupVariable(const string& var); - - void AddRule(const Rule* rule); - const Rule* LookupRule(const string& rule_name); - const Rule* LookupRuleCurrentScope(const string& rule_name); - const map& GetRules() const; - - void AddBinding(const string& key, const string& val); - - /// This is tricky. Edges want lookup scope to go in this order: - /// 1) value set on edge itself (edge_->env_) - /// 2) value set on rule, with expansion in the edge's scope - /// 3) value set on enclosing scope of edge (edge_->env_->parent_) - /// This function takes as parameters the necessary info to do (2). - string LookupWithFallback(const string& var, const EvalString* eval, - Env* env); - -private: - map bindings_; - map rules_; - BindingEnv* parent_; -}; - -#endif // NINJA_EVAL_ENV_H_ diff --git a/ninja/src/exit_status.h b/ninja/src/exit_status.h deleted file mode 100644 index a714ece791f..00000000000 --- a/ninja/src/exit_status.h +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef NINJA_EXIT_STATUS_H_ -#define NINJA_EXIT_STATUS_H_ - -enum ExitStatus { - ExitSuccess, - ExitFailure, - ExitInterrupted -}; - -#endif // NINJA_EXIT_STATUS_H_ diff --git a/ninja/src/gen_doxygen_mainpage.sh b/ninja/src/gen_doxygen_mainpage.sh deleted file mode 100755 index d1599477ebd..00000000000 --- a/ninja/src/gen_doxygen_mainpage.sh +++ /dev/null @@ -1,92 +0,0 @@ -#!/bin/sh - -# Copyright 2011 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -o errexit -set -o nounset - -STATUS=0 - -# Print each of its arguments on stderr (one per line) prefixed by the -# basename of this script. -stderr() -{ - local me=$(basename "$0") - local i - for i - do - echo >&2 "$me: $i" - done -} - -# Print each of its arguments on stderr (one per line) prefixed by the -# basename of this script and 'error'. -error() -{ - local i - for i - do - stderr "error: $i" - done - STATUS=1 -} - -generate_header() -{ - cat <&2 "usage: $0 inputs..." 
- exit 1 -fi - -generate_header -for i in "$@" -do - include_file "$i" -done -generate_footer - -exit $STATUS diff --git a/ninja/src/getopt.c b/ninja/src/getopt.c deleted file mode 100644 index 861f07f3a25..00000000000 --- a/ninja/src/getopt.c +++ /dev/null @@ -1,410 +0,0 @@ -/**************************************************************************** - -getopt.c - Read command line options - -AUTHOR: Gregory Pietsch -CREATED Fri Jan 10 21:13:05 1997 - -DESCRIPTION: - -The getopt() function parses the command line arguments. Its arguments argc -and argv are the argument count and array as passed to the main() function -on program invocation. The argument optstring is a list of available option -characters. If such a character is followed by a colon (`:'), the option -takes an argument, which is placed in optarg. If such a character is -followed by two colons, the option takes an optional argument, which is -placed in optarg. If the option does not take an argument, optarg is NULL. - -The external variable optind is the index of the next array element of argv -to be processed; it communicates from one call to the next which element to -process. - -The getopt_long() function works like getopt() except that it also accepts -long options started by two dashes `--'. If these take values, it is either -in the form - ---arg=value - - or - ---arg value - -It takes the additional arguments longopts which is a pointer to the first -element of an array of type GETOPT_LONG_OPTION_T. The last element of the -array has to be filled with NULL for the name field. - -The longind pointer points to the index of the current long option relative -to longopts if it is non-NULL. - -The getopt() function returns the option character if the option was found -successfully, `:' if there was a missing parameter for one of the options, -`?' for an unknown option character, and EOF for the end of the option list. - -The getopt_long() function's return value is described in the header file. 
- -The function getopt_long_only() is identical to getopt_long(), except that a -plus sign `+' can introduce long options as well as `--'. - -The following describes how to deal with options that follow non-option -argv-elements. - -If the caller did not specify anything, the default is REQUIRE_ORDER if the -environment variable POSIXLY_CORRECT is defined, PERMUTE otherwise. - -REQUIRE_ORDER means don't recognize them as options; stop option processing -when the first non-option is seen. This is what Unix does. This mode of -operation is selected by either setting the environment variable -POSIXLY_CORRECT, or using `+' as the first character of the optstring -parameter. - -PERMUTE is the default. We permute the contents of ARGV as we scan, so that -eventually all the non-options are at the end. This allows options to be -given in any order, even with programs that were not written to expect this. - -RETURN_IN_ORDER is an option available to programs that were written to -expect options and other argv-elements in any order and that care about the -ordering of the two. We describe each non-option argv-element as if it were -the argument of an option with character code 1. Using `-' as the first -character of the optstring parameter selects this mode of operation. - -The special argument `--' forces an end of option-scanning regardless of the -value of ordering. In the case of RETURN_IN_ORDER, only `--' can cause -getopt() and friends to return EOF with optind != argc. - -COPYRIGHT NOTICE AND DISCLAIMER: - -Copyright (C) 1997 Gregory Pietsch - -This file and the accompanying getopt.h header file are hereby placed in the -public domain without restrictions. Just give the author credit, don't -claim you wrote it or prevent anyone else from using it. 
- -Gregory Pietsch's current e-mail address: -gpietsch@comcast.net -****************************************************************************/ - -/* include files */ -#include -#include -#include -#ifndef GETOPT_H -#include "getopt.h" -#endif - -/* macros */ - -/* types */ -typedef enum GETOPT_ORDERING_T -{ - PERMUTE, - RETURN_IN_ORDER, - REQUIRE_ORDER -} GETOPT_ORDERING_T; - -/* globally-defined variables */ -char *optarg = NULL; -int optind = 0; -int opterr = 1; -int optopt = '?'; - -/* functions */ - -/* reverse_argv_elements: reverses num elements starting at argv */ -static void -reverse_argv_elements (char **argv, int num) -{ - int i; - char *tmp; - - for (i = 0; i < (num >> 1); i++) - { - tmp = argv[i]; - argv[i] = argv[num - i - 1]; - argv[num - i - 1] = tmp; - } -} - -/* permute: swap two blocks of argv-elements given their lengths */ -static void -permute (char **argv, int len1, int len2) -{ - reverse_argv_elements (argv, len1); - reverse_argv_elements (argv, len1 + len2); - reverse_argv_elements (argv, len2); -} - -/* is_option: is this argv-element an option or the end of the option list? 
*/ -static int -is_option (char *argv_element, int only) -{ - return ((argv_element == NULL) - || (argv_element[0] == '-') || (only && argv_element[0] == '+')); -} - -/* getopt_internal: the function that does all the dirty work */ -static int -getopt_internal (int argc, char **argv, char *shortopts, - GETOPT_LONG_OPTION_T * longopts, int *longind, int only) -{ - GETOPT_ORDERING_T ordering = PERMUTE; - static size_t optwhere = 0; - size_t permute_from = 0; - int num_nonopts = 0; - int optindex = 0; - size_t match_chars = 0; - char *possible_arg = NULL; - int longopt_match = -1; - int has_arg = -1; - char *cp = NULL; - int arg_next = 0; - - /* first, deal with silly parameters and easy stuff */ - if (argc == 0 || argv == NULL || (shortopts == NULL && longopts == NULL)) - return (optopt = '?'); - if (optind >= argc || argv[optind] == NULL) - return EOF; - if (strcmp (argv[optind], "--") == 0) - { - optind++; - return EOF; - } - /* if this is our first time through */ - if (optind == 0) - optind = optwhere = 1; - - /* define ordering */ - if (shortopts != NULL && (*shortopts == '-' || *shortopts == '+')) - { - ordering = (*shortopts == '-') ? RETURN_IN_ORDER : REQUIRE_ORDER; - shortopts++; - } - else - ordering = (getenv ("POSIXLY_CORRECT") != NULL) ? 
REQUIRE_ORDER : PERMUTE; - - /* - * based on ordering, find our next option, if we're at the beginning of - * one - */ - if (optwhere == 1) - { - switch (ordering) - { - case PERMUTE: - permute_from = optind; - num_nonopts = 0; - while (!is_option (argv[optind], only)) - { - optind++; - num_nonopts++; - } - if (argv[optind] == NULL) - { - /* no more options */ - optind = permute_from; - return EOF; - } - else if (strcmp (argv[optind], "--") == 0) - { - /* no more options, but have to get `--' out of the way */ - permute (argv + permute_from, num_nonopts, 1); - optind = permute_from + 1; - return EOF; - } - break; - case RETURN_IN_ORDER: - if (!is_option (argv[optind], only)) - { - optarg = argv[optind++]; - return (optopt = 1); - } - break; - case REQUIRE_ORDER: - if (!is_option (argv[optind], only)) - return EOF; - break; - } - } - /* we've got an option, so parse it */ - - /* first, is it a long option? */ - if (longopts != NULL - && (memcmp (argv[optind], "--", 2) == 0 - || (only && argv[optind][0] == '+')) && optwhere == 1) - { - /* handle long options */ - if (memcmp (argv[optind], "--", 2) == 0) - optwhere = 2; - longopt_match = -1; - possible_arg = strchr (argv[optind] + optwhere, '='); - if (possible_arg == NULL) - { - /* no =, so next argv might be arg */ - match_chars = strlen (argv[optind]); - possible_arg = argv[optind] + match_chars; - match_chars = match_chars - optwhere; - } - else - match_chars = (possible_arg - argv[optind]) - optwhere; - for (optindex = 0; longopts[optindex].name != NULL; optindex++) - { - if (memcmp (argv[optind] + optwhere, - longopts[optindex].name, match_chars) == 0) - { - /* do we have an exact match? */ - if (match_chars == strlen (longopts[optindex].name)) - { - longopt_match = optindex; - break; - } - /* do any characters match? 
*/ - else - { - if (longopt_match < 0) - longopt_match = optindex; - else - { - /* we have ambiguous options */ - if (opterr) - fprintf (stderr, "%s: option `%s' is ambiguous " - "(could be `--%s' or `--%s')\n", - argv[0], - argv[optind], - longopts[longopt_match].name, - longopts[optindex].name); - return (optopt = '?'); - } - } - } - } - if (longopt_match >= 0) - has_arg = longopts[longopt_match].has_arg; - } - /* if we didn't find a long option, is it a short option? */ - if (longopt_match < 0 && shortopts != NULL) - { - cp = strchr (shortopts, argv[optind][optwhere]); - if (cp == NULL) - { - /* couldn't find option in shortopts */ - if (opterr) - fprintf (stderr, - "%s: invalid option -- `-%c'\n", - argv[0], argv[optind][optwhere]); - optwhere++; - if (argv[optind][optwhere] == '\0') - { - optind++; - optwhere = 1; - } - return (optopt = '?'); - } - has_arg = ((cp[1] == ':') - ? ((cp[2] == ':') ? OPTIONAL_ARG : required_argument) : no_argument); - possible_arg = argv[optind] + optwhere + 1; - optopt = *cp; - } - /* get argument and reset optwhere */ - arg_next = 0; - switch (has_arg) - { - case OPTIONAL_ARG: - if (*possible_arg == '=') - possible_arg++; - if (*possible_arg != '\0') - { - optarg = possible_arg; - optwhere = 1; - } - else - optarg = NULL; - break; - case required_argument: - if (*possible_arg == '=') - possible_arg++; - if (*possible_arg != '\0') - { - optarg = possible_arg; - optwhere = 1; - } - else if (optind + 1 >= argc) - { - if (opterr) - { - fprintf (stderr, "%s: argument required for option `", argv[0]); - if (longopt_match >= 0) - fprintf (stderr, "--%s'\n", longopts[longopt_match].name); - else - fprintf (stderr, "-%c'\n", *cp); - } - optind++; - return (optopt = ':'); - } - else - { - optarg = argv[optind + 1]; - arg_next = 1; - optwhere = 1; - } - break; - case no_argument: - if (longopt_match < 0) - { - optwhere++; - if (argv[optind][optwhere] == '\0') - optwhere = 1; - } - else - optwhere = 1; - optarg = NULL; - break; - } - - /* do 
we have to permute or otherwise modify optind? */ - if (ordering == PERMUTE && optwhere == 1 && num_nonopts != 0) - { - permute (argv + permute_from, num_nonopts, 1 + arg_next); - optind = permute_from + 1 + arg_next; - } - else if (optwhere == 1) - optind = optind + 1 + arg_next; - - /* finally return */ - if (longopt_match >= 0) - { - if (longind != NULL) - *longind = longopt_match; - if (longopts[longopt_match].flag != NULL) - { - *(longopts[longopt_match].flag) = longopts[longopt_match].val; - return 0; - } - else - return longopts[longopt_match].val; - } - else - return optopt; -} - -#ifndef _AIX -int -getopt (int argc, char **argv, char *optstring) -{ - return getopt_internal (argc, argv, optstring, NULL, NULL, 0); -} -#endif - -int -getopt_long (int argc, char **argv, const char *shortopts, - const GETOPT_LONG_OPTION_T * longopts, int *longind) -{ - return getopt_internal (argc, argv, (char*)shortopts, (GETOPT_LONG_OPTION_T*)longopts, longind, 0); -} - -int -getopt_long_only (int argc, char **argv, const char *shortopts, - const GETOPT_LONG_OPTION_T * longopts, int *longind) -{ - return getopt_internal (argc, argv, (char*)shortopts, (GETOPT_LONG_OPTION_T*)longopts, longind, 1); -} - -/* end of file GETOPT.C */ diff --git a/ninja/src/getopt.h b/ninja/src/getopt.h deleted file mode 100644 index 965dc29003c..00000000000 --- a/ninja/src/getopt.h +++ /dev/null @@ -1,57 +0,0 @@ -#ifndef GETOPT_H -#define GETOPT_H - -/* include files needed by this include file */ - -/* macros defined by this include file */ -#define no_argument 0 -#define required_argument 1 -#define OPTIONAL_ARG 2 - -/* types defined by this include file */ - -/* GETOPT_LONG_OPTION_T: The type of long option */ -typedef struct GETOPT_LONG_OPTION_T -{ - const char *name; /* the name of the long option */ - int has_arg; /* one of the above macros */ - int *flag; /* determines if getopt_long() returns a - * value for a long option; if it is - * non-NULL, 0 is returned as a function - * value and the 
value of val is stored in - * the area pointed to by flag. Otherwise, - * val is returned. */ - int val; /* determines the value to return if flag is - * NULL. */ -} GETOPT_LONG_OPTION_T; - -typedef GETOPT_LONG_OPTION_T option; - -#ifdef __cplusplus -extern "C" -{ -#endif - - /* externally-defined variables */ - extern char *optarg; - extern int optind; - extern int opterr; - extern int optopt; - - /* function prototypes */ -#ifndef _AIX - int getopt (int argc, char **argv, char *optstring); -#endif - int getopt_long (int argc, char **argv, const char *shortopts, - const GETOPT_LONG_OPTION_T * longopts, int *longind); - int getopt_long_only (int argc, char **argv, const char *shortopts, - const GETOPT_LONG_OPTION_T * longopts, int *longind); - -#ifdef __cplusplus -}; - -#endif - -#endif /* GETOPT_H */ - -/* END OF FILE getopt.h */ diff --git a/ninja/src/graph.cc b/ninja/src/graph.cc deleted file mode 100644 index 9b75224c42b..00000000000 --- a/ninja/src/graph.cc +++ /dev/null @@ -1,685 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "graph.h" - -#include -#include - -#include "build_log.h" -#include "debug_flags.h" -#include "depfile_parser.h" -#include "deps_log.h" -#include "disk_interface.h" -#include "manifest_parser.h" -#include "metrics.h" -#include "state.h" -#include "util.h" - -bool Node::Stat(DiskInterface* disk_interface, string* err) { - return (mtime_ = disk_interface->Stat(path_, err)) != -1; -} - -bool DependencyScan::RecomputeDirty(Node* node, string* err) { - vector stack; - return RecomputeDirty(node, &stack, err); -} - -bool DependencyScan::RecomputeDirty(Node* node, vector* stack, - string* err) { - Edge* edge = node->in_edge(); - if (!edge) { - // If we already visited this leaf node then we are done. - if (node->status_known()) - return true; - // This node has no in-edge; it is dirty if it is missing. - if (!node->StatIfNecessary(disk_interface_, err)) - return false; - if (!node->exists()) - EXPLAIN("%s has no in-edge and is missing", node->path().c_str()); - node->set_dirty(!node->exists()); - return true; - } - - // If we already finished this edge then we are done. - if (edge->mark_ == Edge::VisitDone) - return true; - - // If we encountered this edge earlier in the call stack we have a cycle. - if (!VerifyDAG(node, stack, err)) - return false; - - // Mark the edge temporarily while in the call stack. - edge->mark_ = Edge::VisitInStack; - stack->push_back(node); - - bool dirty = false; - edge->outputs_ready_ = true; - edge->deps_missing_ = false; - - if (!edge->deps_loaded_) { - // This is our first encounter with this edge. - // If there is a pending dyndep file, visit it now: - // * If the dyndep file is ready then load it now to get any - // additional inputs and outputs for this and other edges. - // Once the dyndep file is loaded it will no longer be pending - // if any other edges encounter it, but they will already have - // been updated. 
- // * If the dyndep file is not ready then since is known to be an - // input to this edge, the edge will not be considered ready below. - // Later during the build the dyndep file will become ready and be - // loaded to update this edge before it can possibly be scheduled. - if (edge->dyndep_ && edge->dyndep_->dyndep_pending()) { - if (!RecomputeDirty(edge->dyndep_, stack, err)) - return false; - - if (!edge->dyndep_->in_edge() || - edge->dyndep_->in_edge()->outputs_ready()) { - // The dyndep file is ready, so load it now. - if (!LoadDyndeps(edge->dyndep_, err)) - return false; - } - } - } - - // Load output mtimes so we can compare them to the most recent input below. - for (vector::iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) { - if (!(*o)->StatIfNecessary(disk_interface_, err)) - return false; - } - - if (!edge->deps_loaded_) { - // This is our first encounter with this edge. Load discovered deps. - edge->deps_loaded_ = true; - if (!dep_loader_.LoadDeps(edge, err)) { - if (!err->empty()) - return false; - // Failed to load dependency info: rebuild to regenerate it. - // LoadDeps() did EXPLAIN() already, no need to do it here. - dirty = edge->deps_missing_ = true; - } - } - - // Visit all inputs; we're dirty if any of the inputs are dirty. - Node* most_recent_input = NULL; - for (vector::iterator i = edge->inputs_.begin(); - i != edge->inputs_.end(); ++i) { - // Visit this input. - if (!RecomputeDirty(*i, stack, err)) - return false; - - // If an input is not ready, neither are our outputs. - if (Edge* in_edge = (*i)->in_edge()) { - if (!in_edge->outputs_ready_) - edge->outputs_ready_ = false; - } - - if (!edge->is_order_only(i - edge->inputs_.begin())) { - // If a regular input is dirty (or missing), we're dirty. - // Otherwise consider mtime. 
- if ((*i)->dirty()) { - EXPLAIN("%s is dirty", (*i)->path().c_str()); - dirty = true; - } else { - if (!most_recent_input || (*i)->mtime() > most_recent_input->mtime()) { - most_recent_input = *i; - } - } - } - } - - // We may also be dirty due to output state: missing outputs, out of - // date outputs, etc. Visit all outputs and determine whether they're dirty. - if (!dirty) - if (!RecomputeOutputsDirty(edge, most_recent_input, &dirty, err)) - return false; - - // Finally, visit each output and update their dirty state if necessary. - for (vector::iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) { - if (dirty) - (*o)->MarkDirty(); - } - - // If an edge is dirty, its outputs are normally not ready. (It's - // possible to be clean but still not be ready in the presence of - // order-only inputs.) - // But phony edges with no inputs have nothing to do, so are always - // ready. - if (dirty && !(edge->is_phony() && edge->inputs_.empty())) - edge->outputs_ready_ = false; - - // Mark the edge as finished during this walk now that it will no longer - // be in the call stack. - edge->mark_ = Edge::VisitDone; - assert(stack->back() == node); - stack->pop_back(); - - return true; -} - -bool DependencyScan::VerifyDAG(Node* node, vector* stack, string* err) { - Edge* edge = node->in_edge(); - assert(edge != NULL); - - // If we have no temporary mark on the edge then we do not yet have a cycle. - if (edge->mark_ != Edge::VisitInStack) - return true; - - // We have this edge earlier in the call stack. Find it. - vector::iterator start = stack->begin(); - while (start != stack->end() && (*start)->in_edge() != edge) - ++start; - assert(start != stack->end()); - - // Make the cycle clear by reporting its start as the node at its end - // instead of some other output of the starting edge. For example, - // running 'ninja b' on - // build a b: cat c - // build c: cat a - // should report a -> c -> a instead of b -> c -> a. 
- *start = node; - - // Construct the error message rejecting the cycle. - *err = "dependency cycle: "; - for (vector::const_iterator i = start; i != stack->end(); ++i) { - err->append((*i)->path()); - err->append(" -> "); - } - err->append((*start)->path()); - - if ((start + 1) == stack->end() && edge->maybe_phonycycle_diagnostic()) { - // The manifest parser would have filtered out the self-referencing - // input if it were not configured to allow the error. - err->append(" [-w phonycycle=err]"); - } - - return false; -} - -bool DependencyScan::RecomputeOutputsDirty(Edge* edge, Node* most_recent_input, - bool* outputs_dirty, string* err) { - string command = edge->EvaluateCommand(/*incl_rsp_file=*/true); - for (vector::iterator o = edge->outputs_.begin(); - o != edge->outputs_.end(); ++o) { - if (RecomputeOutputDirty(edge, most_recent_input, command, *o)) { - *outputs_dirty = true; - return true; - } - } - return true; -} - -bool DependencyScan::RecomputeOutputDirty(Edge* edge, - Node* most_recent_input, - const string& command, - Node* output) { - if (edge->is_phony()) { - // Phony edges don't write any output. Outputs are only dirty if - // there are no inputs and we're missing the output. - if (edge->inputs_.empty() && !output->exists()) { - EXPLAIN("output %s of phony edge with no inputs doesn't exist", - output->path().c_str()); - return true; - } - return false; - } - - BuildLog::LogEntry* entry = 0; - - // Dirty if we're missing the output. - if (!output->exists()) { - EXPLAIN("output %s doesn't exist", output->path().c_str()); - return true; - } - - // Dirty if the output is older than the input. - if (most_recent_input && output->mtime() < most_recent_input->mtime()) { - TimeStamp output_mtime = output->mtime(); - - // If this is a restat rule, we may have cleaned the output with a restat - // rule in a previous run and stored the most recent input mtime in the - // build log. 
Use that mtime instead, so that the file will only be - // considered dirty if an input was modified since the previous run. - bool used_restat = false; - if (edge->GetBindingBool("restat") && build_log() && - (entry = build_log()->LookupByOutput(output->path()))) { - output_mtime = entry->mtime; - used_restat = true; - } - - if (output_mtime < most_recent_input->mtime()) { - EXPLAIN("%soutput %s older than most recent input %s " - "(%" PRId64 " vs %" PRId64 ")", - used_restat ? "restat of " : "", output->path().c_str(), - most_recent_input->path().c_str(), - output_mtime, most_recent_input->mtime()); - return true; - } - } - - if (build_log()) { - bool generator = edge->GetBindingBool("generator"); - if (entry || (entry = build_log()->LookupByOutput(output->path()))) { - if (!generator && - BuildLog::LogEntry::HashCommand(command) != entry->command_hash) { - // May also be dirty due to the command changing since the last build. - // But if this is a generator rule, the command changing does not make us - // dirty. - EXPLAIN("command line changed for %s", output->path().c_str()); - return true; - } - if (most_recent_input && entry->mtime < most_recent_input->mtime()) { - // May also be dirty due to the mtime in the log being older than the - // mtime of the most recent input. This can occur even when the mtime - // on disk is newer if a previous run wrote to the output file but - // exited with an error or was interrupted. 
- EXPLAIN("recorded mtime of %s older than most recent input %s (%" PRId64 " vs %" PRId64 ")", - output->path().c_str(), most_recent_input->path().c_str(), - entry->mtime, most_recent_input->mtime()); - return true; - } - } - if (!entry && !generator) { - EXPLAIN("command line not found in log for %s", output->path().c_str()); - return true; - } - } - - return false; -} - -bool DependencyScan::LoadDyndeps(Node* node, string* err) const { - return dyndep_loader_.LoadDyndeps(node, err); -} - -bool DependencyScan::LoadDyndeps(Node* node, DyndepFile* ddf, - string* err) const { - return dyndep_loader_.LoadDyndeps(node, ddf, err); -} - -bool Edge::AllInputsReady() const { - for (vector::const_iterator i = inputs_.begin(); - i != inputs_.end(); ++i) { - if ((*i)->in_edge() && !(*i)->in_edge()->outputs_ready()) - return false; - } - return true; -} - -enum FileType { - REMOVE_EXT, - REMOVE_BASENAME -}; -/// An Env for an Edge, providing $in and $out. -struct EdgeEnv : public Env { - enum EscapeKind { kShellEscape, kDoNotEscape }; - - EdgeEnv(Edge* edge, EscapeKind escape) - : edge_(edge), escape_in_out_(escape)/*, recursive_(false)*/ {} - virtual string LookupVariable(const string& var); - - /// Given a span of Nodes, construct a list of paths suitable for a command - /// line. 
- string MakePathList(vector::iterator begin, - vector::iterator end, - char sep); - string MakePath(const Node* node, FileType filetype); - - private: - vector lookups_; - Edge* edge_; - EscapeKind escape_in_out_; - // bool recursive_; -}; - -string EdgeEnv::LookupVariable(const string& var) { - - - if( var == "i") { - return MakePath(edge_->inputs_.front(),(FileType) -1); - } - if (var == "out") { - return MakePath(edge_->outputs_.front(), (FileType)-1); - } - if (var == "in_d") { - return MakePath(*(edge_->inputs_.begin()), REMOVE_BASENAME); - } - if (var == "in") { - int explicit_deps_count = - edge_->inputs_.size() - edge_->implicit_deps_ - edge_->order_only_deps_; - return MakePathList(edge_->inputs_.begin(), - edge_->inputs_.begin() + explicit_deps_count, ' '); - } - if (var == "out_last") { - return MakePath(edge_->outputs_.back(), (FileType)-1); - } - - - // if (recursive_) { - // vector::const_iterator it; - // if ((it = find(lookups_.begin(), lookups_.end(), var)) != lookups_.end()) { - // string cycle; - // for (; it != lookups_.end(); ++it) - // cycle.append(*it + " -> "); - // cycle.append(var); - // Fatal(("cycle in rule variables: " + cycle).c_str()); - // } - // } - - // See notes on BindingEnv::LookupWithFallback. - const EvalString* eval = edge_->rule_->GetBinding(var); - // if (recursive_ && eval) - // lookups_.push_back(var); - - // In practice, variables defined on rules never use another rule variable. - // For performance, only start checking for cycles after the first lookup. 
- // recursive_ = true; - return edge_->env_->LookupWithFallback(var, eval, this); -} - -string EdgeEnv::MakePathList(vector::iterator begin, - vector::iterator end, - char sep) { - string result; - for (vector::iterator i = begin; i != end; ++i) { - if (!result.empty()) - result.push_back(sep); - const string& path = (*i)->PathDecanonicalized(); - if (escape_in_out_ == kShellEscape) { -#if _WIN32 - GetWin32EscapedString(path, &result); -#else - GetShellEscapedString(path, &result); -#endif - } else { - result.append(path); - } - } - return result; -} - -string transform(const string& path, FileType filetype){ - switch (filetype) { - case REMOVE_EXT: - return path.substr(0, path.find_last_of('.')); - case REMOVE_BASENAME: - size_t pos = path.find_last_of("/\\"); - if(pos == string::npos){ - return string(); - } - return path.substr(0,pos); - } - return path; // fix warning-error - // This could be wrong in extreme cases - // long_path/../../'hell\ escaped_file' - // It happens rarely in bsb since module name - // can not be arbitrary -} - -string EdgeEnv::MakePath(const Node* node, FileType filetype){ - - const string& path = node ->PathDecanonicalized(); - if(escape_in_out_ == kShellEscape){ - string result; -#if _WIN32 - GetWin32EscapedString(transform(path,filetype), &result); -#else - GetShellEscapedString(transform(path,filetype), &result); -#endif - return result; - } else { - return transform(path,filetype); - } -} - -string Edge::EvaluateCommand(bool incl_rsp_file) { - string command = GetBinding("command"); -#if 0 - if (incl_rsp_file) { - string rspfile_content = GetBinding("rspfile_content"); - if (!rspfile_content.empty()) - command += ";rspfile=" + rspfile_content; - } -#endif - return command; -} - -string Edge::GetBinding(const string& key) { - EdgeEnv env(this, EdgeEnv::kShellEscape); - return env.LookupVariable(key); -} - -bool Edge::GetBindingBool(const string& key) { - return !GetBinding(key).empty(); -} - -string Edge::GetUnescapedDepfile() { - 
EdgeEnv env(this, EdgeEnv::kDoNotEscape); - return env.LookupVariable("depfile"); -} - -string Edge::GetUnescapedDyndep() { - EdgeEnv env(this, EdgeEnv::kDoNotEscape); - return env.LookupVariable("dyndep"); -} - -string Edge::GetUnescapedRspfile() { - EdgeEnv env(this, EdgeEnv::kDoNotEscape); - return env.LookupVariable("rspfile"); -} - -void Edge::Dump(const char* prefix) const { - printf("%s[ ", prefix); - for (vector::const_iterator i = inputs_.begin(); - i != inputs_.end() && *i != NULL; ++i) { - printf("%s ", (*i)->path().c_str()); - } - printf("--%s-> ", rule_->name().c_str()); - for (vector::const_iterator i = outputs_.begin(); - i != outputs_.end() && *i != NULL; ++i) { - printf("%s ", (*i)->path().c_str()); - } - if (pool_) { - if (!pool_->name().empty()) { - printf("(in pool '%s')", pool_->name().c_str()); - } - } else { - printf("(null pool?)"); - } - printf("] 0x%p\n", this); -} - -bool Edge::is_phony() const { - return rule_ == &State::kPhonyRule; -} - -bool Edge::use_console() const { - return pool() == &State::kConsolePool; -} - -bool Edge::maybe_phonycycle_diagnostic() const { - // CMake 2.8.12.x and 3.0.x produced self-referencing phony rules - // of the form "build a: phony ... a ...". Restrict our - // "phonycycle" diagnostic option to the form it used. - return is_phony() && outputs_.size() == 1 && implicit_outs_ == 0 && - implicit_deps_ == 0; -} - -// static -string Node::PathDecanonicalized(const string& path, uint64_t slash_bits) { - string result = path; -#ifdef _WIN32 - uint64_t mask = 1; - for (char* c = &result[0]; (c = strchr(c, '/')) != NULL;) { - if (slash_bits & mask) - *c = '\\'; - c++; - mask <<= 1; - } -#endif - return result; -} - -void Node::Dump(const char* prefix) const { - printf("%s <%s 0x%p> mtime: %" PRId64 "%s, (:%s), ", - prefix, path().c_str(), this, - mtime(), mtime() ? "" : " (:missing)", - dirty() ? 
" dirty" : " clean"); - if (in_edge()) { - in_edge()->Dump("in-edge: "); - } else { - printf("no in-edge\n"); - } - printf(" out edges:\n"); - for (vector::const_iterator e = out_edges().begin(); - e != out_edges().end() && *e != NULL; ++e) { - (*e)->Dump(" +- "); - } -} - -bool ImplicitDepLoader::LoadDeps(Edge* edge, string* err) { -#if 0 - string deps_type = edge->GetBinding("deps"); - if (!deps_type.empty()) - return LoadDepsFromLog(edge, err); -#endif - string depfile = edge->GetUnescapedDepfile(); - if (!depfile.empty()) - return LoadDepFile(edge, depfile, err); - - // No deps to load. - return true; -} - -bool ImplicitDepLoader::LoadDepFile(Edge* edge, const string& path, - string* err) { - METRIC_RECORD("depfile load"); - // Read depfile content. Treat a missing depfile as empty. - string content; - switch (disk_interface_->ReadFile(path, &content, err)) { - case DiskInterface::Okay: - break; - case DiskInterface::NotFound: - err->clear(); - break; - case DiskInterface::OtherError: - *err = "loading '" + path + "': " + *err; - return false; - } - // On a missing depfile: return false and empty *err. - if (content.empty()) { - EXPLAIN("depfile '%s' is missing", path.c_str()); - return false; - } - - DepfileParser depfile(depfile_parser_options_ - ? *depfile_parser_options_ - : DepfileParserOptions()); - string depfile_err; - if (!depfile.Parse(&content, &depfile_err)) { - *err = path + ": " + depfile_err; - return false; - } - - uint64_t unused; - if (!CanonicalizePath(const_cast(depfile.out_.str_), - &depfile.out_.len_, &unused, err)) { - *err = path + ": " + *err; - return false; - } - - // Check that this depfile matches the edge's output, if not return false to - // mark the edge as dirty. 
- Node* first_output = edge->outputs_[0]; - StringPiece opath = StringPiece(first_output->path()); - if (opath != depfile.out_) { - EXPLAIN("expected depfile '%s' to mention '%s', got '%s'", path.c_str(), - first_output->path().c_str(), depfile.out_.AsString().c_str()); - return false; - } - - // Preallocate space in edge->inputs_ to be filled in below. - vector::iterator implicit_dep = - PreallocateSpace(edge, depfile.ins_.size()); - - // Add all its in-edges. - for (vector::iterator i = depfile.ins_.begin(); - i != depfile.ins_.end(); ++i, ++implicit_dep) { - uint64_t slash_bits; - if (!CanonicalizePath(const_cast(i->str_), &i->len_, &slash_bits, - err)) - return false; - - Node* node = state_->GetNode(*i, slash_bits); - *implicit_dep = node; - node->AddOutEdge(edge); - CreatePhonyInEdge(node); - } - - return true; -} -#if 0 -bool ImplicitDepLoader::LoadDepsFromLog(Edge* edge, string* err) { - // NOTE: deps are only supported for single-target edges. - Node* output = edge->outputs_[0]; - DepsLog::Deps* deps = deps_log_ ? deps_log_->GetDeps(output) : NULL; - if (!deps) { - EXPLAIN("deps for '%s' are missing", output->path().c_str()); - return false; - } - - // Deps are invalid if the output is newer than the deps. 
- if (output->mtime() > deps->mtime) { - EXPLAIN("stored deps info out of date for '%s' (%" PRId64 " vs %" PRId64 ")", - output->path().c_str(), deps->mtime, output->mtime()); - return false; - } - - vector::iterator implicit_dep = - PreallocateSpace(edge, deps->node_count); - for (int i = 0; i < deps->node_count; ++i, ++implicit_dep) { - Node* node = deps->nodes[i]; - *implicit_dep = node; - node->AddOutEdge(edge); - CreatePhonyInEdge(node); - } - return true; -} -#endif -vector::iterator ImplicitDepLoader::PreallocateSpace(Edge* edge, - int count) { - edge->inputs_.insert(edge->inputs_.end() - edge->order_only_deps_, - (size_t)count, 0); - edge->implicit_deps_ += count; - return edge->inputs_.end() - edge->order_only_deps_ - count; -} - -void ImplicitDepLoader::CreatePhonyInEdge(Node* node) { - if (node->in_edge()) - return; - - Edge* phony_edge = state_->AddEdge(&State::kPhonyRule); - node->set_in_edge(phony_edge); - phony_edge->outputs_.push_back(node); - - // RecomputeDirty might not be called for phony_edge if a previous call - // to RecomputeDirty had caused the file to be stat'ed. Because previous - // invocations of RecomputeDirty would have seen this node without an - // input edge (and therefore ready), we have to set outputs_ready_ to true - // to avoid a potential stuck build. If we do call RecomputeDirty for - // this node, it will simply set outputs_ready_ to the correct value. - phony_edge->outputs_ready_ = true; -} diff --git a/ninja/src/graph.h b/ninja/src/graph.h deleted file mode 100644 index fcb9211c06e..00000000000 --- a/ninja/src/graph.h +++ /dev/null @@ -1,323 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_GRAPH_H_ -#define NINJA_GRAPH_H_ - -#include -#include -using namespace std; - -#include "dyndep.h" -#include "eval_env.h" -#include "timestamp.h" -#include "util.h" - -struct BuildLog; -struct DepfileParserOptions; -struct DiskInterface; -struct DepsLog; -struct Edge; -struct Node; -struct Pool; -struct State; - -/// Information about a node in the dependency graph: the file, whether -/// it's dirty, mtime, etc. -struct Node { - Node(const string& path, uint64_t slash_bits) - : path_(path), - slash_bits_(slash_bits), - mtime_(-1), - dirty_(false), - dyndep_pending_(false), - in_edge_(NULL)/*, - id_(-1) */ {} - - /// Return false on error. - bool Stat(DiskInterface* disk_interface, string* err); - - /// Return false on error. - bool StatIfNecessary(DiskInterface* disk_interface, string* err) { - if (status_known()) - return true; - return Stat(disk_interface, err); - } - - /// Mark as not-yet-stat()ed and not dirty. - void ResetState() { - mtime_ = -1; - dirty_ = false; - } - - /// Mark the Node as already-stat()ed and missing. - void MarkMissing() { - mtime_ = 0; - } - - bool exists() const { - return mtime_ != 0; - } - - bool status_known() const { - return mtime_ != -1; - } - - const string& path() const { return path_; } - /// Get |path()| but use slash_bits to convert back to original slash styles. 
- string PathDecanonicalized() const { - return PathDecanonicalized(path_, slash_bits_); - } - static string PathDecanonicalized(const string& path, - uint64_t slash_bits); - uint64_t slash_bits() const { return slash_bits_; } - - TimeStamp mtime() const { return mtime_; } - - bool dirty() const { return dirty_; } - void set_dirty(bool dirty) { dirty_ = dirty; } - void MarkDirty() { dirty_ = true; } - - bool dyndep_pending() const { return dyndep_pending_; } - void set_dyndep_pending(bool pending) { dyndep_pending_ = pending; } - - Edge* in_edge() const { return in_edge_; } - void set_in_edge(Edge* edge) { in_edge_ = edge; } - - // int id() const { return id_; } - // void set_id(int id) { id_ = id; } - - const vector& out_edges() const { return out_edges_; } - void AddOutEdge(Edge* edge) { out_edges_.push_back(edge); } - - void Dump(const char* prefix="") const; - -private: - string path_; - - /// Set bits starting from lowest for backslashes that were normalized to - /// forward slashes by CanonicalizePath. See |PathDecanonicalized|. - uint64_t slash_bits_; - - /// Possible values of mtime_: - /// -1: file hasn't been examined - /// 0: we looked, and file doesn't exist - /// >0: actual file's mtime - TimeStamp mtime_; - - /// Dirty is true when the underlying file is out-of-date. - /// But note that Edge::outputs_ready_ is also used in judging which - /// edges to build. - bool dirty_; - - /// Store whether dyndep information is expected from this node but - /// has not yet been loaded. - bool dyndep_pending_; - - /// The Edge that produces this Node, or NULL when there is no - /// known edge to produce it. - Edge* in_edge_; - - /// All Edges that use this Node as an input. - vector out_edges_; - - /// A dense integer id for the node, assigned and used by DepsLog. - // int id_; -}; - -/// An edge in the dependency graph; links between Nodes using Rules. 
-struct Edge { - enum VisitMark { - VisitNone, - VisitInStack, - VisitDone - }; - - Edge() : rule_(NULL), pool_(NULL), dyndep_(NULL), env_(NULL), - mark_(VisitNone), outputs_ready_(false), deps_loaded_(false), - deps_missing_(false), implicit_deps_(0), order_only_deps_(0), - implicit_outs_(0) {} - - /// Return true if all inputs' in-edges are ready. - bool AllInputsReady() const; - - /// Expand all variables in a command and return it as a string. - /// If incl_rsp_file is enabled, the string will also contain the - /// full contents of a response file (if applicable) - string EvaluateCommand(bool incl_rsp_file = false); - - /// Returns the shell-escaped value of |key|. - string GetBinding(const string& key); - bool GetBindingBool(const string& key); - - /// Like GetBinding("depfile"), but without shell escaping. - string GetUnescapedDepfile(); - /// Like GetBinding("dyndep"), but without shell escaping. - string GetUnescapedDyndep(); - /// Like GetBinding("rspfile"), but without shell escaping. - string GetUnescapedRspfile(); - - void Dump(const char* prefix="") const; - - const Rule* rule_; - Pool* pool_; - vector inputs_; - vector outputs_; - Node* dyndep_; - BindingEnv* env_; - VisitMark mark_; - bool outputs_ready_; - bool deps_loaded_; - bool deps_missing_; - - const Rule& rule() const { return *rule_; } - Pool* pool() const { return pool_; } - int weight() const { return 1; } - bool outputs_ready() const { return outputs_ready_; } - - // There are three types of inputs. - // 1) explicit deps, which show up as $in on the command line; - // 2) implicit deps, which the target depends on implicitly (e.g. C headers), - // and changes in them cause the target to rebuild; - // 3) order-only deps, which are needed before the target builds but which - // don't cause the target to rebuild. - // These are stored in inputs_ in that order, and we keep counts of - // #2 and #3 when we need to access the various subsets. 
- int implicit_deps_; - int order_only_deps_; - bool is_implicit(size_t index) { - return index >= inputs_.size() - order_only_deps_ - implicit_deps_ && - !is_order_only(index); - } - bool is_order_only(size_t index) { - return index >= inputs_.size() - order_only_deps_; - } - - // There are two types of outputs. - // 1) explicit outs, which show up as $out on the command line; - // 2) implicit outs, which the target generates but are not part of $out. - // These are stored in outputs_ in that order, and we keep a count of - // #2 to use when we need to access the various subsets. - int implicit_outs_; - bool is_implicit_out(size_t index) const { - return index >= outputs_.size() - implicit_outs_; - } - - bool is_phony() const; - bool use_console() const; - bool maybe_phonycycle_diagnostic() const; -}; - - -/// ImplicitDepLoader loads implicit dependencies, as referenced via the -/// "depfile" attribute in build files. -struct ImplicitDepLoader { - ImplicitDepLoader(State* state, DepsLog* deps_log, - DiskInterface* disk_interface, - DepfileParserOptions const* depfile_parser_options) - : state_(state), disk_interface_(disk_interface), deps_log_(deps_log), - depfile_parser_options_(depfile_parser_options) {} - - /// Load implicit dependencies for \a edge. - /// @return false on error (without filling \a err if info is just missing - // or out of date). - bool LoadDeps(Edge* edge, string* err); - - DepsLog* deps_log() const { - return deps_log_; - } - - private: - /// Load implicit dependencies for \a edge from a depfile attribute. - /// @return false on error (without filling \a err if info is just missing). - bool LoadDepFile(Edge* edge, const string& path, string* err); - -#if 0 - /// Load implicit dependencies for \a edge from the DepsLog. - /// @return false on error (without filling \a err if info is just missing). 
- bool LoadDepsFromLog(Edge* edge, string* err); -#endif - /// Preallocate \a count spaces in the input array on \a edge, returning - /// an iterator pointing at the first new space. - vector::iterator PreallocateSpace(Edge* edge, int count); - - /// If we don't have a edge that generates this input already, - /// create one; this makes us not abort if the input is missing, - /// but instead will rebuild in that circumstance. - void CreatePhonyInEdge(Node* node); - - State* state_; - DiskInterface* disk_interface_; - DepsLog* deps_log_; - DepfileParserOptions const* depfile_parser_options_; -}; - - -/// DependencyScan manages the process of scanning the files in a graph -/// and updating the dirty/outputs_ready state of all the nodes and edges. -struct DependencyScan { - DependencyScan(State* state, BuildLog* build_log, DepsLog* deps_log, - DiskInterface* disk_interface, - DepfileParserOptions const* depfile_parser_options) - : build_log_(build_log), - disk_interface_(disk_interface), - dep_loader_(state, deps_log, disk_interface, depfile_parser_options), - dyndep_loader_(state, disk_interface) {} - - /// Update the |dirty_| state of the given node by inspecting its input edge. - /// Examine inputs, outputs, and command lines to judge whether an edge - /// needs to be re-run, and update outputs_ready_ and each outputs' |dirty_| - /// state accordingly. - /// Returns false on failure. - bool RecomputeDirty(Node* node, string* err); - - /// Recompute whether any output of the edge is dirty, if so sets |*dirty|. - /// Returns false on failure. - bool RecomputeOutputsDirty(Edge* edge, Node* most_recent_input, - bool* dirty, string* err); - - BuildLog* build_log() const { - return build_log_; - } - void set_build_log(BuildLog* log) { - build_log_ = log; - } - - DepsLog* deps_log() const { - return dep_loader_.deps_log(); - } - - /// Load a dyndep file from the given node's path and update the - /// build graph with the new information. 
One overload accepts - /// a caller-owned 'DyndepFile' object in which to store the - /// information loaded from the dyndep file. - bool LoadDyndeps(Node* node, string* err) const; - bool LoadDyndeps(Node* node, DyndepFile* ddf, string* err) const; - - private: - bool RecomputeDirty(Node* node, vector* stack, string* err); - bool VerifyDAG(Node* node, vector* stack, string* err); - - /// Recompute whether a given single output should be marked dirty. - /// Returns true if so. - bool RecomputeOutputDirty(Edge* edge, Node* most_recent_input, - const string& command, Node* output); - - BuildLog* build_log_; - DiskInterface* disk_interface_; - ImplicitDepLoader dep_loader_; - DyndepLoader dyndep_loader_; -}; - -#endif // NINJA_GRAPH_H_ diff --git a/ninja/src/graph_test.cc b/ninja/src/graph_test.cc deleted file mode 100644 index c8cca1c0dfb..00000000000 --- a/ninja/src/graph_test.cc +++ /dev/null @@ -1,858 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "graph.h" -#include "build.h" - -#include "test.h" - -struct GraphTest : public StateTestWithBuiltinRules { - GraphTest() : scan_(&state_, NULL, NULL, &fs_, NULL) {} - - VirtualFileSystem fs_; - DependencyScan scan_; -}; - -TEST_F(GraphTest, MissingImplicit) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat in | implicit\n")); - fs_.Create("in", ""); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - // A missing implicit dep *should* make the output dirty. - // (In fact, a build will fail.) - // This is a change from prior semantics of ninja. - EXPECT_TRUE(GetNode("out")->dirty()); -} - -TEST_F(GraphTest, ModifiedImplicit) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out: cat in | implicit\n")); - fs_.Create("in", ""); - fs_.Create("out", ""); - fs_.Tick(); - fs_.Create("implicit", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - // A modified implicit dep should make the output dirty. - EXPECT_TRUE(GetNode("out")->dirty()); -} - -TEST_F(GraphTest, FunkyMakefilePath) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule catdep\n" -" depfile = $out.d\n" -" command = cat $in > $out\n" -"build out.o: catdep foo.cc\n")); - fs_.Create("foo.cc", ""); - fs_.Create("out.o.d", "out.o: ./foo/../implicit.h\n"); - fs_.Create("out.o", ""); - fs_.Tick(); - fs_.Create("implicit.h", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err)); - ASSERT_EQ("", err); - - // implicit.h has changed, though our depfile refers to it with a - // non-canonical path; we should still find it. 
- EXPECT_TRUE(GetNode("out.o")->dirty()); -} - -TEST_F(GraphTest, ExplicitImplicit) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule catdep\n" -" depfile = $out.d\n" -" command = cat $in > $out\n" -"build implicit.h: cat data\n" -"build out.o: catdep foo.cc || implicit.h\n")); - fs_.Create("implicit.h", ""); - fs_.Create("foo.cc", ""); - fs_.Create("out.o.d", "out.o: implicit.h\n"); - fs_.Create("out.o", ""); - fs_.Tick(); - fs_.Create("data", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err)); - ASSERT_EQ("", err); - - // We have both an implicit and an explicit dep on implicit.h. - // The implicit dep should "win" (in the sense that it should cause - // the output to be dirty). - EXPECT_TRUE(GetNode("out.o")->dirty()); -} - -TEST_F(GraphTest, ImplicitOutputParse) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out | out.imp: cat in\n")); - - Edge* edge = GetNode("out")->in_edge(); - EXPECT_EQ(2, edge->outputs_.size()); - EXPECT_EQ("out", edge->outputs_[0]->path()); - EXPECT_EQ("out.imp", edge->outputs_[1]->path()); - EXPECT_EQ(1, edge->implicit_outs_); - EXPECT_EQ(edge, GetNode("out.imp")->in_edge()); -} - -TEST_F(GraphTest, ImplicitOutputMissing) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out | out.imp: cat in\n")); - fs_.Create("in", ""); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - EXPECT_TRUE(GetNode("out")->dirty()); - EXPECT_TRUE(GetNode("out.imp")->dirty()); -} - -TEST_F(GraphTest, ImplicitOutputOutOfDate) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out | out.imp: cat in\n")); - fs_.Create("out.imp", ""); - fs_.Tick(); - fs_.Create("in", ""); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - EXPECT_TRUE(GetNode("out")->dirty()); - EXPECT_TRUE(GetNode("out.imp")->dirty()); -} - -TEST_F(GraphTest, ImplicitOutputOnlyParse) { - 
ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build | out.imp: cat in\n")); - - Edge* edge = GetNode("out.imp")->in_edge(); - EXPECT_EQ(1, edge->outputs_.size()); - EXPECT_EQ("out.imp", edge->outputs_[0]->path()); - EXPECT_EQ(1, edge->implicit_outs_); - EXPECT_EQ(edge, GetNode("out.imp")->in_edge()); -} - -TEST_F(GraphTest, ImplicitOutputOnlyMissing) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build | out.imp: cat in\n")); - fs_.Create("in", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.imp"), &err)); - ASSERT_EQ("", err); - - EXPECT_TRUE(GetNode("out.imp")->dirty()); -} - -TEST_F(GraphTest, ImplicitOutputOnlyOutOfDate) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build | out.imp: cat in\n")); - fs_.Create("out.imp", ""); - fs_.Tick(); - fs_.Create("in", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.imp"), &err)); - ASSERT_EQ("", err); - - EXPECT_TRUE(GetNode("out.imp")->dirty()); -} - -TEST_F(GraphTest, PathWithCurrentDirectory) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule catdep\n" -" depfile = $out.d\n" -" command = cat $in > $out\n" -"build ./out.o: catdep ./foo.cc\n")); - fs_.Create("foo.cc", ""); - fs_.Create("out.o.d", "out.o: foo.cc\n"); - fs_.Create("out.o", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(GetNode("out.o")->dirty()); -} - -TEST_F(GraphTest, RootNodes) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out1: cat in1\n" -"build mid1: cat in1\n" -"build out2: cat mid1\n" -"build out3 out4: cat mid1\n")); - - string err; - vector root_nodes = state_.RootNodes(&err); - EXPECT_EQ(4u, root_nodes.size()); - for (size_t i = 0; i < root_nodes.size(); ++i) { - string name = root_nodes[i]->path(); - EXPECT_EQ("out", name.substr(0, 3)); - } -} - -TEST_F(GraphTest, VarInOutPathEscaping) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build a$ b: cat no'space with$ space$$ no\"space2\n")); - - Edge* edge 
= GetNode("a b")->in_edge(); -#if _WIN32 - EXPECT_EQ("cat no'space \"with space$\" \"no\\\"space2\" > \"a b\"", - edge->EvaluateCommand()); -#else - EXPECT_EQ("cat 'no'\\''space' 'with space$' 'no\"space2' > 'a b'", - edge->EvaluateCommand()); -#endif -} - -// Regression test for https://github.com/ninja-build/ninja/issues/380 -TEST_F(GraphTest, DepfileWithCanonicalizablePath) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule catdep\n" -" depfile = $out.d\n" -" command = cat $in > $out\n" -"build ./out.o: catdep ./foo.cc\n")); - fs_.Create("foo.cc", ""); - fs_.Create("out.o.d", "out.o: bar/../foo.cc\n"); - fs_.Create("out.o", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(GetNode("out.o")->dirty()); -} - -// Regression test for https://github.com/ninja-build/ninja/issues/404 -TEST_F(GraphTest, DepfileRemoved) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule catdep\n" -" depfile = $out.d\n" -" command = cat $in > $out\n" -"build ./out.o: catdep ./foo.cc\n")); - fs_.Create("foo.h", ""); - fs_.Create("foo.cc", ""); - fs_.Tick(); - fs_.Create("out.o.d", "out.o: foo.h\n"); - fs_.Create("out.o", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err)); - ASSERT_EQ("", err); - EXPECT_FALSE(GetNode("out.o")->dirty()); - - state_.Reset(); - fs_.RemoveFile("out.o.d"); - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out.o"), &err)); - ASSERT_EQ("", err); - EXPECT_TRUE(GetNode("out.o")->dirty()); -} - -// Check that rule-level variables are in scope for eval. -TEST_F(GraphTest, RuleVariablesInScope) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule r\n" -" depfile = x\n" -" command = depfile is $depfile\n" -"build out: r in\n")); - Edge* edge = GetNode("out")->in_edge(); - EXPECT_EQ("depfile is x", edge->EvaluateCommand()); -} - -// Check that build statements can override rule builtins like depfile. 
-TEST_F(GraphTest, DepfileOverride) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule r\n" -" depfile = x\n" -" command = unused\n" -"build out: r in\n" -" depfile = y\n")); - Edge* edge = GetNode("out")->in_edge(); - EXPECT_EQ("y", edge->GetBinding("depfile")); -} - -// Check that overridden values show up in expansion of rule-level bindings. -TEST_F(GraphTest, DepfileOverrideParent) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"rule r\n" -" depfile = x\n" -" command = depfile is $depfile\n" -"build out: r in\n" -" depfile = y\n")); - Edge* edge = GetNode("out")->in_edge(); - EXPECT_EQ("depfile is y", edge->GetBinding("command")); -} - -// Verify that building a nested phony rule prints "no work to do" -TEST_F(GraphTest, NestedPhonyPrintsDone) { - AssertParse(&state_, -"build n1: phony \n" -"build n2: phony n1\n" - ); - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("n2"), &err)); - ASSERT_EQ("", err); - - Plan plan_; - EXPECT_TRUE(plan_.AddTarget(GetNode("n2"), &err)); - ASSERT_EQ("", err); - - EXPECT_EQ(0, plan_.command_edge_count()); - ASSERT_FALSE(plan_.more_to_do()); -} - -TEST_F(GraphTest, PhonySelfReferenceError) { - ManifestParserOptions parser_opts; - parser_opts.phony_cycle_action_ = kPhonyCycleActionError; - AssertParse(&state_, -"build a: phony a\n", - parser_opts); - - string err; - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), &err)); - ASSERT_EQ("dependency cycle: a -> a [-w phonycycle=err]", err); -} - -TEST_F(GraphTest, DependencyCycle) { - AssertParse(&state_, -"build out: cat mid\n" -"build mid: cat in\n" -"build in: cat pre\n" -"build pre: cat out\n"); - - string err; - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("dependency cycle: out -> mid -> in -> pre -> out", err); -} - -TEST_F(GraphTest, CycleInEdgesButNotInNodes1) { - string err; - AssertParse(&state_, -"build a b: cat a\n"); - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), &err)); - ASSERT_EQ("dependency cycle: a -> a", err); -} - 
-TEST_F(GraphTest, CycleInEdgesButNotInNodes2) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build b a: cat a\n")); - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), &err)); - ASSERT_EQ("dependency cycle: a -> a", err); -} - -TEST_F(GraphTest, CycleInEdgesButNotInNodes3) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build a b: cat c\n" -"build c: cat a\n")); - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("b"), &err)); - ASSERT_EQ("dependency cycle: a -> c -> a", err); -} - -TEST_F(GraphTest, CycleInEdgesButNotInNodes4) { - string err; - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build d: cat c\n" -"build c: cat b\n" -"build b: cat a\n" -"build a e: cat d\n" -"build f: cat e\n")); - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("f"), &err)); - ASSERT_EQ("dependency cycle: a -> d -> c -> b -> a", err); -} - -// Verify that cycles in graphs with multiple outputs are handled correctly -// in RecomputeDirty() and don't cause deps to be loaded multiple times. -TEST_F(GraphTest, CycleWithLengthZeroFromDepfile) { - AssertParse(&state_, -"rule deprule\n" -" depfile = dep.d\n" -" command = unused\n" -"build a b: deprule\n" - ); - fs_.Create("dep.d", "a: b\n"); - - string err; - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), &err)); - ASSERT_EQ("dependency cycle: b -> b", err); - - // Despite the depfile causing edge to be a cycle (it has outputs a and b, - // but the depfile also adds b as an input), the deps should have been loaded - // only once: - Edge* edge = GetNode("a")->in_edge(); - EXPECT_EQ(1, edge->inputs_.size()); - EXPECT_EQ("b", edge->inputs_[0]->path()); -} - -// Like CycleWithLengthZeroFromDepfile but with a higher cycle length. 
-TEST_F(GraphTest, CycleWithLengthOneFromDepfile) { - AssertParse(&state_, -"rule deprule\n" -" depfile = dep.d\n" -" command = unused\n" -"rule r\n" -" command = unused\n" -"build a b: deprule\n" -"build c: r b\n" - ); - fs_.Create("dep.d", "a: c\n"); - - string err; - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("a"), &err)); - ASSERT_EQ("dependency cycle: b -> c -> b", err); - - // Despite the depfile causing edge to be a cycle (|edge| has outputs a and b, - // but c's in_edge has b as input but the depfile also adds |edge| as - // output)), the deps should have been loaded only once: - Edge* edge = GetNode("a")->in_edge(); - EXPECT_EQ(1, edge->inputs_.size()); - EXPECT_EQ("c", edge->inputs_[0]->path()); -} - -// Like CycleWithLengthOneFromDepfile but building a node one hop away from -// the cycle. -TEST_F(GraphTest, CycleWithLengthOneFromDepfileOneHopAway) { - AssertParse(&state_, -"rule deprule\n" -" depfile = dep.d\n" -" command = unused\n" -"rule r\n" -" command = unused\n" -"build a b: deprule\n" -"build c: r b\n" -"build d: r a\n" - ); - fs_.Create("dep.d", "a: c\n"); - - string err; - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("d"), &err)); - ASSERT_EQ("dependency cycle: b -> c -> b", err); - - // Despite the depfile causing edge to be a cycle (|edge| has outputs a and b, - // but c's in_edge has b as input but the depfile also adds |edge| as - // output)), the deps should have been loaded only once: - Edge* edge = GetNode("a")->in_edge(); - EXPECT_EQ(1, edge->inputs_.size()); - EXPECT_EQ("c", edge->inputs_[0]->path()); -} - -#ifdef _WIN32 -TEST_F(GraphTest, Decanonicalize) { - ASSERT_NO_FATAL_FAILURE(AssertParse(&state_, -"build out\\out1: cat src\\in1\n" -"build out\\out2/out3\\out4: cat mid1\n" -"build out3 out4\\foo: cat mid1\n")); - - string err; - vector root_nodes = state_.RootNodes(&err); - EXPECT_EQ(4u, root_nodes.size()); - EXPECT_EQ(root_nodes[0]->path(), "out/out1"); - EXPECT_EQ(root_nodes[1]->path(), "out/out2/out3/out4"); - 
EXPECT_EQ(root_nodes[2]->path(), "out3"); - EXPECT_EQ(root_nodes[3]->path(), "out4/foo"); - EXPECT_EQ(root_nodes[0]->PathDecanonicalized(), "out\\out1"); - EXPECT_EQ(root_nodes[1]->PathDecanonicalized(), "out\\out2/out3\\out4"); - EXPECT_EQ(root_nodes[2]->PathDecanonicalized(), "out3"); - EXPECT_EQ(root_nodes[3]->PathDecanonicalized(), "out4\\foo"); -} -#endif - -TEST_F(GraphTest, DyndepLoadTrivial) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out: r in || dd\n" -" dyndep = dd\n" - ); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" - ); - - string err; - ASSERT_TRUE(GetNode("dd")->dyndep_pending()); - EXPECT_TRUE(scan_.LoadDyndeps(GetNode("dd"), &err)); - EXPECT_EQ("", err); - EXPECT_FALSE(GetNode("dd")->dyndep_pending()); - - Edge* edge = GetNode("out")->in_edge(); - ASSERT_EQ(1u, edge->outputs_.size()); - EXPECT_EQ("out", edge->outputs_[0]->path()); - ASSERT_EQ(2u, edge->inputs_.size()); - EXPECT_EQ("in", edge->inputs_[0]->path()); - EXPECT_EQ("dd", edge->inputs_[1]->path()); - EXPECT_EQ(0u, edge->implicit_deps_); - EXPECT_EQ(1u, edge->order_only_deps_); - EXPECT_FALSE(edge->GetBindingBool("restat")); -} - -TEST_F(GraphTest, DyndepLoadMissingFile) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out: r in || dd\n" -" dyndep = dd\n" - ); - - string err; - ASSERT_TRUE(GetNode("dd")->dyndep_pending()); - EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd"), &err)); - EXPECT_EQ("loading 'dd': No such file or directory", err); -} - -TEST_F(GraphTest, DyndepLoadMissingEntry) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out: r in || dd\n" -" dyndep = dd\n" - ); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" - ); - - string err; - ASSERT_TRUE(GetNode("dd")->dyndep_pending()); - EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd"), &err)); - EXPECT_EQ("'out' not mentioned in its dyndep file 'dd'", err); -} - -TEST_F(GraphTest, DyndepLoadExtraEntry) { - AssertParse(&state_, -"rule r\n" -" command = 
unused\n" -"build out: r in || dd\n" -" dyndep = dd\n" -"build out2: r in || dd\n" - ); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out: dyndep\n" -"build out2: dyndep\n" - ); - - string err; - ASSERT_TRUE(GetNode("dd")->dyndep_pending()); - EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd"), &err)); - EXPECT_EQ("dyndep file 'dd' mentions output 'out2' whose build statement " - "does not have a dyndep binding for the file", err); -} - -TEST_F(GraphTest, DyndepLoadOutputWithMultipleRules1) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out1 | out-twice.imp: r in1\n" -"build out2: r in2 || dd\n" -" dyndep = dd\n" - ); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out2 | out-twice.imp: dyndep\n" - ); - - string err; - ASSERT_TRUE(GetNode("dd")->dyndep_pending()); - EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd"), &err)); - EXPECT_EQ("multiple rules generate out-twice.imp", err); -} - -TEST_F(GraphTest, DyndepLoadOutputWithMultipleRules2) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out1: r in1 || dd1\n" -" dyndep = dd1\n" -"build out2: r in2 || dd2\n" -" dyndep = dd2\n" - ); - fs_.Create("dd1", -"ninja_dyndep_version = 1\n" -"build out1 | out-twice.imp: dyndep\n" - ); - fs_.Create("dd2", -"ninja_dyndep_version = 1\n" -"build out2 | out-twice.imp: dyndep\n" - ); - - string err; - ASSERT_TRUE(GetNode("dd1")->dyndep_pending()); - EXPECT_TRUE(scan_.LoadDyndeps(GetNode("dd1"), &err)); - EXPECT_EQ("", err); - ASSERT_TRUE(GetNode("dd2")->dyndep_pending()); - EXPECT_FALSE(scan_.LoadDyndeps(GetNode("dd2"), &err)); - EXPECT_EQ("multiple rules generate out-twice.imp", err); -} - -TEST_F(GraphTest, DyndepLoadMultiple) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out1: r in1 || dd\n" -" dyndep = dd\n" -"build out2: r in2 || dd\n" -" dyndep = dd\n" -"build outNot: r in3 || dd\n" - ); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out1 | out1imp: dyndep | in1imp\n" -"build out2: dyndep | 
in2imp\n" -" restat = 1\n" - ); - - string err; - ASSERT_TRUE(GetNode("dd")->dyndep_pending()); - EXPECT_TRUE(scan_.LoadDyndeps(GetNode("dd"), &err)); - EXPECT_EQ("", err); - EXPECT_FALSE(GetNode("dd")->dyndep_pending()); - - Edge* edge1 = GetNode("out1")->in_edge(); - ASSERT_EQ(2u, edge1->outputs_.size()); - EXPECT_EQ("out1", edge1->outputs_[0]->path()); - EXPECT_EQ("out1imp", edge1->outputs_[1]->path()); - EXPECT_EQ(1u, edge1->implicit_outs_); - ASSERT_EQ(3u, edge1->inputs_.size()); - EXPECT_EQ("in1", edge1->inputs_[0]->path()); - EXPECT_EQ("in1imp", edge1->inputs_[1]->path()); - EXPECT_EQ("dd", edge1->inputs_[2]->path()); - EXPECT_EQ(1u, edge1->implicit_deps_); - EXPECT_EQ(1u, edge1->order_only_deps_); - EXPECT_FALSE(edge1->GetBindingBool("restat")); - EXPECT_EQ(edge1, GetNode("out1imp")->in_edge()); - Node* in1imp = GetNode("in1imp"); - ASSERT_EQ(1u, in1imp->out_edges().size()); - EXPECT_EQ(edge1, in1imp->out_edges()[0]); - - Edge* edge2 = GetNode("out2")->in_edge(); - ASSERT_EQ(1u, edge2->outputs_.size()); - EXPECT_EQ("out2", edge2->outputs_[0]->path()); - EXPECT_EQ(0u, edge2->implicit_outs_); - ASSERT_EQ(3u, edge2->inputs_.size()); - EXPECT_EQ("in2", edge2->inputs_[0]->path()); - EXPECT_EQ("in2imp", edge2->inputs_[1]->path()); - EXPECT_EQ("dd", edge2->inputs_[2]->path()); - EXPECT_EQ(1u, edge2->implicit_deps_); - EXPECT_EQ(1u, edge2->order_only_deps_); - EXPECT_TRUE(edge2->GetBindingBool("restat")); - Node* in2imp = GetNode("in2imp"); - ASSERT_EQ(1u, in2imp->out_edges().size()); - EXPECT_EQ(edge2, in2imp->out_edges()[0]); -} - -TEST_F(GraphTest, DyndepFileMissing) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out: r || dd\n" -" dyndep = dd\n" - ); - - string err; - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("loading 'dd': No such file or directory", err); -} - -TEST_F(GraphTest, DyndepFileError) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out: r || dd\n" -" dyndep = dd\n" - ); - 
fs_.Create("dd", -"ninja_dyndep_version = 1\n" - ); - - string err; - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("'out' not mentioned in its dyndep file 'dd'", err); -} - -TEST_F(GraphTest, DyndepImplicitInputNewer) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out: r || dd\n" -" dyndep = dd\n" - ); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out: dyndep | in\n" - ); - fs_.Create("out", ""); - fs_.Tick(); - fs_.Create("in", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(GetNode("in")->dirty()); - EXPECT_FALSE(GetNode("dd")->dirty()); - - // "out" is dirty due to dyndep-specified implicit input - EXPECT_TRUE(GetNode("out")->dirty()); -} - -TEST_F(GraphTest, DyndepFileReady) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build dd: r dd-in\n" -"build out: r || dd\n" -" dyndep = dd\n" - ); - fs_.Create("dd-in", ""); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out: dyndep | in\n" - ); - fs_.Create("out", ""); - fs_.Tick(); - fs_.Create("in", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(GetNode("in")->dirty()); - EXPECT_FALSE(GetNode("dd")->dirty()); - EXPECT_TRUE(GetNode("dd")->in_edge()->outputs_ready()); - - // "out" is dirty due to dyndep-specified implicit input - EXPECT_TRUE(GetNode("out")->dirty()); -} - -TEST_F(GraphTest, DyndepFileNotClean) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build dd: r dd-in\n" -"build out: r || dd\n" -" dyndep = dd\n" - ); - fs_.Create("dd", "this-should-not-be-loaded"); - fs_.Tick(); - fs_.Create("dd-in", ""); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - EXPECT_TRUE(GetNode("dd")->dirty()); - EXPECT_FALSE(GetNode("dd")->in_edge()->outputs_ready()); - - // "out" is clean but not ready since "dd" is not 
ready - EXPECT_FALSE(GetNode("out")->dirty()); - EXPECT_FALSE(GetNode("out")->in_edge()->outputs_ready()); -} - -TEST_F(GraphTest, DyndepFileNotReady) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build tmp: r\n" -"build dd: r dd-in || tmp\n" -"build out: r || dd\n" -" dyndep = dd\n" - ); - fs_.Create("dd", "this-should-not-be-loaded"); - fs_.Create("dd-in", ""); - fs_.Tick(); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - EXPECT_FALSE(GetNode("dd")->dirty()); - EXPECT_FALSE(GetNode("dd")->in_edge()->outputs_ready()); - EXPECT_FALSE(GetNode("out")->dirty()); - EXPECT_FALSE(GetNode("out")->in_edge()->outputs_ready()); -} - -TEST_F(GraphTest, DyndepFileSecondNotReady) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build dd1: r dd1-in\n" -"build dd2-in: r || dd1\n" -" dyndep = dd1\n" -"build dd2: r dd2-in\n" -"build out: r || dd2\n" -" dyndep = dd2\n" - ); - fs_.Create("dd1", ""); - fs_.Create("dd2", ""); - fs_.Create("dd2-in", ""); - fs_.Tick(); - fs_.Create("dd1-in", ""); - fs_.Create("out", ""); - - string err; - EXPECT_TRUE(scan_.RecomputeDirty(GetNode("out"), &err)); - ASSERT_EQ("", err); - - EXPECT_TRUE(GetNode("dd1")->dirty()); - EXPECT_FALSE(GetNode("dd1")->in_edge()->outputs_ready()); - EXPECT_FALSE(GetNode("dd2")->dirty()); - EXPECT_FALSE(GetNode("dd2")->in_edge()->outputs_ready()); - EXPECT_FALSE(GetNode("out")->dirty()); - EXPECT_FALSE(GetNode("out")->in_edge()->outputs_ready()); -} - -TEST_F(GraphTest, DyndepFileCircular) { - AssertParse(&state_, -"rule r\n" -" command = unused\n" -"build out: r in || dd\n" -" depfile = out.d\n" -" dyndep = dd\n" -"build in: r circ\n" - ); - fs_.Create("out.d", "out: inimp\n"); - fs_.Create("dd", -"ninja_dyndep_version = 1\n" -"build out | circ: dyndep\n" - ); - fs_.Create("out", ""); - - Edge* edge = GetNode("out")->in_edge(); - string err; - EXPECT_FALSE(scan_.RecomputeDirty(GetNode("out"), &err)); - 
EXPECT_EQ("dependency cycle: circ -> in -> circ", err); - - // Verify that "out.d" was loaded exactly once despite - // circular reference discovered from dyndep file. - ASSERT_EQ(3u, edge->inputs_.size()); - EXPECT_EQ("in", edge->inputs_[0]->path()); - EXPECT_EQ("inimp", edge->inputs_[1]->path()); - EXPECT_EQ("dd", edge->inputs_[2]->path()); - EXPECT_EQ(1u, edge->implicit_deps_); - EXPECT_EQ(1u, edge->order_only_deps_); -} diff --git a/ninja/src/graphviz.cc b/ninja/src/graphviz.cc deleted file mode 100644 index 0d072512513..00000000000 --- a/ninja/src/graphviz.cc +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "graphviz.h" - -#include -#include - -#include "dyndep.h" -#include "graph.h" - -void GraphViz::AddTarget(Node* node) { - if (visited_nodes_.find(node) != visited_nodes_.end()) - return; - - string pathstr = node->path(); - replace(pathstr.begin(), pathstr.end(), '\\', '/'); - printf("\"%p\" [label=\"%s\"]\n", node, pathstr.c_str()); - visited_nodes_.insert(node); - - Edge* edge = node->in_edge(); - - if (!edge) { - // Leaf node. - // Draw as a rect? 
- return; - } - - if (visited_edges_.find(edge) != visited_edges_.end()) - return; - visited_edges_.insert(edge); - - if (edge->dyndep_ && edge->dyndep_->dyndep_pending()) { - std::string err; - if (!dyndep_loader_.LoadDyndeps(edge->dyndep_, &err)) { - Warning("%s\n", err.c_str()); - } - } - - if (edge->inputs_.size() == 1 && edge->outputs_.size() == 1) { - // Can draw simply. - // Note extra space before label text -- this is cosmetic and feels - // like a graphviz bug. - printf("\"%p\" -> \"%p\" [label=\" %s\"]\n", - edge->inputs_[0], edge->outputs_[0], edge->rule_->name().c_str()); - } else { - printf("\"%p\" [label=\"%s\", shape=ellipse]\n", - edge, edge->rule_->name().c_str()); - for (vector::iterator out = edge->outputs_.begin(); - out != edge->outputs_.end(); ++out) { - printf("\"%p\" -> \"%p\"\n", edge, *out); - } - for (vector::iterator in = edge->inputs_.begin(); - in != edge->inputs_.end(); ++in) { - const char* order_only = ""; - if (edge->is_order_only(in - edge->inputs_.begin())) - order_only = " style=dotted"; - printf("\"%p\" -> \"%p\" [arrowhead=none%s]\n", (*in), edge, order_only); - } - } - - for (vector::iterator in = edge->inputs_.begin(); - in != edge->inputs_.end(); ++in) { - AddTarget(*in); - } -} - -void GraphViz::Start() { - printf("digraph ninja {\n"); - printf("rankdir=\"LR\"\n"); - printf("node [fontsize=10, shape=box, height=0.25]\n"); - printf("edge [fontsize=10]\n"); -} - -void GraphViz::Finish() { - printf("}\n"); -} diff --git a/ninja/src/graphviz.h b/ninja/src/graphviz.h deleted file mode 100644 index 601c9b2ea06..00000000000 --- a/ninja/src/graphviz.h +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_GRAPHVIZ_H_ -#define NINJA_GRAPHVIZ_H_ - -#include - -#include "dyndep.h" - -struct DiskInterface; -struct Node; -struct Edge; -struct State; - -/// Runs the process of creating GraphViz .dot file output. -struct GraphViz { - GraphViz(State* state, DiskInterface* disk_interface) - : dyndep_loader_(state, disk_interface) {} - void Start(); - void AddTarget(Node* node); - void Finish(); - - DyndepLoader dyndep_loader_; - std::set visited_nodes_; - std::set visited_edges_; -}; - -#endif // NINJA_GRAPHVIZ_H_ diff --git a/ninja/src/hash_collision_bench.cc b/ninja/src/hash_collision_bench.cc deleted file mode 100644 index ff947dca60c..00000000000 --- a/ninja/src/hash_collision_bench.cc +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "build_log.h" - -#include -using namespace std; - -#include -#include - -int random(int low, int high) { - return int(low + (rand() / double(RAND_MAX)) * (high - low) + 0.5); -} - -void RandomCommand(char** s) { - int len = random(5, 100); - *s = new char[len]; - for (int i = 0; i < len; ++i) - (*s)[i] = (char)random(32, 127); -} - -int main() { - const int N = 20 * 1000 * 1000; - - // Leak these, else 10% of the runtime is spent destroying strings. - char** commands = new char*[N]; - pair* hashes = new pair[N]; - - srand((int)time(NULL)); - - for (int i = 0; i < N; ++i) { - RandomCommand(&commands[i]); - hashes[i] = make_pair(BuildLog::LogEntry::HashCommand(commands[i]), i); - } - - sort(hashes, hashes + N); - - int collision_count = 0; - for (int i = 1; i < N; ++i) { - if (hashes[i - 1].first == hashes[i].first) { - if (strcmp(commands[hashes[i - 1].second], - commands[hashes[i].second]) != 0) { - printf("collision!\n string 1: '%s'\n string 2: '%s'\n", - commands[hashes[i - 1].second], - commands[hashes[i].second]); - collision_count++; - } - } - } - printf("\n\n%d collisions after %d runs\n", collision_count, N); -} diff --git a/ninja/src/hash_map.h b/ninja/src/hash_map.h deleted file mode 100644 index 55d2c9d46d7..00000000000 --- a/ninja/src/hash_map.h +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef NINJA_MAP_H_ -#define NINJA_MAP_H_ - -#include -#include -#include "string_piece.h" -#include "util.h" - -// MurmurHash2, by Austin Appleby -static inline -unsigned int MurmurHash2(const void* key, size_t len) { - static const unsigned int seed = 0xDECAFBAD; - const unsigned int m = 0x5bd1e995; - const int r = 24; - unsigned int h = seed ^ len; - const unsigned char* data = (const unsigned char*)key; - while (len >= 4) { - unsigned int k; - memcpy(&k, data, sizeof k); - k *= m; - k ^= k >> r; - k *= m; - h *= m; - h ^= k; - data += 4; - len -= 4; - } - switch (len) { - case 3: h ^= data[2] << 16; - NINJA_FALLTHROUGH; - case 2: h ^= data[1] << 8; - NINJA_FALLTHROUGH; - case 1: h ^= data[0]; - h *= m; - }; - h ^= h >> 13; - h *= m; - h ^= h >> 15; - return h; -} - -#if (__cplusplus >= 201103L) || (_MSC_VER >= 1900) -#include - -namespace std { -template<> -struct hash { - typedef StringPiece argument_type; - typedef size_t result_type; - - size_t operator()(StringPiece key) const { - return MurmurHash2(key.str_, key.len_); - } -}; -} - -#elif defined(_MSC_VER) -#include - -using stdext::hash_map; -using stdext::hash_compare; - -struct StringPieceCmp : public hash_compare { - size_t operator()(const StringPiece& key) const { - return MurmurHash2(key.str_, key.len_); - } - bool operator()(const StringPiece& a, const StringPiece& b) const { - int cmp = memcmp(a.str_, b.str_, min(a.len_, b.len_)); - if (cmp < 0) { - return true; - } else if (cmp > 0) { - return false; - } else { - return a.len_ < b.len_; - } - } -}; - -#else -#include - -using __gnu_cxx::hash_map; - -namespace __gnu_cxx { -template<> -struct hash { - size_t operator()(StringPiece key) const { - return MurmurHash2(key.str_, key.len_); - } -}; -} -#endif - -/// A template for hash_maps keyed by a StringPiece whose string is -/// owned externally (typically by the values). Use like: -/// ExternalStringHash::Type foos; to make foos into a hash -/// mapping StringPiece => Foo*. 
-template -struct ExternalStringHashMap { -#if (__cplusplus >= 201103L) || (_MSC_VER >= 1900) - typedef std::unordered_map Type; -#elif defined(_MSC_VER) - typedef hash_map Type; -#else - typedef hash_map Type; -#endif -}; - -#endif // NINJA_MAP_H_ diff --git a/ninja/src/includes_normalize-win32.cc b/ninja/src/includes_normalize-win32.cc deleted file mode 100644 index 79bf5b46a93..00000000000 --- a/ninja/src/includes_normalize-win32.cc +++ /dev/null @@ -1,209 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "includes_normalize.h" - -#include "string_piece.h" -#include "string_piece_util.h" -#include "util.h" - -#include -#include -#include - -#include - -namespace { - -bool InternalGetFullPathName(const StringPiece& file_name, char* buffer, - size_t buffer_length, string *err) { - DWORD result_size = GetFullPathNameA(file_name.AsString().c_str(), - buffer_length, buffer, NULL); - if (result_size == 0) { - *err = "GetFullPathNameA(" + file_name.AsString() + "): " + - GetLastErrorString(); - return false; - } else if (result_size > buffer_length) { - *err = "path too long"; - return false; - } - return true; -} - -bool IsPathSeparator(char c) { - return c == '/' || c == '\\'; -} - -// Return true if paths a and b are on the same windows drive. -// Return false if this funcation cannot check -// whether or not on the same windows drive. 
-bool SameDriveFast(StringPiece a, StringPiece b) { - if (a.size() < 3 || b.size() < 3) { - return false; - } - - if (!islatinalpha(a[0]) || !islatinalpha(b[0])) { - return false; - } - - if (ToLowerASCII(a[0]) != ToLowerASCII(b[0])) { - return false; - } - - if (a[1] != ':' || b[1] != ':') { - return false; - } - - return IsPathSeparator(a[2]) && IsPathSeparator(b[2]); -} - -// Return true if paths a and b are on the same Windows drive. -bool SameDrive(StringPiece a, StringPiece b, string* err) { - if (SameDriveFast(a, b)) { - return true; - } - - char a_absolute[_MAX_PATH]; - char b_absolute[_MAX_PATH]; - if (!InternalGetFullPathName(a, a_absolute, sizeof(a_absolute), err)) { - return false; - } - if (!InternalGetFullPathName(b, b_absolute, sizeof(b_absolute), err)) { - return false; - } - char a_drive[_MAX_DIR]; - char b_drive[_MAX_DIR]; - _splitpath(a_absolute, a_drive, NULL, NULL, NULL); - _splitpath(b_absolute, b_drive, NULL, NULL, NULL); - return _stricmp(a_drive, b_drive) == 0; -} - -// Check path |s| is FullPath style returned by GetFullPathName. -// This ignores difference of path separator. -// This is used not to call very slow GetFullPathName API. -bool IsFullPathName(StringPiece s) { - if (s.size() < 3 || - !islatinalpha(s[0]) || - s[1] != ':' || - !IsPathSeparator(s[2])) { - return false; - } - - // Check "." or ".." is contained in path. - for (size_t i = 2; i < s.size(); ++i) { - if (!IsPathSeparator(s[i])) { - continue; - } - - // Check ".". - if (i + 1 < s.size() && s[i+1] == '.' && - (i + 2 >= s.size() || IsPathSeparator(s[i+2]))) { - return false; - } - - // Check "..". - if (i + 2 < s.size() && s[i+1] == '.' && s[i+2] == '.' 
&& - (i + 3 >= s.size() || IsPathSeparator(s[i+3]))) { - return false; - } - } - - return true; -} - -} // anonymous namespace - -IncludesNormalize::IncludesNormalize(const string& relative_to) { - string err; - relative_to_ = AbsPath(relative_to, &err); - if (!err.empty()) { - Fatal("Initializing IncludesNormalize(): %s", err.c_str()); - } - split_relative_to_ = SplitStringPiece(relative_to_, '/'); -} - -string IncludesNormalize::AbsPath(StringPiece s, string* err) { - if (IsFullPathName(s)) { - string result = s.AsString(); - for (size_t i = 0; i < result.size(); ++i) { - if (result[i] == '\\') { - result[i] = '/'; - } - } - return result; - } - - char result[_MAX_PATH]; - if (!InternalGetFullPathName(s, result, sizeof(result), err)) { - return ""; - } - for (char* c = result; *c; ++c) - if (*c == '\\') - *c = '/'; - return result; -} - -string IncludesNormalize::Relativize( - StringPiece path, const vector& start_list, string* err) { - string abs_path = AbsPath(path, err); - if (!err->empty()) - return ""; - vector path_list = SplitStringPiece(abs_path, '/'); - int i; - for (i = 0; i < static_cast(min(start_list.size(), path_list.size())); - ++i) { - if (!EqualsCaseInsensitiveASCII(start_list[i], path_list[i])) { - break; - } - } - - vector rel_list; - rel_list.reserve(start_list.size() - i + path_list.size() - i); - for (int j = 0; j < static_cast(start_list.size() - i); ++j) - rel_list.push_back(".."); - for (int j = i; j < static_cast(path_list.size()); ++j) - rel_list.push_back(path_list[j]); - if (rel_list.size() == 0) - return "."; - return JoinStringPiece(rel_list, '/'); -} - -bool IncludesNormalize::Normalize(const string& input, - string* result, string* err) const { - char copy[_MAX_PATH + 1]; - size_t len = input.size(); - if (len > _MAX_PATH) { - *err = "path too long"; - return false; - } - strncpy(copy, input.c_str(), input.size() + 1); - uint64_t slash_bits; - if (!CanonicalizePath(copy, &len, &slash_bits, err)) - return false; - StringPiece 
partially_fixed(copy, len); - string abs_input = AbsPath(partially_fixed, err); - if (!err->empty()) - return false; - - if (!SameDrive(abs_input, relative_to_, err)) { - if (!err->empty()) - return false; - *result = partially_fixed.AsString(); - return true; - } - *result = Relativize(abs_input, split_relative_to_, err); - if (!err->empty()) - return false; - return true; -} diff --git a/ninja/src/includes_normalize.h b/ninja/src/includes_normalize.h deleted file mode 100644 index 0339581ec8b..00000000000 --- a/ninja/src/includes_normalize.h +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include -#include -using namespace std; - -struct StringPiece; - -/// Utility functions for normalizing include paths on Windows. -/// TODO: this likely duplicates functionality of CanonicalizePath; refactor. -struct IncludesNormalize { - /// Normalize path relative to |relative_to|. - IncludesNormalize(const string& relative_to); - - // Internal utilities made available for testing, maybe useful otherwise. - static string AbsPath(StringPiece s, string* err); - static string Relativize(StringPiece path, - const vector& start_list, string* err); - - /// Normalize by fixing slashes style, fixing redundant .. and . and makes the - /// path |input| relative to |this->relative_to_| and store to |result|. 
- bool Normalize(const string& input, string* result, string* err) const; - - private: - string relative_to_; - vector split_relative_to_; -}; diff --git a/ninja/src/includes_normalize_test.cc b/ninja/src/includes_normalize_test.cc deleted file mode 100644 index dbcdbe0eb81..00000000000 --- a/ninja/src/includes_normalize_test.cc +++ /dev/null @@ -1,167 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "includes_normalize.h" - -#include - -#include - -#include "string_piece_util.h" -#include "test.h" -#include "util.h" - -namespace { - -string GetCurDir() { - char buf[_MAX_PATH]; - _getcwd(buf, sizeof(buf)); - vector parts = SplitStringPiece(buf, '\\'); - return parts[parts.size() - 1].AsString(); -} - -string NormalizeAndCheckNoError(const string& input) { - string result, err; - IncludesNormalize normalizer("."); - EXPECT_TRUE(normalizer.Normalize(input, &result, &err)); - EXPECT_EQ("", err); - return result; -} - -string NormalizeRelativeAndCheckNoError(const string& input, - const string& relative_to) { - string result, err; - IncludesNormalize normalizer(relative_to); - EXPECT_TRUE(normalizer.Normalize(input, &result, &err)); - EXPECT_EQ("", err); - return result; -} - -} // namespace - -TEST(IncludesNormalize, Simple) { - EXPECT_EQ("b", NormalizeAndCheckNoError("a\\..\\b")); - EXPECT_EQ("b", NormalizeAndCheckNoError("a\\../b")); - EXPECT_EQ("a/b", 
NormalizeAndCheckNoError("a\\.\\b")); - EXPECT_EQ("a/b", NormalizeAndCheckNoError("a\\./b")); -} - -TEST(IncludesNormalize, WithRelative) { - string err; - string currentdir = GetCurDir(); - EXPECT_EQ("c", NormalizeRelativeAndCheckNoError("a/b/c", "a/b")); - EXPECT_EQ("a", - NormalizeAndCheckNoError(IncludesNormalize::AbsPath("a", &err))); - EXPECT_EQ("", err); - EXPECT_EQ(string("../") + currentdir + string("/a"), - NormalizeRelativeAndCheckNoError("a", "../b")); - EXPECT_EQ(string("../") + currentdir + string("/a/b"), - NormalizeRelativeAndCheckNoError("a/b", "../c")); - EXPECT_EQ("../../a", NormalizeRelativeAndCheckNoError("a", "b/c")); - EXPECT_EQ(".", NormalizeRelativeAndCheckNoError("a", "a")); -} - -TEST(IncludesNormalize, Case) { - EXPECT_EQ("b", NormalizeAndCheckNoError("Abc\\..\\b")); - EXPECT_EQ("BdEf", NormalizeAndCheckNoError("Abc\\..\\BdEf")); - EXPECT_EQ("A/b", NormalizeAndCheckNoError("A\\.\\b")); - EXPECT_EQ("a/b", NormalizeAndCheckNoError("a\\./b")); - EXPECT_EQ("A/B", NormalizeAndCheckNoError("A\\.\\B")); - EXPECT_EQ("A/B", NormalizeAndCheckNoError("A\\./B")); -} - -TEST(IncludesNormalize, DifferentDrive) { - EXPECT_EQ("stuff.h", - NormalizeRelativeAndCheckNoError("p:\\vs08\\stuff.h", "p:\\vs08")); - EXPECT_EQ("stuff.h", - NormalizeRelativeAndCheckNoError("P:\\Vs08\\stuff.h", "p:\\vs08")); - EXPECT_EQ("p:/vs08/stuff.h", - NormalizeRelativeAndCheckNoError("p:\\vs08\\stuff.h", "c:\\vs08")); - EXPECT_EQ("P:/vs08/stufF.h", NormalizeRelativeAndCheckNoError( - "P:\\vs08\\stufF.h", "D:\\stuff/things")); - EXPECT_EQ("P:/vs08/stuff.h", NormalizeRelativeAndCheckNoError( - "P:/vs08\\stuff.h", "D:\\stuff/things")); - EXPECT_EQ("P:/wee/stuff.h", - NormalizeRelativeAndCheckNoError("P:/vs08\\../wee\\stuff.h", - "D:\\stuff/things")); -} - -TEST(IncludesNormalize, LongInvalidPath) { - const char kLongInputString[] = - "C:\\Program Files (x86)\\Microsoft Visual Studio " - "12.0\\VC\\INCLUDEwarning #31001: The dll for reading and writing the " - "pdb (for example, 
mspdb110.dll) could not be found on your path. This " - "is usually a configuration error. Compilation will continue using /Z7 " - "instead of /Zi, but expect a similar error when you link your program."; - // Too long, won't be canonicalized. Ensure doesn't crash. - string result, err; - IncludesNormalize normalizer("."); - EXPECT_FALSE( - normalizer.Normalize(kLongInputString, &result, &err)); - EXPECT_EQ("path too long", err); - - - // Construct max size path having cwd prefix. - // kExactlyMaxPath = "$cwd\\a\\aaaa...aaaa\0"; - char kExactlyMaxPath[_MAX_PATH + 1]; - ASSERT_NE(_getcwd(kExactlyMaxPath, sizeof kExactlyMaxPath), NULL); - - int cwd_len = strlen(kExactlyMaxPath); - ASSERT_LE(cwd_len + 3 + 1, _MAX_PATH) - kExactlyMaxPath[cwd_len] = '\\'; - kExactlyMaxPath[cwd_len + 1] = 'a'; - kExactlyMaxPath[cwd_len + 2] = '\\'; - - kExactlyMaxPath[cwd_len + 3] = 'a'; - - for (int i = cwd_len + 4; i < _MAX_PATH; ++i) { - if (i > cwd_len + 4 && i < _MAX_PATH - 1 && i % 10 == 0) - kExactlyMaxPath[i] = '\\'; - else - kExactlyMaxPath[i] = 'a'; - } - - kExactlyMaxPath[_MAX_PATH] = '\0'; - EXPECT_EQ(strlen(kExactlyMaxPath), _MAX_PATH); - - string forward_slashes(kExactlyMaxPath); - replace(forward_slashes.begin(), forward_slashes.end(), '\\', '/'); - // Make sure a path that's exactly _MAX_PATH long is canonicalized. - EXPECT_EQ(forward_slashes.substr(cwd_len + 1), - NormalizeAndCheckNoError(kExactlyMaxPath)); -} - -TEST(IncludesNormalize, ShortRelativeButTooLongAbsolutePath) { - string result, err; - IncludesNormalize normalizer("."); - // A short path should work - EXPECT_TRUE(normalizer.Normalize("a", &result, &err)); - EXPECT_EQ("", err); - - // Construct max size path having cwd prefix. 
- // kExactlyMaxPath = "aaaa\\aaaa...aaaa\0"; - char kExactlyMaxPath[_MAX_PATH + 1]; - for (int i = 0; i < _MAX_PATH; ++i) { - if (i < _MAX_PATH - 1 && i % 10 == 4) - kExactlyMaxPath[i] = '\\'; - else - kExactlyMaxPath[i] = 'a'; - } - kExactlyMaxPath[_MAX_PATH] = '\0'; - EXPECT_EQ(strlen(kExactlyMaxPath), _MAX_PATH); - - // Make sure a path that's exactly _MAX_PATH long fails with a proper error. - EXPECT_FALSE(normalizer.Normalize(kExactlyMaxPath, &result, &err)); - EXPECT_TRUE(err.find("GetFullPathName") != string::npos); -} diff --git a/ninja/src/inline.sh b/ninja/src/inline.sh deleted file mode 100755 index b64e8cadf0b..00000000000 --- a/ninja/src/inline.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -# -# Copyright 2001 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This quick script converts a text file into an #include-able header. -# It expects the name of the variable as its first argument, and reads -# stdin and writes stdout. - -varname="$1" -echo "const char $varname[] =" -od -t x1 -A n -v | sed -e 's|^[\t ]\{0,\}$||g; s|[\t ]\{1,\}| |g; s| \{1,\}$||g; s| |\\x|g; s|^|"|; s|$|"|' -echo ";" - diff --git a/ninja/src/lexer.cc b/ninja/src/lexer.cc deleted file mode 100644 index 43a3e0ad80a..00000000000 --- a/ninja/src/lexer.cc +++ /dev/null @@ -1,980 +0,0 @@ -/* Generated by re2c 2.0.3 */ -// Copyright 2011 Google Inc. All Rights Reserved. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "lexer.h" - -#include - -#include "eval_env.h" -#include "util.h" - -bool Lexer::Error(const string& message, string* err) { - // Compute line/column. - int line = 1; - const char* line_start = input_.str_; - for (const char* p = input_.str_; p < last_token_; ++p) { - if (*p == '\n') { - ++line; - line_start = p + 1; - } - } - int col = last_token_ ? (int)(last_token_ - line_start) : 0; - - char buf[1024]; - snprintf(buf, sizeof(buf), "%s:%d: ", filename_.AsString().c_str(), line); - *err = buf; - *err += message + "\n"; - - // Add some context to the message. 
- const int kTruncateColumn = 72; - if (col > 0 && col < kTruncateColumn) { - int len; - bool truncated = true; - for (len = 0; len < kTruncateColumn; ++len) { - if (line_start[len] == 0 || line_start[len] == '\n') { - truncated = false; - break; - } - } - *err += string(line_start, len); - if (truncated) - *err += "..."; - *err += "\n"; - *err += string(col, ' '); - *err += "^ near here"; - } - - return false; -} - -Lexer::Lexer(const char* input) { - Start("input", input); -} - -void Lexer::Start(StringPiece filename, StringPiece input) { - filename_ = filename; - input_ = input; - ofs_ = input_.str_; - last_token_ = NULL; -} - -const char* Lexer::TokenName(Token t) { - switch (t) { - case ERROR: return "lexing error"; - case BUILD: return "'build'"; - case COLON: return "':'"; - case DEFAULT: return "'default'"; - case EQUALS: return "'='"; - case COLON_EQUAL: return "':='"; - case IDENT: return "identifier"; - case INCLUDE: return "'include'"; - case INDENT: return "indent"; - case NEWLINE: return "newline"; - case PIPE2: return "'||'"; - case PIPE: return "'|'"; - case POOL: return "'pool'"; - case RULE: return "'rule'"; - case SUBNINJA: return "'subninja'"; - case TEOF: return "eof"; - } - return NULL; // not reached -} - -const char* Lexer::TokenErrorHint(Token expected) { - switch (expected) { - case COLON: - return " ($ also escapes ':')"; - default: - return ""; - } -} - -string Lexer::DescribeLastError() { - if (last_token_) { - switch (last_token_[0]) { - case '\t': - return "tabs are not allowed, use spaces"; - } - } - return "lexing error"; -} - -void Lexer::UnreadToken() { - ofs_ = last_token_; -} - -Lexer::Token Lexer::ReadToken() { - const char* p = ofs_; - const char* q; - const char* start; - Lexer::Token token; - for (;;) { - start = p; - -{ - unsigned char yych; - unsigned int yyaccept = 0; - static const unsigned char yybm[] = { - 0, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 0, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 
128, - 128, 128, 128, 128, 128, 128, 128, 128, - 160, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 192, 192, 128, - 192, 192, 192, 192, 192, 192, 192, 192, - 192, 192, 128, 128, 128, 128, 128, 128, - 128, 192, 192, 192, 192, 192, 192, 192, - 192, 192, 192, 192, 192, 192, 192, 192, - 192, 192, 192, 192, 192, 192, 192, 192, - 192, 192, 192, 128, 128, 128, 128, 192, - 128, 192, 192, 192, 192, 192, 192, 192, - 192, 192, 192, 192, 192, 192, 192, 192, - 192, 192, 192, 192, 192, 192, 192, 192, - 192, 192, 192, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - }; - yych = *p; - if (yybm[0+yych] & 32) { - goto yy9; - } - if (yych <= '^') { - if (yych <= ',') { - if (yych <= '\f') { - if (yych <= 0x00) goto yy2; - if (yych == '\n') goto yy6; - goto yy4; - } else { - if (yych <= '\r') goto yy8; - if (yych == '#') goto yy12; - goto yy4; - } - } else { - if (yych <= ':') { - if (yych == '/') goto yy4; - if (yych <= '9') goto yy13; - goto yy16; - } else { - if (yych <= '=') { - if (yych <= '<') goto yy4; - goto yy18; - } else { - if (yych <= '@') goto yy4; - if (yych <= 'Z') goto yy13; - goto yy4; - } - } - } - } else { - if (yych <= 'n') { - if (yych <= 'b') { - if (yych == '`') goto yy4; - if (yych <= 'a') goto yy13; - goto yy20; - } else { - if (yych <= 'd') { - if (yych <= 'c') goto yy13; - goto yy21; - } else { - if 
(yych == 'i') goto yy22; - goto yy13; - } - } - } else { - if (yych <= 'r') { - if (yych <= 'o') goto yy23; - if (yych <= 'p') goto yy25; - if (yych <= 'q') goto yy13; - goto yy26; - } else { - if (yych <= 'z') { - if (yych <= 's') goto yy27; - goto yy13; - } else { - if (yych == '|') goto yy28; - goto yy4; - } - } - } - } -yy2: - ++p; - { token = TEOF; break; } -yy4: - ++p; -yy5: - { token = ERROR; break; } -yy6: - ++p; - { token = NEWLINE; break; } -yy8: - yych = *++p; - if (yych == '\n') goto yy30; - goto yy5; -yy9: - yyaccept = 0; - yych = *(q = ++p); - if (yybm[0+yych] & 32) { - goto yy9; - } - if (yych <= '\f') { - if (yych == '\n') goto yy6; - } else { - if (yych <= '\r') goto yy32; - if (yych == '#') goto yy34; - } -yy11: - { token = INDENT; break; } -yy12: - yyaccept = 1; - yych = *(q = ++p); - if (yych <= 0x00) goto yy5; - goto yy35; -yy13: - yych = *++p; -yy14: - if (yybm[0+yych] & 64) { - goto yy13; - } - { token = IDENT; break; } -yy16: - yych = *++p; - if (yych == '=') goto yy38; - { token = COLON; break; } -yy18: - ++p; - { token = EQUALS; break; } -yy20: - yych = *++p; - if (yych == 'u') goto yy40; - goto yy14; -yy21: - yych = *++p; - if (yych == 'e') goto yy41; - goto yy14; -yy22: - yych = *++p; - if (yych == 'n') goto yy42; - goto yy14; -yy23: - yych = *++p; - if (yybm[0+yych] & 64) { - goto yy13; - } - { token = BUILD; break; } -yy25: - yych = *++p; - if (yych == 'o') goto yy43; - goto yy14; -yy26: - yych = *++p; - if (yych == 'u') goto yy44; - goto yy14; -yy27: - yych = *++p; - if (yych == 'u') goto yy45; - goto yy14; -yy28: - yych = *++p; - if (yych == '|') goto yy46; - { token = PIPE; break; } -yy30: - ++p; - { token = NEWLINE; break; } -yy32: - yych = *++p; - if (yych == '\n') goto yy30; -yy33: - p = q; - if (yyaccept == 0) { - goto yy11; - } else { - goto yy5; - } -yy34: - yych = *++p; -yy35: - if (yybm[0+yych] & 128) { - goto yy34; - } - if (yych <= 0x00) goto yy33; - ++p; - { continue; } -yy38: - ++p; - { token = COLON_EQUAL; break; } 
-yy40: - yych = *++p; - if (yych == 'i') goto yy48; - goto yy14; -yy41: - yych = *++p; - if (yych == 'f') goto yy49; - goto yy14; -yy42: - yych = *++p; - if (yych == 'c') goto yy50; - goto yy14; -yy43: - yych = *++p; - if (yych == 'o') goto yy51; - goto yy14; -yy44: - yych = *++p; - if (yych == 'l') goto yy52; - goto yy14; -yy45: - yych = *++p; - if (yych == 'b') goto yy53; - goto yy14; -yy46: - ++p; - { token = PIPE2; break; } -yy48: - yych = *++p; - if (yych == 'l') goto yy54; - goto yy14; -yy49: - yych = *++p; - if (yych == 'a') goto yy55; - goto yy14; -yy50: - yych = *++p; - if (yych == 'l') goto yy56; - goto yy14; -yy51: - yych = *++p; - if (yych == 'l') goto yy57; - goto yy14; -yy52: - yych = *++p; - if (yych == 'e') goto yy59; - goto yy14; -yy53: - yych = *++p; - if (yych == 'n') goto yy61; - goto yy14; -yy54: - yych = *++p; - if (yych == 'd') goto yy23; - goto yy14; -yy55: - yych = *++p; - if (yych == 'u') goto yy62; - goto yy14; -yy56: - yych = *++p; - if (yych == 'u') goto yy63; - goto yy14; -yy57: - yych = *++p; - if (yybm[0+yych] & 64) { - goto yy13; - } - { token = POOL; break; } -yy59: - yych = *++p; - if (yybm[0+yych] & 64) { - goto yy13; - } - { token = RULE; break; } -yy61: - yych = *++p; - if (yych == 'i') goto yy64; - goto yy14; -yy62: - yych = *++p; - if (yych == 'l') goto yy65; - goto yy14; -yy63: - yych = *++p; - if (yych == 'd') goto yy66; - goto yy14; -yy64: - yych = *++p; - if (yych == 'n') goto yy67; - goto yy14; -yy65: - yych = *++p; - if (yych == 't') goto yy68; - goto yy14; -yy66: - yych = *++p; - if (yych == 'e') goto yy70; - goto yy14; -yy67: - yych = *++p; - if (yych == 'j') goto yy72; - goto yy14; -yy68: - yych = *++p; - if (yybm[0+yych] & 64) { - goto yy13; - } - { token = DEFAULT; break; } -yy70: - yych = *++p; - if (yybm[0+yych] & 64) { - goto yy13; - } - { token = INCLUDE; break; } -yy72: - yych = *++p; - if (yych != 'a') goto yy14; - yych = *++p; - if (yybm[0+yych] & 64) { - goto yy13; - } - { token = SUBNINJA; break; } -} - - 
} - - last_token_ = start; - ofs_ = p; - if (token != NEWLINE && token != TEOF) - EatWhitespace(); - return token; -} - -bool Lexer::PeekToken(Token token) { - Token t = ReadToken(); - if (t == token) - return true; - UnreadToken(); - return false; -} - -void Lexer::EatWhitespace() { - const char* p = ofs_; - const char* q; - for (;;) { - ofs_ = p; - -{ - unsigned char yych; - static const unsigned char yybm[] = { - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 128, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - }; - yych = *p; - if (yybm[0+yych] & 128) { - goto yy81; - } - if (yych <= 0x00) goto yy77; - if (yych == '$') goto yy84; - goto yy79; -yy77: - ++p; - { break; } -yy79: - ++p; -yy80: - { break; } -yy81: - yych = *++p; - if (yybm[0+yych] & 128) { - goto yy81; - } - { continue; } -yy84: - yych = *(q = ++p); - if (yych == '\n') goto yy85; - if (yych == '\r') goto yy87; - goto yy80; -yy85: - ++p; - { continue; } -yy87: - yych = *++p; - if (yych == '\n') goto yy89; - p = q; - goto yy80; -yy89: - ++p; - { continue; } -} - - } -} - -bool Lexer::EndAfterEatWhiteSpace(){ - const char* p = ofs_; - for (;;) { - ofs_ = p; - -{ - unsigned char yych; - static const unsigned char yybm[] = { - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 
0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 128, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - }; - yych = *p; - if (yybm[0+yych] & 128) { - goto yy100; - } - if (yych <= '\n') { - if (yych <= 0x00) goto yy93; - if (yych <= '\t') goto yy95; - goto yy97; - } else { - if (yych == '\r') goto yy99; - goto yy95; - } -yy93: - ++p; - { return true; } -yy95: - ++p; -yy96: - { return false; } -yy97: - ++p; - { continue; } -yy99: - yych = *++p; - if (yych == '\n') goto yy103; - goto yy96; -yy100: - yych = *++p; - if (yybm[0+yych] & 128) { - goto yy100; - } - { continue; } -yy103: - ++p; - { continue; } -} - - } -} - -bool Lexer::ReadIdent(string* out) { - const char* p = ofs_; - const char* start; - for (;;) { - start = p; - -{ - unsigned char yych; - static const unsigned char yybm[] = { - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 128, 128, 0, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 0, 0, 0, 0, 0, 0, - 0, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 0, 0, 0, 0, 128, - 0, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 
128, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - }; - yych = *p; - if (yybm[0+yych] & 128) { - goto yy109; - } - ++p; - { - last_token_ = start; - return false; - } -yy109: - yych = *++p; - if (yybm[0+yych] & 128) { - goto yy109; - } - { - out->assign(start, p - start); - break; - } -} - - } - last_token_ = start; - ofs_ = p; - EatWhitespace(); - return true; -} - -bool Lexer::ReadSimplePath(string* out) { - const char* p = ofs_; - const char* start; - for (;;) { - start = p; - -{ - unsigned char yych; - static const unsigned char yybm[] = { - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 128, 0, 0, 0, 128, 0, 0, - 128, 128, 0, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 0, 0, 128, 0, 0, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 0, 128, - 0, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 0, 128, 128, 0, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 
128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - 128, 128, 128, 128, 128, 128, 128, 128, - }; - yych = *p; - if (yybm[0+yych] & 128) { - goto yy116; - } - ++p; - { - last_token_ = start; - return false; - } -yy116: - yych = *++p; - if (yybm[0+yych] & 128) { - goto yy116; - } - { - out->assign(start, p - start); - break; - } -} - - } - last_token_ = start; - ofs_ = p; - EatWhitespace(); - return true; -} - -bool Lexer::ReadEvalString(EvalString* eval, bool path, string* err) { - const char* p = ofs_; - const char* q; - const char* start; - for (;;) { - start = p; - -{ - unsigned char yych; - static const unsigned char yybm[] = { - 0, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 0, 16, 16, 0, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 32, 16, 16, 16, 0, 16, 16, 16, - 16, 16, 16, 16, 16, 208, 144, 16, - 208, 208, 208, 208, 208, 208, 208, 208, - 208, 208, 0, 16, 16, 16, 16, 16, - 16, 208, 208, 208, 208, 208, 208, 208, - 208, 208, 208, 208, 208, 208, 208, 208, - 208, 208, 208, 208, 208, 208, 208, 208, - 208, 208, 208, 16, 16, 16, 16, 208, - 16, 208, 208, 208, 208, 208, 208, 208, - 208, 208, 208, 208, 208, 208, 208, 208, - 208, 208, 208, 208, 208, 208, 208, 208, - 208, 208, 208, 16, 0, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 16, 16, - }; - yych = *p; - if (yybm[0+yych] & 16) { - goto yy123; - } - if (yych <= '\r') { - if (yych <= 0x00) goto yy121; 
- if (yych <= '\n') goto yy126; - goto yy128; - } else { - if (yych <= ' ') goto yy126; - if (yych <= '$') goto yy130; - goto yy126; - } -yy121: - ++p; - { - last_token_ = start; - return Error("unexpected EOF", err); - } -yy123: - yych = *++p; - if (yybm[0+yych] & 16) { - goto yy123; - } - { - eval->AddText(StringPiece(start, p - start)); - continue; - } -yy126: - ++p; - { - if (path) { - p = start; - break; - } else { - if (*start == '\n') - break; - eval->AddText(StringPiece(start, 1)); - continue; - } - } -yy128: - yych = *++p; - if (yych == '\n') goto yy131; - { - last_token_ = start; - return Error(DescribeLastError(), err); - } -yy130: - yych = *++p; - if (yybm[0+yych] & 64) { - goto yy143; - } - if (yych <= ' ') { - if (yych <= '\f') { - if (yych == '\n') goto yy135; - goto yy133; - } else { - if (yych <= '\r') goto yy138; - if (yych <= 0x1F) goto yy133; - goto yy139; - } - } else { - if (yych <= '/') { - if (yych == '$') goto yy141; - goto yy133; - } else { - if (yych <= ':') goto yy146; - if (yych <= '`') goto yy133; - if (yych <= '{') goto yy148; - goto yy133; - } - } -yy131: - ++p; - { - if (path) - p = start; - break; - } -yy133: - ++p; -yy134: - { - last_token_ = start; - return Error("bad $-escape (literal $ must be written as $$)", err); - } -yy135: - yych = *++p; - if (yybm[0+yych] & 32) { - goto yy135; - } - { - continue; - } -yy138: - yych = *++p; - if (yych == '\n') goto yy149; - goto yy134; -yy139: - ++p; - { - eval->AddText(StringPiece(" ", 1)); - continue; - } -yy141: - ++p; - { - eval->AddText(StringPiece("$", 1)); - continue; - } -yy143: - yych = *++p; - if (yybm[0+yych] & 64) { - goto yy143; - } - { - eval->AddSpecial(StringPiece(start + 1, p - start - 1)); - continue; - } -yy146: - ++p; - { - eval->AddText(StringPiece(":", 1)); - continue; - } -yy148: - yych = *(q = ++p); - if (yybm[0+yych] & 128) { - goto yy152; - } - goto yy134; -yy149: - yych = *++p; - if (yych == ' ') goto yy149; - { - continue; - } -yy152: - yych = *++p; - if 
(yybm[0+yych] & 128) { - goto yy152; - } - if (yych == '}') goto yy155; - p = q; - goto yy134; -yy155: - ++p; - { - eval->AddSpecial(StringPiece(start + 2, p - start - 3)); - continue; - } -} - - } - last_token_ = start; - ofs_ = p; - if (path) - EatWhitespace(); - // Non-path strings end in newlines, so there's no whitespace to eat. - return true; -} diff --git a/ninja/src/lexer.h b/ninja/src/lexer.h deleted file mode 100644 index 399d4855e53..00000000000 --- a/ninja/src/lexer.h +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_LEXER_H_ -#define NINJA_LEXER_H_ - -#include "string_piece.h" - -// Windows may #define ERROR. -#ifdef ERROR -#undef ERROR -#endif - -struct EvalString; - -struct Lexer { - Lexer() {} - /// Helper ctor useful for tests. - explicit Lexer(const char* input); - - enum Token { - ERROR, - BUILD, - COLON, - COLON_EQUAL, - DEFAULT, - EQUALS, - IDENT, - INCLUDE, - INDENT, - NEWLINE, - PIPE, - PIPE2, - POOL, - RULE, - SUBNINJA, - TEOF, - }; - - /// Return a human-readable form of a token, used in error messages. - static const char* TokenName(Token t); - - /// Return a human-readable token hint, used in error messages. - static const char* TokenErrorHint(Token expected); - - /// If the last token read was an ERROR token, provide more info - /// or the empty string. - string DescribeLastError(); - - /// Start parsing some input. 
- void Start(StringPiece filename, StringPiece input); - - /// Read a Token from the Token enum. - Token ReadToken(); - - /// Rewind to the last read Token. - void UnreadToken(); - - /// If the next token is \a token, read it and return true. - bool PeekToken(Token token); - - /// Read a simple identifier (a rule or variable name). - /// Returns false if a name can't be read. - bool ReadIdent(string* out); - - /// Read a path (complete with $escapes). - /// Returns false only on error, returned path may be empty if a delimiter - /// (space, newline) is hit. - bool ReadPath(EvalString* path, string* err) { - return ReadEvalString(path, true, err); - } - - bool ReadSimplePath(string *path); - bool EndAfterEatWhiteSpace(); - - /// Read the value side of a var = value line (complete with $escapes). - /// Returns false only on error. - bool ReadVarValue(EvalString* value, string* err) { - return ReadEvalString(value, false, err); - } - - /// Construct an error message with context. - bool Error(const string& message, string* err); - -private: - /// Skip past whitespace (called after each read token/ident/etc.). - void EatWhitespace(); - - /// Read a $-escaped string. - bool ReadEvalString(EvalString* eval, bool path, string* err); - - StringPiece filename_; - StringPiece input_; - const char* ofs_; - const char* last_token_; -}; - -#endif // NINJA_LEXER_H_ diff --git a/ninja/src/lexer.in.cc b/ninja/src/lexer.in.cc deleted file mode 100644 index 3bd26d861d3..00000000000 --- a/ninja/src/lexer.in.cc +++ /dev/null @@ -1,316 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "lexer.h" - -#include - -#include "eval_env.h" -#include "util.h" - -bool Lexer::Error(const string& message, string* err) { - // Compute line/column. - int line = 1; - const char* line_start = input_.str_; - for (const char* p = input_.str_; p < last_token_; ++p) { - if (*p == '\n') { - ++line; - line_start = p + 1; - } - } - int col = last_token_ ? (int)(last_token_ - line_start) : 0; - - char buf[1024]; - snprintf(buf, sizeof(buf), "%s:%d: ", filename_.AsString().c_str(), line); - *err = buf; - *err += message + "\n"; - - // Add some context to the message. 
- const int kTruncateColumn = 72; - if (col > 0 && col < kTruncateColumn) { - int len; - bool truncated = true; - for (len = 0; len < kTruncateColumn; ++len) { - if (line_start[len] == 0 || line_start[len] == '\n') { - truncated = false; - break; - } - } - *err += string(line_start, len); - if (truncated) - *err += "..."; - *err += "\n"; - *err += string(col, ' '); - *err += "^ near here"; - } - - return false; -} - -Lexer::Lexer(const char* input) { - Start("input", input); -} - -void Lexer::Start(StringPiece filename, StringPiece input) { - filename_ = filename; - input_ = input; - ofs_ = input_.str_; - last_token_ = NULL; -} - -const char* Lexer::TokenName(Token t) { - switch (t) { - case ERROR: return "lexing error"; - case BUILD: return "'build'"; - case COLON: return "':'"; - case DEFAULT: return "'default'"; - case EQUALS: return "'='"; - case COLON_EQUAL: return "':='"; - case IDENT: return "identifier"; - case INCLUDE: return "'include'"; - case INDENT: return "indent"; - case NEWLINE: return "newline"; - case PIPE2: return "'||'"; - case PIPE: return "'|'"; - case POOL: return "'pool'"; - case RULE: return "'rule'"; - case SUBNINJA: return "'subninja'"; - case TEOF: return "eof"; - } - return NULL; // not reached -} - -const char* Lexer::TokenErrorHint(Token expected) { - switch (expected) { - case COLON: - return " ($ also escapes ':')"; - default: - return ""; - } -} - -string Lexer::DescribeLastError() { - if (last_token_) { - switch (last_token_[0]) { - case '\t': - return "tabs are not allowed, use spaces"; - } - } - return "lexing error"; -} - -void Lexer::UnreadToken() { - ofs_ = last_token_; -} - -Lexer::Token Lexer::ReadToken() { - const char* p = ofs_; - const char* q; - const char* start; - Lexer::Token token; - for (;;) { - start = p; - /*!re2c - re2c:define:YYCTYPE = "unsigned char"; - re2c:define:YYCURSOR = p; - re2c:define:YYMARKER = q; - re2c:yyfill:enable = 0; - - nul = "\000"; - simple_varname = [a-zA-Z0-9_-]+; - varname = 
[a-zA-Z0-9_.-]+; - - [ ]*"#"[^\000\n]*"\n" { continue; } - [ ]*"\r\n" { token = NEWLINE; break; } - [ ]*"\n" { token = NEWLINE; break; } - [ ]+ { token = INDENT; break; } - "o" | "build" { token = BUILD; break; } - "pool" { token = POOL; break; } - "rule" { token = RULE; break; } - "default" { token = DEFAULT; break; } - "=" { token = EQUALS; break; } - ":" { token = COLON; break; } - ":=" { token = COLON_EQUAL; break; } - "||" { token = PIPE2; break; } - "|" { token = PIPE; break; } - "include" { token = INCLUDE; break; } - "subninja" { token = SUBNINJA; break; } - varname { token = IDENT; break; } - nul { token = TEOF; break; } - [^] { token = ERROR; break; } - */ - } - - last_token_ = start; - ofs_ = p; - if (token != NEWLINE && token != TEOF) - EatWhitespace(); - return token; -} - -bool Lexer::PeekToken(Token token) { - Token t = ReadToken(); - if (t == token) - return true; - UnreadToken(); - return false; -} - -void Lexer::EatWhitespace() { - const char* p = ofs_; - const char* q; - for (;;) { - ofs_ = p; - /*!re2c - [ ]+ { continue; } - "$\r\n" { continue; } - "$\n" { continue; } - nul { break; } - [^] { break; } - */ - } -} - -bool Lexer::EndAfterEatWhiteSpace(){ - const char* p = ofs_; - for (;;) { - ofs_ = p; - /*!re2c - [ ]+ { continue; } - "\r\n" { continue; } - "\n" { continue; } - nul { return true; } - [^] { return false; } - */ - } -} - -bool Lexer::ReadIdent(string* out) { - const char* p = ofs_; - const char* start; - for (;;) { - start = p; - /*!re2c - varname { - out->assign(start, p - start); - break; - } - [^] { - last_token_ = start; - return false; - } - */ - } - last_token_ = start; - ofs_ = p; - EatWhitespace(); - return true; -} - -bool Lexer::ReadSimplePath(string* out) { - const char* p = ofs_; - const char* start; - for (;;) { - start = p; - /*!re2c - [a-zA-Z0-9+,/_:.~()}{%@=!\x5b\x5c\x5d\x80-\xFF-]+ { - out->assign(start, p - start); - break; - } - [^] { - last_token_ = start; - return false; - } - */ - } - last_token_ = start; - 
ofs_ = p; - EatWhitespace(); - return true; -} - -bool Lexer::ReadEvalString(EvalString* eval, bool path, string* err) { - const char* p = ofs_; - const char* q; - const char* start; - for (;;) { - start = p; - /*!re2c - [^$ :\r\n|\000]+ { - eval->AddText(StringPiece(start, p - start)); - continue; - } - "\r\n" { - if (path) - p = start; - break; - } - [ :|\n] { - if (path) { - p = start; - break; - } else { - if (*start == '\n') - break; - eval->AddText(StringPiece(start, 1)); - continue; - } - } - "$$" { - eval->AddText(StringPiece("$", 1)); - continue; - } - "$ " { - eval->AddText(StringPiece(" ", 1)); - continue; - } - "$\r\n"[ ]* { - continue; - } - "$\n"[ ]* { - continue; - } - "${"varname"}" { - eval->AddSpecial(StringPiece(start + 2, p - start - 3)); - continue; - } - "$"simple_varname { - eval->AddSpecial(StringPiece(start + 1, p - start - 1)); - continue; - } - "$:" { - eval->AddText(StringPiece(":", 1)); - continue; - } - "$". { - last_token_ = start; - return Error("bad $-escape (literal $ must be written as $$)", err); - } - nul { - last_token_ = start; - return Error("unexpected EOF", err); - } - [^] { - last_token_ = start; - return Error(DescribeLastError(), err); - } - */ - } - last_token_ = start; - ofs_ = p; - if (path) - EatWhitespace(); - // Non-path strings end in newlines, so there's no whitespace to eat. - return true; -} diff --git a/ninja/src/lexer_test.cc b/ninja/src/lexer_test.cc deleted file mode 100644 index 331d8e1ea91..00000000000 --- a/ninja/src/lexer_test.cc +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "lexer.h" - -#include "eval_env.h" -#include "test.h" - -TEST(Lexer, ReadVarValue) { - Lexer lexer("plain text $var $VaR ${x}\n"); - EvalString eval; - string err; - EXPECT_TRUE(lexer.ReadVarValue(&eval, &err)); - EXPECT_EQ("", err); - EXPECT_EQ("[plain text ][$var][ ][$VaR][ ][$x]", - eval.Serialize()); -} - -TEST(Lexer, ReadEvalStringEscapes) { - Lexer lexer("$ $$ab c$: $\ncde\n"); - EvalString eval; - string err; - EXPECT_TRUE(lexer.ReadVarValue(&eval, &err)); - EXPECT_EQ("", err); - EXPECT_EQ("[ $ab c: cde]", - eval.Serialize()); -} - -TEST(Lexer, ReadIdent) { - Lexer lexer("foo baR baz_123 foo-bar"); - string ident; - EXPECT_TRUE(lexer.ReadIdent(&ident)); - EXPECT_EQ("foo", ident); - EXPECT_TRUE(lexer.ReadIdent(&ident)); - EXPECT_EQ("baR", ident); - EXPECT_TRUE(lexer.ReadIdent(&ident)); - EXPECT_EQ("baz_123", ident); - EXPECT_TRUE(lexer.ReadIdent(&ident)); - EXPECT_EQ("foo-bar", ident); -} - -TEST(Lexer, ReadIdentCurlies) { - // Verify that ReadIdent includes dots in the name, - // but in an expansion $bar.dots stops at the dot. 
- Lexer lexer("foo.dots $bar.dots ${bar.dots}\n"); - string ident; - EXPECT_TRUE(lexer.ReadIdent(&ident)); - EXPECT_EQ("foo.dots", ident); - - EvalString eval; - string err; - EXPECT_TRUE(lexer.ReadVarValue(&eval, &err)); - EXPECT_EQ("", err); - EXPECT_EQ("[$bar][.dots ][$bar.dots]", - eval.Serialize()); -} - -TEST(Lexer, Error) { - Lexer lexer("foo$\nbad $"); - EvalString eval; - string err; - ASSERT_FALSE(lexer.ReadVarValue(&eval, &err)); - EXPECT_EQ("input:2: bad $-escape (literal $ must be written as $$)\n" - "bad $\n" - " ^ near here" - , err); -} - -TEST(Lexer, CommentEOF) { - // Verify we don't run off the end of the string when the EOF is - // mid-comment. - Lexer lexer("# foo"); - Lexer::Token token = lexer.ReadToken(); - EXPECT_EQ(Lexer::ERROR, token); -} - -TEST(Lexer, Tabs) { - // Verify we print a useful error on a disallowed character. - Lexer lexer(" \tfoobar"); - Lexer::Token token = lexer.ReadToken(); - EXPECT_EQ(Lexer::INDENT, token); - token = lexer.ReadToken(); - EXPECT_EQ(Lexer::ERROR, token); - EXPECT_EQ("tabs are not allowed, use spaces", lexer.DescribeLastError()); -} diff --git a/ninja/src/line_printer.cc b/ninja/src/line_printer.cc deleted file mode 100644 index 3227de13975..00000000000 --- a/ninja/src/line_printer.cc +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "line_printer.h" - -#include -#include -#ifdef _WIN32 -#include -#ifndef ENABLE_VIRTUAL_TERMINAL_PROCESSING -#define ENABLE_VIRTUAL_TERMINAL_PROCESSING 0x4 -#endif -#else -#include -#include -#include -#include -#endif - -#include "util.h" - -LinePrinter::LinePrinter() : have_blank_line_(true), console_locked_(false) { - const char* term = getenv("TERM"); -#ifndef _WIN32 - smart_terminal_ = isatty(1) && term && string(term) != "dumb"; -#else - // Disable output buffer. It'd be nice to use line buffering but - // MSDN says: "For some systems, [_IOLBF] provides line - // buffering. However, for Win32, the behavior is the same as _IOFBF - // - Full Buffering." - if (term && string(term) == "dumb") { - smart_terminal_ = false; - } else { - setvbuf(stdout, NULL, _IONBF, 0); - console_ = GetStdHandle(STD_OUTPUT_HANDLE); - CONSOLE_SCREEN_BUFFER_INFO csbi; - smart_terminal_ = GetConsoleScreenBufferInfo(console_, &csbi); - } -#endif -#if 0 - supports_color_ = smart_terminal_; - if (!supports_color_) { - const char* clicolor_force = getenv("CLICOLOR_FORCE"); - supports_color_ = clicolor_force && string(clicolor_force) != "0"; - } -#else - supports_color_ = ShouldBeColorFul(smart_terminal_); -#endif -#ifdef _WIN32 - // Try enabling ANSI escape sequence support on Windows 10 terminals. - if (supports_color_) { - DWORD mode; - if (GetConsoleMode(console_, &mode)) { - SetConsoleMode(console_, mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING); - } - } -#endif -} - -void LinePrinter::Print(string to_print, LineType type) { - if (console_locked_) { - line_buffer_ = to_print; - line_type_ = type; - return; - } - - if (smart_terminal_) { - printf("\r"); // Print over previous line, if any. - // On Windows, calling a C library function writing to stdout also handles - // pausing the executable when the "Pause" key or Ctrl-S is pressed. 
- } - - if (smart_terminal_ && type == ELIDE) { -#ifdef _WIN32 - CONSOLE_SCREEN_BUFFER_INFO csbi; - GetConsoleScreenBufferInfo(console_, &csbi); - - to_print = ElideMiddle(to_print, static_cast(csbi.dwSize.X)); - // We don't want to have the cursor spamming back and forth, so instead of - // printf use WriteConsoleOutput which updates the contents of the buffer, - // but doesn't move the cursor position. - COORD buf_size = { csbi.dwSize.X, 1 }; - COORD zero_zero = { 0, 0 }; - SMALL_RECT target = { - csbi.dwCursorPosition.X, csbi.dwCursorPosition.Y, - static_cast(csbi.dwCursorPosition.X + csbi.dwSize.X - 1), - csbi.dwCursorPosition.Y - }; - vector char_data(csbi.dwSize.X); - for (size_t i = 0; i < static_cast(csbi.dwSize.X); ++i) { - char_data[i].Char.AsciiChar = i < to_print.size() ? to_print[i] : ' '; - char_data[i].Attributes = csbi.wAttributes; - } - WriteConsoleOutput(console_, &char_data[0], buf_size, zero_zero, &target); -#else - // Limit output to width of the terminal if provided so we don't cause - // line-wrapping. - winsize size; - if ((ioctl(STDOUT_FILENO, TIOCGWINSZ, &size) == 0) && size.ws_col) { - to_print = ElideMiddle(to_print, size.ws_col); - } - printf("%s", to_print.c_str()); - printf("\x1B[K"); // Clear to end of line. - fflush(stdout); -#endif - - have_blank_line_ = false; - } else { - printf("%s\n", to_print.c_str()); - } -} - -void LinePrinter::PrintOrBuffer(const char* data, size_t size) { - if (console_locked_) { - output_buffer_.append(data, size); - } else { - // Avoid printf and C strings, since the actual output might contain null - // bytes like UTF-16 does (yuck). 
- fwrite(data, 1, size, stdout); - } -} - -void LinePrinter::PrintOnNewLine(const string& to_print) { - if (console_locked_ && !line_buffer_.empty()) { - output_buffer_.append(line_buffer_); - output_buffer_.append(1, '\n'); - line_buffer_.clear(); - } - if (!have_blank_line_) { - PrintOrBuffer("\n", 1); - } - if (!to_print.empty()) { - PrintOrBuffer(&to_print[0], to_print.size()); - } - have_blank_line_ = to_print.empty() || *to_print.rbegin() == '\n'; -} - -void LinePrinter::SetConsoleLocked(bool locked) { - if (locked == console_locked_) - return; - - if (locked) - PrintOnNewLine(""); - - console_locked_ = locked; - - if (!locked) { - PrintOnNewLine(output_buffer_); - if (!line_buffer_.empty()) { - Print(line_buffer_, line_type_); - } - output_buffer_.clear(); - line_buffer_.clear(); - } -} diff --git a/ninja/src/line_printer.h b/ninja/src/line_printer.h deleted file mode 100644 index 92d4dc4480c..00000000000 --- a/ninja/src/line_printer.h +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_LINE_PRINTER_H_ -#define NINJA_LINE_PRINTER_H_ - -#include -#include -using namespace std; - -/// Prints lines of text, possibly overprinting previously printed lines -/// if the terminal supports it. 
-struct LinePrinter { - LinePrinter(); - - bool is_smart_terminal() const { return smart_terminal_; } - void set_smart_terminal(bool smart) { smart_terminal_ = smart; } - - bool supports_color() const { return supports_color_; } - - enum LineType { - FULL, - ELIDE - }; - /// Overprints the current line. If type is ELIDE, elides to_print to fit on - /// one line. - void Print(string to_print, LineType type); - - /// Prints a string on a new line, not overprinting previous output. - void PrintOnNewLine(const string& to_print); - - /// Lock or unlock the console. Any output sent to the LinePrinter while the - /// console is locked will not be printed until it is unlocked. - void SetConsoleLocked(bool locked); - - private: - /// Whether we can do fancy terminal control codes. - bool smart_terminal_; - - /// Whether we can use ISO 6429 (ANSI) color sequences. - bool supports_color_; - - /// Whether the caret is at the beginning of a blank line. - bool have_blank_line_; - - /// Whether console is locked. - bool console_locked_; - - /// Buffered current line while console is locked. - string line_buffer_; - - /// Buffered line type while console is locked. - LineType line_type_; - - /// Buffered console output while console is locked. - string output_buffer_; - -#ifdef _WIN32 - void* console_; -#endif - - /// Print the given data to the console, or buffer it if it is locked. - void PrintOrBuffer(const char *data, size_t size); -}; - -#endif // NINJA_LINE_PRINTER_H_ diff --git a/ninja/src/manifest_parser.cc b/ninja/src/manifest_parser.cc deleted file mode 100644 index ffd72c52ab6..00000000000 --- a/ninja/src/manifest_parser.cc +++ /dev/null @@ -1,411 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "manifest_parser.h" - -#include -#include -#include - -#include "graph.h" -#include "state.h" -#include "util.h" -#include "version.h" -#include "metrics.h" -#include "debug_flags.h" -#include "disk_interface.h" -ManifestParser::ManifestParser(State* state, DiskInterface* file_reader, - ManifestParserOptions options) - : Parser(state, file_reader), - options_(options), quiet_(false) { - env_ = &state->bindings_; -} - -bool ManifestParser::Parse(const string& filename, const string& input, - string* err) { - METRIC_RECORD("parse build.ninja"); - lexer_.Start(filename, input); - - for (;;) { - Lexer::Token token = lexer_.ReadToken(); - switch (token) { -/* - case Lexer::POOL: - if (!ParsePool(err)) - return false; - break; -*/ - case Lexer::BUILD: - if (!ParseEdge(err)) - return false; - break; - case Lexer::RULE: - if (!ParseRule(err)) - return false; - break; - case Lexer::DEFAULT: - if (!ParseDefault(err)) - return false; - break; - case Lexer::IDENT: { - lexer_.UnreadToken(); - string name; - EvalString let_value; - if(!lexer_.ReadIdent(&name)){ - return lexer_.Error("expected variable name",err); - } - if(lexer_.PeekToken(Lexer::COLON_EQUAL)){ - string value = env_->LookupVariable(name); - char buf[20]; - if(!lexer_.ReadVarValue(&let_value,err)) return false; - string file = let_value.Evaluate(env_); - TimeStamp mtime = 0; - if(!file.empty()) mtime = file_reader_->Stat(file, err); - snprintf(buf,sizeof(buf),"-" "%" PRIx64, mtime); - value.append(buf); - env_->AddBinding(name,value); - } else if(lexer_.PeekToken(Lexer::EQUALS)){ - 
if(!lexer_.ReadVarValue(&let_value,err)) return false; - if (name == "rescript") { - state_ -> rescript_mode_ = true; - } else if(name == "cleaner") { - state_ -> cleaner = let_value.Evaluate(env_); - } else { - string value = let_value.Evaluate(env_); - env_->AddBinding(name, value); - } - } else { - return lexer_.Error("expected = or := ", err); - } - break; - } - case Lexer::INCLUDE: - if (!ParseFileInclude(false, err)) - return false; - break; - case Lexer::SUBNINJA: - if (!ParseFileInclude(true, err)) - return false; - break; - case Lexer::ERROR: { - return lexer_.Error(lexer_.DescribeLastError(), err); - } - case Lexer::TEOF: - return true; - case Lexer::NEWLINE: - break; - default: - return lexer_.Error(string("unexpected ") + Lexer::TokenName(token), - err); - } - } - return false; // not reached -} - - -bool ManifestParser::ParsePool(string* err) { - string name; - if (!lexer_.ReadIdent(&name)) - return lexer_.Error("expected pool name", err); - - if (!ExpectToken(Lexer::NEWLINE, err)) - return false; - - if (state_->LookupPool(name) != NULL) - return lexer_.Error("duplicate pool '" + name + "'", err); - - int depth = -1; - - while (lexer_.PeekToken(Lexer::INDENT)) { - string key; - EvalString value; - if (!ParseLet(&key, &value, err)) - return false; - - if (key == "depth") { - string depth_string = value.Evaluate(env_); - depth = atol(depth_string.c_str()); - if (depth < 0) - return lexer_.Error("invalid pool depth", err); - } else { - return lexer_.Error("unexpected variable '" + key + "'", err); - } - } - - if (depth < 0) - return lexer_.Error("expected 'depth =' line", err); - - state_->AddPool(new Pool(name, depth)); - return true; -} - - -bool ManifestParser::ParseRule(string* err) { - string name; - if (!lexer_.ReadIdent(&name)) - return lexer_.Error("expected rule name", err); - - if (!ExpectToken(Lexer::NEWLINE, err)) - return false; - - if (env_->LookupRuleCurrentScope(name) != NULL) - return lexer_.Error("duplicate rule '" + name + "'", err); - 
- Rule* rule = new Rule(name); // XXX scoped_ptr - - while (lexer_.PeekToken(Lexer::INDENT)) { - string key; - EvalString value; - if (!ParseLet(&key, &value, err)) - return false; - - if (Rule::IsReservedBinding(key)) { - rule->AddBinding(key, value); - } else { - // Die on other keyvals for now; revisit if we want to add a - // scope here. - return lexer_.Error("unexpected variable '" + key + "'", err); - } - } - - if (rule->bindings_["rspfile"].empty() != - rule->bindings_["rspfile_content"].empty()) { - return lexer_.Error("rspfile and rspfile_content need to be " - "both specified", err); - } - - if (rule->bindings_["command"].empty()) - return lexer_.Error("expected 'command =' line", err); - - env_->AddRule(rule); - return true; -} - -bool ManifestParser::ParseLet(string* key, EvalString* value, string* err) { - if (!lexer_.ReadIdent(key)) - return lexer_.Error("expected variable name", err); - if (!ExpectToken(Lexer::EQUALS, err)) - return false; - if (!lexer_.ReadVarValue(value, err)) - return false; - return true; -} - -bool ManifestParser::ParseDefault(string* err) { - EvalString eval; - if (!lexer_.ReadPath(&eval, err)) - return false; - if (eval.empty()) - return lexer_.Error("expected target name", err); - - do { - string path = eval.Evaluate(env_); - string path_err; - uint64_t slash_bits; // Unused because this only does lookup. 
- if (!CanonicalizePath(&path, &slash_bits, &path_err)) - return lexer_.Error(path_err, err); - if (!state_->AddDefault(path, &path_err)) - return lexer_.Error(path_err, err); - - eval.Clear(); - if (!lexer_.ReadPath(&eval, err)) - return false; - } while (!eval.empty()); - - if (!ExpectToken(Lexer::NEWLINE, err)) - return false; - - return true; -} - -bool ManifestParser::ParseEdge(string* err) { - METRIC_RECORD("parse Edge"); - vector ins, outs; - - { - EvalString out; - if (!lexer_.ReadPath(&out, err)) - return false; - while (!out.empty()) { - outs.push_back(out); - - out.Clear(); - if (!lexer_.ReadPath(&out, err)) - return false; - } - } - - // Add all implicit outs, counting how many as we go. - int implicit_outs = 0; - if (lexer_.PeekToken(Lexer::PIPE)) { - for (;;) { - EvalString out; - if (!lexer_.ReadPath(&out, err)) - return err; - if (out.empty()) - break; - outs.push_back(out); - ++implicit_outs; - } - } - - if (outs.empty()) - return lexer_.Error("expected path", err); - - if (!ExpectToken(Lexer::COLON, err)) - return false; - - string rule_name; - if (!lexer_.ReadIdent(&rule_name)) - return lexer_.Error("expected build command name", err); - - const Rule* rule = env_->LookupRule(rule_name); - if (!rule) - return lexer_.Error("unknown build rule '" + rule_name + "'", err); - - for (;;) { - // XXX should we require one path here? - EvalString in; - if (!lexer_.ReadPath(&in, err)) - return false; - if (in.empty()) - break; - ins.push_back(in); - } - - // Add all implicit deps, counting how many as we go. - int implicit = 0; - if (lexer_.PeekToken(Lexer::PIPE)) { - for (;;) { - EvalString in; - if (!lexer_.ReadPath(&in, err)) - return err; - if (in.empty()) - break; - ins.push_back(in); - ++implicit; - } - } - - // Add all order-only deps, counting how many as we go. 
- int order_only = 0; - if (lexer_.PeekToken(Lexer::PIPE2)) { - for (;;) { - EvalString in; - if (!lexer_.ReadPath(&in, err)) - return false; - if (in.empty()) - break; - ins.push_back(in); - ++order_only; - } - } - - if (!ExpectToken(Lexer::NEWLINE, err)) - return false; - - // Bindings on edges are rare, so allocate per-edge envs only when needed. - bool has_indent_token = lexer_.PeekToken(Lexer::INDENT); - BindingEnv* env = has_indent_token ? new BindingEnv(env_) : env_; - while (has_indent_token) { - string key; - EvalString val; - if (!ParseLet(&key, &val, err)) - return false; - - env->AddBinding(key, val.Evaluate(env_)); - has_indent_token = lexer_.PeekToken(Lexer::INDENT); - } - -#if 1 - if (IgnoreGenerator() && rule->GetBinding("generator")){ - return true; - } -#endif - - Edge* edge = state_->AddEdge(rule); - edge->env_ = env; - edge->outputs_.reserve(outs.size()); - for (size_t i = 0, e = outs.size(); i != e; ++i) { - string path = outs[i].Evaluate(env); - string path_err; - uint64_t slash_bits; - if (!CanonicalizePath(&path, &slash_bits, &path_err)) - return lexer_.Error(path_err, err); - if (!state_->AddOut(edge, path, slash_bits)) { - lexer_.Error("multiple rules generate " + path, err); - return false; - } - } - - edge->implicit_outs_ = implicit_outs; - size_t inputs_reserve_size = ins.size(); - string dyndep; - bool hasDynDep = rule -> GetBinding("dyndep"); - if (hasDynDep) { - implicit++; - inputs_reserve_size++; - } - - edge->inputs_.reserve(inputs_reserve_size); - uint64_t dyndep_slash_bits = 0; - - for (vector::iterator i = ins.begin(); i != ins.end(); ++i) { - string path = i->Evaluate(env); - string path_err; - uint64_t slash_bits; - if (!CanonicalizePath(&path, &slash_bits, &path_err)) - return lexer_.Error(path_err, err); - state_->AddIn(edge, path, slash_bits); - if(hasDynDep && i == ins.begin()){ - dyndep = path.substr(0,path.find_last_of('.')) + ".d"; - dyndep_slash_bits = slash_bits; - } - } - edge->implicit_deps_ = implicit; - 
edge->order_only_deps_ = order_only; - - // Lookup, validate, and save any dyndep binding. It will be used later - // to load generated dependency information dynamically, but it must - // be one of our manifest-specified inputs. - if (hasDynDep) { - edge->dyndep_ = state_->GetNode(dyndep, dyndep_slash_bits); - edge->dyndep_->set_dyndep_pending(true); - state_->AddIn(edge, dyndep, dyndep_slash_bits); - } - - return true; -} - -bool ManifestParser::ParseFileInclude(bool new_scope, string* err) { - EvalString eval; - if (!lexer_.ReadPath(&eval, err)) - return false; - string path = eval.Evaluate(env_); - - ManifestParser subparser(state_, file_reader_, options_); - if (new_scope) { - subparser.env_ = new BindingEnv(env_); - } else { - subparser.env_ = env_; - } - - if (!subparser.Load(path, err, &lexer_)) - return false; - - if (!ExpectToken(Lexer::NEWLINE, err)) - return false; - - return true; -} diff --git a/ninja/src/manifest_parser.h b/ninja/src/manifest_parser.h deleted file mode 100644 index 28f176171cb..00000000000 --- a/ninja/src/manifest_parser.h +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef NINJA_MANIFEST_PARSER_H_ -#define NINJA_MANIFEST_PARSER_H_ - -#include "parser.h" - -struct BindingEnv; -struct EvalString; - -enum DupeEdgeAction { - kDupeEdgeActionWarn, - kDupeEdgeActionError, -}; - -enum PhonyCycleAction { - kPhonyCycleActionWarn, - kPhonyCycleActionError, -}; - -struct ManifestParserOptions { - ManifestParserOptions() - : dupe_edge_action_(kDupeEdgeActionWarn), - phony_cycle_action_(kPhonyCycleActionWarn) {} - DupeEdgeAction dupe_edge_action_; - PhonyCycleAction phony_cycle_action_; -}; - -/// Parses .ninja files. -struct ManifestParser : public Parser { - ManifestParser(State* state, DiskInterface* file_reader, - ManifestParserOptions options = ManifestParserOptions()); - - /// Parse a text string of input. Used by tests. - bool ParseTest(const string& input, string* err) { - quiet_ = true; - return Parse("input", input, err); - } - -private: - /// Parse a file, given its contents as a string. - bool Parse(const string& filename, const string& input, string* err); - - /// Parse various statement types. - bool ParsePool(string* err); - bool ParseRule(string* err); - bool ParseLet(string* key, EvalString* val, string* err); - bool ParseEdge(string* err); - bool ParseDefault(string* err); - - /// Parse either a 'subninja' or 'include' line. - bool ParseFileInclude(bool new_scope, string* err); - - BindingEnv* env_; - ManifestParserOptions options_; - bool quiet_; -}; - -#endif // NINJA_MANIFEST_PARSER_H_ diff --git a/ninja/src/manifest_parser_perftest.cc b/ninja/src/manifest_parser_perftest.cc deleted file mode 100644 index 67d11f91664..00000000000 --- a/ninja/src/manifest_parser_perftest.cc +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2014 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Tests manifest parser performance. Expects to be run in ninja's root -// directory. - -#include - -#include -#include -#include -#include - -#ifdef _WIN32 -#include "getopt.h" -#include -#else -#include -#include -#endif - -#include "disk_interface.h" -#include "graph.h" -#include "manifest_parser.h" -#include "metrics.h" -#include "state.h" -#include "util.h" - -bool WriteFakeManifests(const string& dir, string* err) { - RealDiskInterface disk_interface; - TimeStamp mtime = disk_interface.Stat(dir + "/build.ninja", err); - if (mtime != 0) // 0 means that the file doesn't exist yet. - return mtime != -1; - - string command = "python misc/write_fake_manifests.py " + dir; - printf("Creating manifest data..."); fflush(stdout); - int exit_code = system(command.c_str()); - printf("done.\n"); - if (exit_code != 0) - *err = "Failed to run " + command; - return exit_code == 0; -} - -int LoadManifests(bool measure_command_evaluation) { - string err; - RealDiskInterface disk_interface; - State state; - ManifestParser parser(&state, &disk_interface); - if (!parser.Load("build.ninja", &err)) { - fprintf(stderr, "Failed to read test data: %s\n", err.c_str()); - exit(1); - } - // Doing an empty build involves reading the manifest and evaluating all - // commands required for the requested targets. So include command - // evaluation in the perftest by default. 
- int optimization_guard = 0; - if (measure_command_evaluation) - for (size_t i = 0; i < state.edges_.size(); ++i) - optimization_guard += state.edges_[i]->EvaluateCommand().size(); - return optimization_guard; -} - -int main(int argc, char* argv[]) { - bool measure_command_evaluation = true; - int opt; - while ((opt = getopt(argc, argv, const_cast("fh"))) != -1) { - switch (opt) { - case 'f': - measure_command_evaluation = false; - break; - case 'h': - default: - printf("usage: manifest_parser_perftest\n" -"\n" -"options:\n" -" -f only measure manifest load time, not command evaluation time\n" - ); - return 1; - } - } - - const char kManifestDir[] = "build/manifest_perftest"; - - string err; - if (!WriteFakeManifests(kManifestDir, &err)) { - fprintf(stderr, "Failed to write test data: %s\n", err.c_str()); - return 1; - } - - if (chdir(kManifestDir) < 0) - Fatal("chdir: %s", strerror(errno)); - - const int kNumRepetitions = 5; - vector times; - for (int i = 0; i < kNumRepetitions; ++i) { - int64_t start = GetTimeMillis(); - int optimization_guard = LoadManifests(measure_command_evaluation); - int delta = (int)(GetTimeMillis() - start); - printf("%dms (hash: %x)\n", delta, optimization_guard); - times.push_back(delta); - } - - int min = *min_element(times.begin(), times.end()); - int max = *max_element(times.begin(), times.end()); - float total = accumulate(times.begin(), times.end(), 0.0f); - printf("min %dms max %dms avg %.1fms\n", min, max, total / times.size()); -} diff --git a/ninja/src/manifest_parser_test.cc b/ninja/src/manifest_parser_test.cc deleted file mode 100644 index f2b746790a8..00000000000 --- a/ninja/src/manifest_parser_test.cc +++ /dev/null @@ -1,1157 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "manifest_parser.h" - -#include -#include - -#include "graph.h" -#include "state.h" -#include "test.h" - -struct ParserTest : public testing::Test { - void AssertParse(const char* input) { - ManifestParser parser(&state, &fs_); - string err; - EXPECT_TRUE(parser.ParseTest(input, &err)); - ASSERT_EQ("", err); - VerifyGraph(state); - } - - State state; - VirtualFileSystem fs_; -}; - -TEST_F(ParserTest, Empty) { - ASSERT_NO_FATAL_FAILURE(AssertParse("")); -} - -TEST_F(ParserTest, Rules) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"\n" -"rule date\n" -" command = date > $out\n" -"\n" -"build result: cat in_1.cc in-2.O\n")); - - ASSERT_EQ(3u, state.bindings_.GetRules().size()); - const Rule* rule = state.bindings_.GetRules().begin()->second; - EXPECT_EQ("cat", rule->name()); - EXPECT_EQ("[cat ][$in][ > ][$out]", - rule->GetBinding("command")->Serialize()); -} - -TEST_F(ParserTest, RuleAttributes) { - // Check that all of the allowed rule attributes are parsed ok. 
- ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = a\n" -" depfile = a\n" -" deps = a\n" -" description = a\n" -" generator = a\n" -" restat = a\n" -" rspfile = a\n" -" rspfile_content = a\n" -)); -} - -TEST_F(ParserTest, IgnoreIndentedComments) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -" #indented comment\n" -"rule cat\n" -" command = cat $in > $out\n" -" #generator = 1\n" -" restat = 1 # comment\n" -" #comment\n" -"build result: cat in_1.cc in-2.O\n" -" #comment\n")); - - ASSERT_EQ(2u, state.bindings_.GetRules().size()); - const Rule* rule = state.bindings_.GetRules().begin()->second; - EXPECT_EQ("cat", rule->name()); - Edge* edge = state.GetNode("result", 0)->in_edge(); - EXPECT_TRUE(edge->GetBindingBool("restat")); - EXPECT_FALSE(edge->GetBindingBool("generator")); -} - -TEST_F(ParserTest, IgnoreIndentedBlankLines) { - // the indented blanks used to cause parse errors - ASSERT_NO_FATAL_FAILURE(AssertParse( -" \n" -"rule cat\n" -" command = cat $in > $out\n" -" \n" -"build result: cat in_1.cc in-2.O\n" -" \n" -"variable=1\n")); - - // the variable must be in the top level environment - EXPECT_EQ("1", state.bindings_.LookupVariable("variable")); -} - -TEST_F(ParserTest, ResponseFiles) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat_rsp\n" -" command = cat $rspfile > $out\n" -" rspfile = $rspfile\n" -" rspfile_content = $in\n" -"\n" -"build out: cat_rsp in\n" -" rspfile=out.rsp\n")); - - ASSERT_EQ(2u, state.bindings_.GetRules().size()); - const Rule* rule = state.bindings_.GetRules().begin()->second; - EXPECT_EQ("cat_rsp", rule->name()); - EXPECT_EQ("[cat ][$rspfile][ > ][$out]", - rule->GetBinding("command")->Serialize()); - EXPECT_EQ("[$rspfile]", rule->GetBinding("rspfile")->Serialize()); - EXPECT_EQ("[$in]", rule->GetBinding("rspfile_content")->Serialize()); -} - -TEST_F(ParserTest, InNewline) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat_rsp\n" -" command = cat $in_newline > $out\n" -"\n" -"build out: cat_rsp in in2\n" -" 
rspfile=out.rsp\n")); - - ASSERT_EQ(2u, state.bindings_.GetRules().size()); - const Rule* rule = state.bindings_.GetRules().begin()->second; - EXPECT_EQ("cat_rsp", rule->name()); - EXPECT_EQ("[cat ][$in_newline][ > ][$out]", - rule->GetBinding("command")->Serialize()); - - Edge* edge = state.edges_[0]; - EXPECT_EQ("cat in\nin2 > out", edge->EvaluateCommand()); -} - -TEST_F(ParserTest, Variables) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"l = one-letter-test\n" -"rule link\n" -" command = ld $l $extra $with_under -o $out $in\n" -"\n" -"extra = -pthread\n" -"with_under = -under\n" -"build a: link b c\n" -"nested1 = 1\n" -"nested2 = $nested1/2\n" -"build supernested: link x\n" -" extra = $nested2/3\n")); - - ASSERT_EQ(2u, state.edges_.size()); - Edge* edge = state.edges_[0]; - EXPECT_EQ("ld one-letter-test -pthread -under -o a b c", - edge->EvaluateCommand()); - EXPECT_EQ("1/2", state.bindings_.LookupVariable("nested2")); - - edge = state.edges_[1]; - EXPECT_EQ("ld one-letter-test 1/2/3 -under -o supernested x", - edge->EvaluateCommand()); -} - -TEST_F(ParserTest, VariableScope) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"foo = bar\n" -"rule cmd\n" -" command = cmd $foo $in $out\n" -"\n" -"build inner: cmd a\n" -" foo = baz\n" -"build outer: cmd b\n" -"\n" // Extra newline after build line tickles a regression. 
-)); - - ASSERT_EQ(2u, state.edges_.size()); - EXPECT_EQ("cmd baz a inner", state.edges_[0]->EvaluateCommand()); - EXPECT_EQ("cmd bar b outer", state.edges_[1]->EvaluateCommand()); -} - -TEST_F(ParserTest, Continuation) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule link\n" -" command = foo bar $\n" -" baz\n" -"\n" -"build a: link c $\n" -" d e f\n")); - - ASSERT_EQ(2u, state.bindings_.GetRules().size()); - const Rule* rule = state.bindings_.GetRules().begin()->second; - EXPECT_EQ("link", rule->name()); - EXPECT_EQ("[foo bar baz]", rule->GetBinding("command")->Serialize()); -} - -TEST_F(ParserTest, Backslash) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"foo = bar\\baz\n" -"foo2 = bar\\ baz\n" -)); - EXPECT_EQ("bar\\baz", state.bindings_.LookupVariable("foo")); - EXPECT_EQ("bar\\ baz", state.bindings_.LookupVariable("foo2")); -} - -TEST_F(ParserTest, Comment) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"# this is a comment\n" -"foo = not # a comment\n")); - EXPECT_EQ("not # a comment", state.bindings_.LookupVariable("foo")); -} - -TEST_F(ParserTest, Dollars) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule foo\n" -" command = ${out}bar$$baz$$$\n" -"blah\n" -"x = $$dollar\n" -"build $x: foo y\n" -)); - EXPECT_EQ("$dollar", state.bindings_.LookupVariable("x")); -#ifdef _WIN32 - EXPECT_EQ("$dollarbar$baz$blah", state.edges_[0]->EvaluateCommand()); -#else - EXPECT_EQ("'$dollar'bar$baz$blah", state.edges_[0]->EvaluateCommand()); -#endif -} - -TEST_F(ParserTest, EscapeSpaces) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule spaces\n" -" command = something\n" -"build foo$ bar: spaces $$one two$$$ three\n" -)); - EXPECT_TRUE(state.LookupNode("foo bar")); - EXPECT_EQ(state.edges_[0]->outputs_[0]->path(), "foo bar"); - EXPECT_EQ(state.edges_[0]->inputs_[0]->path(), "$one"); - EXPECT_EQ(state.edges_[0]->inputs_[1]->path(), "two$ three"); - EXPECT_EQ(state.edges_[0]->EvaluateCommand(), "something"); -} - -TEST_F(ParserTest, CanonicalizeFile) { - ASSERT_NO_FATAL_FAILURE(AssertParse( 
-"rule cat\n" -" command = cat $in > $out\n" -"build out: cat in/1 in//2\n" -"build in/1: cat\n" -"build in/2: cat\n")); - - EXPECT_TRUE(state.LookupNode("in/1")); - EXPECT_TRUE(state.LookupNode("in/2")); - EXPECT_FALSE(state.LookupNode("in//1")); - EXPECT_FALSE(state.LookupNode("in//2")); -} - -#ifdef _WIN32 -TEST_F(ParserTest, CanonicalizeFileBackslashes) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build out: cat in\\1 in\\\\2\n" -"build in\\1: cat\n" -"build in\\2: cat\n")); - - Node* node = state.LookupNode("in/1");; - EXPECT_TRUE(node); - EXPECT_EQ(1, node->slash_bits()); - node = state.LookupNode("in/2"); - EXPECT_TRUE(node); - EXPECT_EQ(1, node->slash_bits()); - EXPECT_FALSE(state.LookupNode("in//1")); - EXPECT_FALSE(state.LookupNode("in//2")); -} -#endif - -TEST_F(ParserTest, PathVariables) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"dir = out\n" -"build $dir/exe: cat src\n")); - - EXPECT_FALSE(state.LookupNode("$dir/exe")); - EXPECT_TRUE(state.LookupNode("out/exe")); -} - -TEST_F(ParserTest, CanonicalizePaths) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build ./out.o: cat ./bar/baz/../foo.cc\n")); - - EXPECT_FALSE(state.LookupNode("./out.o")); - EXPECT_TRUE(state.LookupNode("out.o")); - EXPECT_FALSE(state.LookupNode("./bar/baz/../foo.cc")); - EXPECT_TRUE(state.LookupNode("bar/foo.cc")); -} - -#ifdef _WIN32 -TEST_F(ParserTest, CanonicalizePathsBackslashes) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build ./out.o: cat ./bar/baz/../foo.cc\n" -"build .\\out2.o: cat .\\bar/baz\\..\\foo.cc\n" -"build .\\out3.o: cat .\\bar\\baz\\..\\foo3.cc\n" -)); - - EXPECT_FALSE(state.LookupNode("./out.o")); - EXPECT_FALSE(state.LookupNode(".\\out2.o")); - EXPECT_FALSE(state.LookupNode(".\\out3.o")); - EXPECT_TRUE(state.LookupNode("out.o")); - EXPECT_TRUE(state.LookupNode("out2.o")); - 
EXPECT_TRUE(state.LookupNode("out3.o")); - EXPECT_FALSE(state.LookupNode("./bar/baz/../foo.cc")); - EXPECT_FALSE(state.LookupNode(".\\bar/baz\\..\\foo.cc")); - EXPECT_FALSE(state.LookupNode(".\\bar/baz\\..\\foo3.cc")); - Node* node = state.LookupNode("bar/foo.cc"); - EXPECT_TRUE(node); - EXPECT_EQ(0, node->slash_bits()); - node = state.LookupNode("bar/foo3.cc"); - EXPECT_TRUE(node); - EXPECT_EQ(1, node->slash_bits()); -} -#endif - -TEST_F(ParserTest, DuplicateEdgeWithMultipleOutputs) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build out1 out2: cat in1\n" -"build out1: cat in2\n" -"build final: cat out1\n" -)); - // AssertParse() checks that the generated build graph is self-consistent. - // That's all the checking that this test needs. -} - -TEST_F(ParserTest, NoDeadPointerFromDuplicateEdge) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build out: cat in\n" -"build out: cat in\n" -)); - // AssertParse() checks that the generated build graph is self-consistent. - // That's all the checking that this test needs. 
-} - -TEST_F(ParserTest, DuplicateEdgeWithMultipleOutputsError) { - const char kInput[] = -"rule cat\n" -" command = cat $in > $out\n" -"build out1 out2: cat in1\n" -"build out1: cat in2\n" -"build final: cat out1\n"; - ManifestParserOptions parser_opts; - parser_opts.dupe_edge_action_ = kDupeEdgeActionError; - ManifestParser parser(&state, &fs_, parser_opts); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("input:5: multiple rules generate out1 [-w dupbuild=err]\n", err); -} - -TEST_F(ParserTest, DuplicateEdgeInIncludedFile) { - fs_.Create("sub.ninja", - "rule cat\n" - " command = cat $in > $out\n" - "build out1 out2: cat in1\n" - "build out1: cat in2\n" - "build final: cat out1\n"); - const char kInput[] = - "subninja sub.ninja\n"; - ManifestParserOptions parser_opts; - parser_opts.dupe_edge_action_ = kDupeEdgeActionError; - ManifestParser parser(&state, &fs_, parser_opts); - string err; - EXPECT_FALSE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("sub.ninja:5: multiple rules generate out1 [-w dupbuild=err]\n", - err); -} - -TEST_F(ParserTest, PhonySelfReferenceIgnored) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"build a: phony a\n" -)); - - Node* node = state.LookupNode("a"); - Edge* edge = node->in_edge(); - ASSERT_TRUE(edge->inputs_.empty()); -} - -TEST_F(ParserTest, PhonySelfReferenceKept) { - const char kInput[] = -"build a: phony a\n"; - ManifestParserOptions parser_opts; - parser_opts.phony_cycle_action_ = kPhonyCycleActionError; - ManifestParser parser(&state, &fs_, parser_opts); - string err; - EXPECT_TRUE(parser.ParseTest(kInput, &err)); - EXPECT_EQ("", err); - - Node* node = state.LookupNode("a"); - Edge* edge = node->in_edge(); - ASSERT_EQ(edge->inputs_.size(), 1); - ASSERT_EQ(edge->inputs_[0], node); -} - -TEST_F(ParserTest, ReservedWords) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule build\n" -" command = rule run $out\n" -"build subninja: build include default foo.cc\n" -"default subninja\n")); -} - -TEST_F(ParserTest, 
Errors) { - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest(string("subn", 4), &err)); - EXPECT_EQ("input:1: expected '=', got eof\n" - "subn\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("foobar", &err)); - EXPECT_EQ("input:1: expected '=', got eof\n" - "foobar\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("x 3", &err)); - EXPECT_EQ("input:1: expected '=', got identifier\n" - "x 3\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("x = 3", &err)); - EXPECT_EQ("input:1: unexpected EOF\n" - "x = 3\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("x = 3\ny 2", &err)); - EXPECT_EQ("input:2: expected '=', got identifier\n" - "y 2\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("x = $", &err)); - EXPECT_EQ("input:1: bad $-escape (literal $ must be written as $$)\n" - "x = $\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("x = $\n $[\n", &err)); - EXPECT_EQ("input:2: bad $-escape (literal $ must be written as $$)\n" - " $[\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("x = a$\n b$\n $\n", &err)); - EXPECT_EQ("input:4: unexpected EOF\n" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("build\n", &err)); - 
EXPECT_EQ("input:1: expected path\n" - "build\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("build x: y z\n", &err)); - EXPECT_EQ("input:1: unknown build rule 'y'\n" - "build x: y z\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("build x:: y z\n", &err)); - EXPECT_EQ("input:1: expected build command name\n" - "build x:: y z\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cat\n command = cat ok\n" - "build x: cat $\n :\n", - &err)); - EXPECT_EQ("input:4: expected newline, got ':'\n" - " :\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cat\n", - &err)); - EXPECT_EQ("input:2: expected 'command =' line\n", err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cat\n" - " command = echo\n" - "rule cat\n" - " command = echo\n", &err)); - EXPECT_EQ("input:3: duplicate rule 'cat'\n" - "rule cat\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cat\n" - " command = echo\n" - " rspfile = cat.rsp\n", &err)); - EXPECT_EQ( - "input:4: rspfile and rspfile_content need to be both specified\n", - err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cat\n" - " command = ${fafsd\n" - "foo = bar\n", - &err)); - EXPECT_EQ("input:2: bad $-escape (literal $ must be written as $$)\n" - " command = ${fafsd\n" - " ^ near here" - , err); - } - - - { - State local_state; - ManifestParser 
parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cat\n" - " command = cat\n" - "build $.: cat foo\n", - &err)); - EXPECT_EQ("input:3: bad $-escape (literal $ must be written as $$)\n" - "build $.: cat foo\n" - " ^ near here" - , err); - } - - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cat\n" - " command = cat\n" - "build $: cat foo\n", - &err)); - EXPECT_EQ("input:3: expected ':', got newline ($ also escapes ':')\n" - "build $: cat foo\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule %foo\n", - &err)); - EXPECT_EQ("input:1: expected rule name\n" - "rule %foo\n" - " ^ near here", - err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cc\n" - " command = foo\n" - " othervar = bar\n", - &err)); - EXPECT_EQ("input:3: unexpected variable 'othervar'\n" - " othervar = bar\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cc\n command = foo\n" - "build $.: cc bar.cc\n", - &err)); - EXPECT_EQ("input:3: bad $-escape (literal $ must be written as $$)\n" - "build $.: cc bar.cc\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cc\n command = foo\n && bar", - &err)); - EXPECT_EQ("input:3: expected variable name\n" - " && bar\n" - " ^ near here", - err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cc\n command = foo\n" - "build $: cc bar.cc\n", - &err)); - EXPECT_EQ("input:3: expected ':', got newline ($ also escapes ':')\n" - "build $: cc bar.cc\n" - " ^ near here" - , err); - } - - { - 
State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("default\n", - &err)); - EXPECT_EQ("input:1: expected target name\n" - "default\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("default nonexistent\n", - &err)); - EXPECT_EQ("input:1: unknown target 'nonexistent'\n" - "default nonexistent\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule r\n command = r\n" - "build b: r\n" - "default b:\n", - &err)); - EXPECT_EQ("input:4: expected newline, got ':'\n" - "default b:\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("default $a\n", &err)); - EXPECT_EQ("input:1: empty path\n" - "default $a\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule r\n" - " command = r\n" - "build $a: r $c\n", &err)); - // XXX the line number is wrong; we should evaluate paths in ParseEdge - // as we see them, not after we've read them all! 
- EXPECT_EQ("input:4: empty path\n", err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - // the indented blank line must terminate the rule - // this also verifies that "unexpected (token)" errors are correct - EXPECT_FALSE(parser.ParseTest("rule r\n" - " command = r\n" - " \n" - " generator = 1\n", &err)); - EXPECT_EQ("input:4: unexpected indent\n", err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("pool\n", &err)); - EXPECT_EQ("input:1: expected pool name\n" - "pool\n" - " ^ near here", err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("pool foo\n", &err)); - EXPECT_EQ("input:2: expected 'depth =' line\n", err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("pool foo\n" - " depth = 4\n" - "pool foo\n", &err)); - EXPECT_EQ("input:3: duplicate pool 'foo'\n" - "pool foo\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("pool foo\n" - " depth = -1\n", &err)); - EXPECT_EQ("input:2: invalid pool depth\n" - " depth = -1\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("pool foo\n" - " bar = 1\n", &err)); - EXPECT_EQ("input:2: unexpected variable 'bar'\n" - " bar = 1\n" - " ^ near here" - , err); - } - - { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - // Pool names are dereferenced at edge parsing time. 
- EXPECT_FALSE(parser.ParseTest("rule run\n" - " command = echo\n" - " pool = unnamed_pool\n" - "build out: run in\n", &err)); - EXPECT_EQ("input:5: unknown pool name 'unnamed_pool'\n", err); - } -} - -TEST_F(ParserTest, MissingInput) { - State local_state; - ManifestParser parser(&local_state, &fs_); - string err; - EXPECT_FALSE(parser.Load("build.ninja", &err)); - EXPECT_EQ("loading 'build.ninja': No such file or directory", err); -} - -TEST_F(ParserTest, MultipleOutputs) { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_TRUE(parser.ParseTest("rule cc\n command = foo\n depfile = bar\n" - "build a.o b.o: cc c.cc\n", - &err)); - EXPECT_EQ("", err); -} - -TEST_F(ParserTest, MultipleOutputsWithDeps) { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - EXPECT_FALSE(parser.ParseTest("rule cc\n command = foo\n deps = gcc\n" - "build a.o b.o: cc c.cc\n", - &err)); - EXPECT_EQ("input:5: multiple outputs aren't (yet?) supported by depslog; " - "bring this up on the mailing list if it affects you\n", err); -} - -TEST_F(ParserTest, SubNinja) { - fs_.Create("test.ninja", - "var = inner\n" - "build $builddir/inner: varref\n"); - ASSERT_NO_FATAL_FAILURE(AssertParse( -"builddir = some_dir/\n" -"rule varref\n" -" command = varref $var\n" -"var = outer\n" -"build $builddir/outer: varref\n" -"subninja test.ninja\n" -"build $builddir/outer2: varref\n")); - ASSERT_EQ(1u, fs_.files_read_.size()); - - EXPECT_EQ("test.ninja", fs_.files_read_[0]); - EXPECT_TRUE(state.LookupNode("some_dir/outer")); - // Verify our builddir setting is inherited. 
- EXPECT_TRUE(state.LookupNode("some_dir/inner")); - - ASSERT_EQ(3u, state.edges_.size()); - EXPECT_EQ("varref outer", state.edges_[0]->EvaluateCommand()); - EXPECT_EQ("varref inner", state.edges_[1]->EvaluateCommand()); - EXPECT_EQ("varref outer", state.edges_[2]->EvaluateCommand()); -} - -TEST_F(ParserTest, MissingSubNinja) { - ManifestParser parser(&state, &fs_); - string err; - EXPECT_FALSE(parser.ParseTest("subninja foo.ninja\n", &err)); - EXPECT_EQ("input:1: loading 'foo.ninja': No such file or directory\n" - "subninja foo.ninja\n" - " ^ near here" - , err); -} - -TEST_F(ParserTest, DuplicateRuleInDifferentSubninjas) { - // Test that rules are scoped to subninjas. - fs_.Create("test.ninja", "rule cat\n" - " command = cat\n"); - ManifestParser parser(&state, &fs_); - string err; - EXPECT_TRUE(parser.ParseTest("rule cat\n" - " command = cat\n" - "subninja test.ninja\n", &err)); -} - -TEST_F(ParserTest, DuplicateRuleInDifferentSubninjasWithInclude) { - // Test that rules are scoped to subninjas even with includes. 
- fs_.Create("rules.ninja", "rule cat\n" - " command = cat\n"); - fs_.Create("test.ninja", "include rules.ninja\n" - "build x : cat\n"); - ManifestParser parser(&state, &fs_); - string err; - EXPECT_TRUE(parser.ParseTest("include rules.ninja\n" - "subninja test.ninja\n" - "build y : cat\n", &err)); -} - -TEST_F(ParserTest, Include) { - fs_.Create("include.ninja", "var = inner\n"); - ASSERT_NO_FATAL_FAILURE(AssertParse( -"var = outer\n" -"include include.ninja\n")); - - ASSERT_EQ(1u, fs_.files_read_.size()); - EXPECT_EQ("include.ninja", fs_.files_read_[0]); - EXPECT_EQ("inner", state.bindings_.LookupVariable("var")); -} - -TEST_F(ParserTest, BrokenInclude) { - fs_.Create("include.ninja", "build\n"); - ManifestParser parser(&state, &fs_); - string err; - EXPECT_FALSE(parser.ParseTest("include include.ninja\n", &err)); - EXPECT_EQ("include.ninja:1: expected path\n" - "build\n" - " ^ near here" - , err); -} - -TEST_F(ParserTest, Implicit) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build foo: cat bar | baz\n")); - - Edge* edge = state.LookupNode("foo")->in_edge(); - ASSERT_TRUE(edge->is_implicit(1)); -} - -TEST_F(ParserTest, OrderOnly) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n command = cat $in > $out\n" -"build foo: cat bar || baz\n")); - - Edge* edge = state.LookupNode("foo")->in_edge(); - ASSERT_TRUE(edge->is_order_only(1)); -} - -TEST_F(ParserTest, ImplicitOutput) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build foo | imp: cat bar\n")); - - Edge* edge = state.LookupNode("imp")->in_edge(); - ASSERT_EQ(edge->outputs_.size(), 2); - EXPECT_TRUE(edge->is_implicit_out(1)); -} - -TEST_F(ParserTest, ImplicitOutputEmpty) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build foo | : cat bar\n")); - - Edge* edge = state.LookupNode("foo")->in_edge(); - ASSERT_EQ(edge->outputs_.size(), 1); - EXPECT_FALSE(edge->is_implicit_out(0)); -} - 
-TEST_F(ParserTest, ImplicitOutputDupe) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build foo baz | foo baq foo: cat bar\n")); - - Edge* edge = state.LookupNode("foo")->in_edge(); - ASSERT_EQ(edge->outputs_.size(), 3); - EXPECT_FALSE(edge->is_implicit_out(0)); - EXPECT_FALSE(edge->is_implicit_out(1)); - EXPECT_TRUE(edge->is_implicit_out(2)); -} - -TEST_F(ParserTest, ImplicitOutputDupes) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build foo foo foo | foo foo foo foo: cat bar\n")); - - Edge* edge = state.LookupNode("foo")->in_edge(); - ASSERT_EQ(edge->outputs_.size(), 1); - EXPECT_FALSE(edge->is_implicit_out(0)); -} - -TEST_F(ParserTest, NoExplicitOutput) { - ManifestParser parser(&state, NULL); - string err; - EXPECT_TRUE(parser.ParseTest( -"rule cat\n" -" command = cat $in > $out\n" -"build | imp : cat bar\n", &err)); -} - -TEST_F(ParserTest, DefaultDefault) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n command = cat $in > $out\n" -"build a: cat foo\n" -"build b: cat foo\n" -"build c: cat foo\n" -"build d: cat foo\n")); - - string err; - EXPECT_EQ(4u, state.DefaultNodes(&err).size()); - EXPECT_EQ("", err); -} - -TEST_F(ParserTest, DefaultDefaultCycle) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n command = cat $in > $out\n" -"build a: cat a\n")); - - string err; - EXPECT_EQ(0u, state.DefaultNodes(&err).size()); - EXPECT_EQ("could not determine root nodes of build graph", err); -} - -TEST_F(ParserTest, DefaultStatements) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n command = cat $in > $out\n" -"build a: cat foo\n" -"build b: cat foo\n" -"build c: cat foo\n" -"build d: cat foo\n" -"third = c\n" -"default a b\n" -"default $third\n")); - - string err; - vector nodes = state.DefaultNodes(&err); - EXPECT_EQ("", err); - ASSERT_EQ(3u, nodes.size()); - EXPECT_EQ("a", nodes[0]->path()); - EXPECT_EQ("b", nodes[1]->path()); - EXPECT_EQ("c", nodes[2]->path()); -} - 
-TEST_F(ParserTest, UTF8) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule utf8\n" -" command = true\n" -" description = compilaci\xC3\xB3\n")); -} - -TEST_F(ParserTest, CRLF) { - State local_state; - ManifestParser parser(&local_state, NULL); - string err; - - EXPECT_TRUE(parser.ParseTest("# comment with crlf\r\n", &err)); - EXPECT_TRUE(parser.ParseTest("foo = foo\nbar = bar\r\n", &err)); - EXPECT_TRUE(parser.ParseTest( - "pool link_pool\r\n" - " depth = 15\r\n\r\n" - "rule xyz\r\n" - " command = something$expand \r\n" - " description = YAY!\r\n", - &err)); -} - -TEST_F(ParserTest, DyndepNotSpecified) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build result: cat in\n")); - Edge* edge = state.GetNode("result", 0)->in_edge(); - ASSERT_FALSE(edge->dyndep_); -} - -TEST_F(ParserTest, DyndepNotInput) { - State lstate; - ManifestParser parser(&lstate, NULL); - string err; - EXPECT_FALSE(parser.ParseTest( -"rule touch\n" -" command = touch $out\n" -"build result: touch\n" -" dyndep = notin\n", - &err)); - EXPECT_EQ("input:5: dyndep 'notin' is not an input\n", err); -} - -TEST_F(ParserTest, DyndepExplicitInput) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build result: cat in\n" -" dyndep = in\n")); - Edge* edge = state.GetNode("result", 0)->in_edge(); - ASSERT_TRUE(edge->dyndep_); - EXPECT_TRUE(edge->dyndep_->dyndep_pending()); - EXPECT_EQ(edge->dyndep_->path(), "in"); -} - -TEST_F(ParserTest, DyndepImplicitInput) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build result: cat in | dd\n" -" dyndep = dd\n")); - Edge* edge = state.GetNode("result", 0)->in_edge(); - ASSERT_TRUE(edge->dyndep_); - EXPECT_TRUE(edge->dyndep_->dyndep_pending()); - EXPECT_EQ(edge->dyndep_->path(), "dd"); -} - -TEST_F(ParserTest, DyndepOrderOnlyInput) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -"build result: cat in || dd\n" -" 
dyndep = dd\n")); - Edge* edge = state.GetNode("result", 0)->in_edge(); - ASSERT_TRUE(edge->dyndep_); - EXPECT_TRUE(edge->dyndep_->dyndep_pending()); - EXPECT_EQ(edge->dyndep_->path(), "dd"); -} - -TEST_F(ParserTest, DyndepRuleInput) { - ASSERT_NO_FATAL_FAILURE(AssertParse( -"rule cat\n" -" command = cat $in > $out\n" -" dyndep = $in\n" -"build result: cat in\n")); - Edge* edge = state.GetNode("result", 0)->in_edge(); - ASSERT_TRUE(edge->dyndep_); - EXPECT_TRUE(edge->dyndep_->dyndep_pending()); - EXPECT_EQ(edge->dyndep_->path(), "in"); -} diff --git a/ninja/src/metrics.cc b/ninja/src/metrics.cc deleted file mode 100644 index 9a08d81137e..00000000000 --- a/ninja/src/metrics.cc +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "metrics.h" - -#include -#include -#include - -#ifndef _WIN32 -#include -#else -#include -#endif - -#include - -#include "util.h" - -Metrics* g_metrics = NULL; - -namespace { - -#ifndef _WIN32 -/// Compute a platform-specific high-res timer value that fits into an int64. -int64_t HighResTimer() { - timeval tv; - if (gettimeofday(&tv, NULL) < 0) - Fatal("gettimeofday: %s", strerror(errno)); - return (int64_t)tv.tv_sec * 1000*1000 + tv.tv_usec; -} - -/// Convert a delta of HighResTimer() values to microseconds. -int64_t TimerToMicros(int64_t dt) { - // No conversion necessary. 
- return dt; -} -#else -int64_t LargeIntegerToInt64(const LARGE_INTEGER& i) { - return ((int64_t)i.HighPart) << 32 | i.LowPart; -} - -int64_t HighResTimer() { - LARGE_INTEGER counter; - if (!QueryPerformanceCounter(&counter)) - Fatal("QueryPerformanceCounter: %s", GetLastErrorString().c_str()); - return LargeIntegerToInt64(counter); -} - -int64_t TimerToMicros(int64_t dt) { - static int64_t ticks_per_sec = 0; - if (!ticks_per_sec) { - LARGE_INTEGER freq; - if (!QueryPerformanceFrequency(&freq)) - Fatal("QueryPerformanceFrequency: %s", GetLastErrorString().c_str()); - ticks_per_sec = LargeIntegerToInt64(freq); - } - - // dt is in ticks. We want microseconds. - return (dt * 1000000) / ticks_per_sec; -} -#endif - -} // anonymous namespace - - -ScopedMetric::ScopedMetric(Metric* metric) { - metric_ = metric; - if (!metric_) - return; - start_ = HighResTimer(); -} -ScopedMetric::~ScopedMetric() { - if (!metric_) - return; - metric_->count++; - int64_t dt = TimerToMicros(HighResTimer() - start_); - metric_->sum += dt; - metric_->maximum = max(dt,metric_->maximum ); -} - -Metric* Metrics::NewMetric(const string& name) { - Metric* metric = new Metric; - metric->name = name; - metric->count = 0; - metric->sum = 0; - metric->maximum = 0; - metrics_.push_back(metric); - return metric; -} - -void Metrics::Report() { - int width = 0; - for (vector::iterator i = metrics_.begin(); - i != metrics_.end(); ++i) { - width = max((int)(*i)->name.size(), width); - } - - printf("%-*s\t%-6s\t%-9s\t%-9s\t%s\n", width, - "metric", "count", "avg (us)", "maximum (us)","total (ms)"); - for (vector::iterator i = metrics_.begin(); - i != metrics_.end(); ++i) { - Metric* metric = *i; - double total = metric->sum / (double)1000; - double avg = metric->sum / (double)metric->count; - printf("%-*s\t%-6d\t%-8.1f\t%-8.1f\t%.1f\n", width, metric->name.c_str(), - metric->count, avg, (double)metric->maximum,total); - } -} - -uint64_t Stopwatch::Now() const { - return TimerToMicros(HighResTimer()); -} - 
-int64_t GetTimeMillis() { - return TimerToMicros(HighResTimer()) / 1000; -} - diff --git a/ninja/src/metrics.h b/ninja/src/metrics.h deleted file mode 100644 index 1359ddb5917..00000000000 --- a/ninja/src/metrics.h +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_METRICS_H_ -#define NINJA_METRICS_H_ - -#include -#include -using namespace std; - -#include "util.h" // For int64_t. - -/// The Metrics module is used for the debug mode that dumps timing stats of -/// various actions. To use, see METRIC_RECORD below. - -/// A single metrics we're tracking, like "depfile load time". -struct Metric { - string name; - /// Number of times we've hit the code path. - int count; - /// Total time (in micros) we've spent on the code path. - int64_t sum; - int64_t maximum; -}; - - -/// A scoped object for recording a metric across the body of a function. -/// Used by the METRIC_RECORD macro. -struct ScopedMetric { - explicit ScopedMetric(Metric* metric); - ~ScopedMetric(); - -private: - Metric* metric_; - /// Timestamp when the measurement started. - /// Value is platform-dependent. - int64_t start_; -}; - -/// The singleton that stores metrics and prints the report. -struct Metrics { - Metric* NewMetric(const string& name); - - /// Print a summary report to stdout. - void Report(); - -private: - vector metrics_; -}; - -/// Get the current time as relative to some epoch. 
-/// Epoch varies between platforms; only useful for measuring elapsed time. -int64_t GetTimeMillis(); - -/// A simple stopwatch which returns the time -/// in seconds since Restart() was called. -struct Stopwatch { - public: - Stopwatch() : started_(0) {} - - /// Seconds since Restart() call. - double Elapsed() const { - return 1e-6 * static_cast(Now() - started_); - } - - void Restart() { started_ = Now(); } - - private: - uint64_t started_; - uint64_t Now() const; -}; - -/// The primary interface to metrics. Use METRIC_RECORD("foobar") at the top -/// of a function to get timing stats recorded for each call of the function. -#if 0 -#define METRIC_RECORD(name) \ - static Metric* metrics_h_metric = \ - g_metrics ? g_metrics->NewMetric(name) : NULL; \ - ScopedMetric metrics_h_scoped(metrics_h_metric); -#else -#define METRIC_RECORD(name) do {} while(0) -#endif -extern Metrics* g_metrics; - -#endif // NINJA_METRICS_H_ diff --git a/ninja/src/minidump-win32.cc b/ninja/src/minidump-win32.cc deleted file mode 100644 index ca936387bd0..00000000000 --- a/ninja/src/minidump-win32.cc +++ /dev/null @@ -1,87 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifdef _MSC_VER - -#include -#include - -#include "util.h" - -typedef BOOL (WINAPI *MiniDumpWriteDumpFunc) ( - IN HANDLE, - IN DWORD, - IN HANDLE, - IN MINIDUMP_TYPE, - IN CONST PMINIDUMP_EXCEPTION_INFORMATION, OPTIONAL - IN CONST PMINIDUMP_USER_STREAM_INFORMATION, OPTIONAL - IN CONST PMINIDUMP_CALLBACK_INFORMATION OPTIONAL - ); - -/// Creates a windows minidump in temp folder. -void CreateWin32MiniDump(_EXCEPTION_POINTERS* pep) { - char temp_path[MAX_PATH]; - GetTempPathA(sizeof(temp_path), temp_path); - char temp_file[MAX_PATH]; - sprintf(temp_file, "%s\\ninja_crash_dump_%lu.dmp", - temp_path, GetCurrentProcessId()); - - // Delete any previous minidump of the same name. - DeleteFileA(temp_file); - - // Load DbgHelp.dll dynamically, as library is not present on all - // Windows versions. - HMODULE dbghelp = LoadLibraryA("dbghelp.dll"); - if (dbghelp == NULL) { - Error("failed to create minidump: LoadLibrary('dbghelp.dll'): %s", - GetLastErrorString().c_str()); - return; - } - - MiniDumpWriteDumpFunc mini_dump_write_dump = - (MiniDumpWriteDumpFunc)GetProcAddress(dbghelp, "MiniDumpWriteDump"); - if (mini_dump_write_dump == NULL) { - Error("failed to create minidump: GetProcAddress('MiniDumpWriteDump'): %s", - GetLastErrorString().c_str()); - return; - } - - HANDLE hFile = CreateFileA(temp_file, GENERIC_READ | GENERIC_WRITE, 0, NULL, - CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL); - if (hFile == NULL) { - Error("failed to create minidump: CreateFileA(%s): %s", - temp_file, GetLastErrorString().c_str()); - return; - } - - MINIDUMP_EXCEPTION_INFORMATION mdei; - mdei.ThreadId = GetCurrentThreadId(); - mdei.ExceptionPointers = pep; - mdei.ClientPointers = FALSE; - MINIDUMP_TYPE mdt = (MINIDUMP_TYPE) (MiniDumpWithDataSegs | - MiniDumpWithHandleData); - - BOOL rv = mini_dump_write_dump(GetCurrentProcess(), GetCurrentProcessId(), - hFile, mdt, (pep != 0) ? 
&mdei : 0, 0, 0); - CloseHandle(hFile); - - if (!rv) { - Error("MiniDumpWriteDump failed: %s", GetLastErrorString().c_str()); - return; - } - - Warning("minidump created: %s", temp_file); -} - -#endif // _MSC_VER diff --git a/ninja/src/msvc_helper-win32.cc b/ninja/src/msvc_helper-win32.cc deleted file mode 100644 index de6147a5e5e..00000000000 --- a/ninja/src/msvc_helper-win32.cc +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "msvc_helper.h" - -#include - -#include "util.h" - -namespace { - -string Replace(const string& input, const string& find, const string& replace) { - string result = input; - size_t start_pos = 0; - while ((start_pos = result.find(find, start_pos)) != string::npos) { - result.replace(start_pos, find.length(), replace); - start_pos += replace.length(); - } - return result; -} - -} // anonymous namespace - -string EscapeForDepfile(const string& path) { - // Depfiles don't escape single \. - return Replace(path, " ", "\\ "); -} - -int CLWrapper::Run(const string& command, string* output) { - SECURITY_ATTRIBUTES security_attributes = {}; - security_attributes.nLength = sizeof(SECURITY_ATTRIBUTES); - security_attributes.bInheritHandle = TRUE; - - // Must be inheritable so subprocesses can dup to children. 
- HANDLE nul = - CreateFileA("NUL", GENERIC_READ, - FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, - &security_attributes, OPEN_EXISTING, 0, NULL); - if (nul == INVALID_HANDLE_VALUE) - Fatal("couldn't open nul"); - - HANDLE stdout_read, stdout_write; - if (!CreatePipe(&stdout_read, &stdout_write, &security_attributes, 0)) - Win32Fatal("CreatePipe"); - - if (!SetHandleInformation(stdout_read, HANDLE_FLAG_INHERIT, 0)) - Win32Fatal("SetHandleInformation"); - - PROCESS_INFORMATION process_info = {}; - STARTUPINFOA startup_info = {}; - startup_info.cb = sizeof(STARTUPINFOA); - startup_info.hStdInput = nul; - startup_info.hStdError = ::GetStdHandle(STD_ERROR_HANDLE); - startup_info.hStdOutput = stdout_write; - startup_info.dwFlags |= STARTF_USESTDHANDLES; - - if (!CreateProcessA(NULL, (char*)command.c_str(), NULL, NULL, - /* inherit handles */ TRUE, 0, - env_block_, NULL, - &startup_info, &process_info)) { - Win32Fatal("CreateProcess"); - } - - if (!CloseHandle(nul) || - !CloseHandle(stdout_write)) { - Win32Fatal("CloseHandle"); - } - - // Read all output of the subprocess. - DWORD read_len = 1; - while (read_len) { - char buf[64 << 10]; - read_len = 0; - if (!::ReadFile(stdout_read, buf, sizeof(buf), &read_len, NULL) && - GetLastError() != ERROR_BROKEN_PIPE) { - Win32Fatal("ReadFile"); - } - output->append(buf, read_len); - } - - // Wait for it to exit and grab its exit code. 
- if (WaitForSingleObject(process_info.hProcess, INFINITE) == WAIT_FAILED) - Win32Fatal("WaitForSingleObject"); - DWORD exit_code = 0; - if (!GetExitCodeProcess(process_info.hProcess, &exit_code)) - Win32Fatal("GetExitCodeProcess"); - - if (!CloseHandle(stdout_read) || - !CloseHandle(process_info.hProcess) || - !CloseHandle(process_info.hThread)) { - Win32Fatal("CloseHandle"); - } - - return exit_code; -} diff --git a/ninja/src/msvc_helper.h b/ninja/src/msvc_helper.h deleted file mode 100644 index 70d1fff794f..00000000000 --- a/ninja/src/msvc_helper.h +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include -using namespace std; - -string EscapeForDepfile(const string& path); - -/// Wraps a synchronous execution of a CL subprocess. -struct CLWrapper { - CLWrapper() : env_block_(NULL) {} - - /// Set the environment block (as suitable for CreateProcess) to be used - /// by Run(). - void SetEnvBlock(void* env_block) { env_block_ = env_block; } - - /// Start a process and gather its raw output. Returns its exit code. - /// Crashes (calls Fatal()) on error. - int Run(const string& command, string* output); - - void* env_block_; -}; diff --git a/ninja/src/msvc_helper_main-win32.cc b/ninja/src/msvc_helper_main-win32.cc deleted file mode 100644 index 644b2a2e244..00000000000 --- a/ninja/src/msvc_helper_main-win32.cc +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright 2011 Google Inc. 
All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "msvc_helper.h" - -#include -#include -#include -#include - -#include "clparser.h" -#include "util.h" - -#include "getopt.h" - -namespace { - -void Usage() { - printf( -"usage: ninja -t msvc [options] -- cl.exe /showIncludes /otherArgs\n" -"options:\n" -" -e ENVFILE load environment block from ENVFILE as environment\n" -" -o FILE write output dependency information to FILE.d\n" -" -p STRING localized prefix of msvc's /showIncludes output\n" - ); -} - -void PushPathIntoEnvironment(const string& env_block) { - const char* as_str = env_block.c_str(); - while (as_str[0]) { - if (_strnicmp(as_str, "path=", 5) == 0) { - _putenv(as_str); - return; - } else { - as_str = &as_str[strlen(as_str) + 1]; - } - } -} - -void WriteDepFileOrDie(const char* object_path, const CLParser& parse) { - string depfile_path = string(object_path) + ".d"; - FILE* depfile = fopen(depfile_path.c_str(), "w"); - if (!depfile) { - unlink(object_path); - Fatal("opening %s: %s", depfile_path.c_str(), - GetLastErrorString().c_str()); - } - if (fprintf(depfile, "%s: ", object_path) < 0) { - unlink(object_path); - fclose(depfile); - unlink(depfile_path.c_str()); - Fatal("writing %s", depfile_path.c_str()); - } - const set& headers = parse.includes_; - for (set::const_iterator i = headers.begin(); - i != headers.end(); ++i) { - if (fprintf(depfile, "%s\n", EscapeForDepfile(*i).c_str()) < 0) { - unlink(object_path); - 
fclose(depfile); - unlink(depfile_path.c_str()); - Fatal("writing %s", depfile_path.c_str()); - } - } - fclose(depfile); -} - -} // anonymous namespace - -int MSVCHelperMain(int argc, char** argv) { - const char* output_filename = NULL; - const char* envfile = NULL; - - const option kLongOptions[] = { - { "help", no_argument, NULL, 'h' }, - { NULL, 0, NULL, 0 } - }; - int opt; - string deps_prefix; - while ((opt = getopt_long(argc, argv, "e:o:p:h", kLongOptions, NULL)) != -1) { - switch (opt) { - case 'e': - envfile = optarg; - break; - case 'o': - output_filename = optarg; - break; - case 'p': - deps_prefix = optarg; - break; - case 'h': - default: - Usage(); - return 0; - } - } - - string env; - if (envfile) { - string err; - if (ReadFile(envfile, &env, &err) != 0) - Fatal("couldn't open %s: %s", envfile, err.c_str()); - PushPathIntoEnvironment(env); - } - - char* command = GetCommandLineA(); - command = strstr(command, " -- "); - if (!command) { - Fatal("expected command line to end with \" -- command args\""); - } - command += 4; - - CLWrapper cl; - if (!env.empty()) - cl.SetEnvBlock((void*)env.data()); - string output; - int exit_code = cl.Run(command, &output); - - if (output_filename) { - CLParser parser; - string err; - if (!parser.Parse(output, deps_prefix, &output, &err)) - Fatal("%s\n", err.c_str()); - WriteDepFileOrDie(output_filename, parser); - } - - if (output.empty()) - return exit_code; - - // CLWrapper's output already as \r\n line endings, make sure the C runtime - // doesn't expand this to \r\r\n. - _setmode(_fileno(stdout), _O_BINARY); - // Avoid printf and C strings, since the actual output might contain null - // bytes like UTF-16 does (yuck). - fwrite(&output[0], 1, output.size(), stdout); - - return exit_code; -} diff --git a/ninja/src/msvc_helper_test.cc b/ninja/src/msvc_helper_test.cc deleted file mode 100644 index eaae51f5645..00000000000 --- a/ninja/src/msvc_helper_test.cc +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2011 Google Inc. 
All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "msvc_helper.h" - -#include "test.h" -#include "util.h" - -TEST(EscapeForDepfileTest, SpacesInFilename) { - ASSERT_EQ("sub\\some\\ sdk\\foo.h", - EscapeForDepfile("sub\\some sdk\\foo.h")); -} - -TEST(MSVCHelperTest, EnvBlock) { - char env_block[] = "foo=bar\0"; - CLWrapper cl; - cl.SetEnvBlock(env_block); - string output; - cl.Run("cmd /c \"echo foo is %foo%", &output); - ASSERT_EQ("foo is bar\r\n", output); -} - -TEST(MSVCHelperTest, NoReadOfStderr) { - CLWrapper cl; - string output; - cl.Run("cmd /c \"echo to stdout&& echo to stderr 1>&2", &output); - ASSERT_EQ("to stdout\r\n", output); -} diff --git a/ninja/src/ninja.cc b/ninja/src/ninja.cc deleted file mode 100644 index 0cbc8c59b0b..00000000000 --- a/ninja/src/ninja.cc +++ /dev/null @@ -1,1429 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include -#include -#include -#include -#include - -#ifdef _WIN32 -#include "getopt.h" -#include -#include -#elif defined(_AIX) -#include "getopt.h" -#include -#else -#include -#include -#endif - -#include "browse.h" -#include "build.h" -#include "build_log.h" -#include "deps_log.h" -#include "clean.h" -#include "debug_flags.h" -#include "disk_interface.h" -#include "manifest_parser.h" -#include "metrics.h" -#include "state.h" -#include "util.h" -#include "version.h" - -#ifdef _MSC_VER -// Defined in msvc_helper_main-win32.cc. -int MSVCHelperMain(int argc, char** argv); - -// Defined in minidump-win32.cc. -void CreateWin32MiniDump(_EXCEPTION_POINTERS* pep); -#endif - -namespace { - -struct Tool; - -/// Command-line options. -struct Options { - /// Build file to load. - const char* input_file; - - /// Directory to change into before running. - const char* working_dir; - - /// Tool to run rather than building. - const Tool* tool; - - /// Whether duplicate rules for one target should warn or print an error. - bool dupe_edges_should_err; - - /// Whether phony cycles should warn or print an error. - bool phony_cycle_should_err; - - /// Whether a depfile with multiple targets on separate lines should - /// warn or print an error. - bool depfile_distinct_target_lines_should_err; -}; - -/// The Ninja main() loads up a series of data structures; various tools need -/// to poke into these, so store them as fields on an object. -struct NinjaMain : public BuildLogUser { - NinjaMain(const char* ninja_command, const BuildConfig& config) : - ninja_command_(ninja_command), config_(config) {} - - /// Command line used to run Ninja. - const char* ninja_command_; - - /// Build configuration set from flags (e.g. parallelism). - const BuildConfig& config_; - - /// Loaded state (rules, nodes). - State state_; - - /// Functions for accesssing the disk. - RealDiskInterface disk_interface_; - - /// The build directory, used for storing the build log etc. 
- string build_dir_; - - BuildLog build_log_; - DepsLog deps_log_; - - /// The type of functions that are the entry points to tools (subcommands). - typedef int (NinjaMain::*ToolFunc)(const Options*, int, char**); - - /// Get the Node for a given command-line path, handling features like - /// spell correction. - Node* CollectTarget(const char* cpath, string* err); - - /// CollectTarget for all command-line arguments, filling in \a targets. - bool CollectTargetsFromArgs(int argc, char* argv[], - vector* targets, string* err); - - // The various subcommands, run via "-t XXX". -#if 0 - int ToolGraph(const Options* options, int argc, char* argv[]); - int ToolQuery(const Options* options, int argc, char* argv[]); - int ToolDeps(const Options* options, int argc, char* argv[]); - int ToolBrowse(const Options* options, int argc, char* argv[]); - int ToolMSVC(const Options* options, int argc, char* argv[]); -#endif - int ToolTargets(const Options* options, int argc, char* argv[]); - int ToolCommands(const Options* options, int argc, char* argv[]); - int ToolClean(const Options* options, int argc, char* argv[]); - int ToolCleanDead(const Options* options, int argc, char* argv[]); - int ToolCompilationDatabase(const Options* options, int argc, char* argv[]); - int ToolRecompact(const Options* options, int argc, char* argv[]); - int ToolUrtle(const Options* options, int argc, char** argv); - int ToolRules(const Options* options, int argc, char* argv[]); - - /// Open the build log. - /// @return false on error. - bool OpenBuildLog(bool recompact_only = false); - - /// Open the deps log: load it, then open for writing. - /// @return false on error. - bool OpenDepsLog(bool recompact_only = false); - - /// Ensure the build directory exists, creating it if necessary. - /// @return false on error. - bool EnsureBuildDirExists(); -#if 0 - /// Rebuild the manifest, if necessary. - /// Fills in \a err on error. - /// @return true if the manifest was rebuilt. 
- bool RebuildManifest(const char* input_file, string* err); -#endif - /// Build the targets listed on the command line. - /// @return an exit code. - int RunBuild(int argc, char** argv); - - /// Dump the output requested by '-d stats'. - void DumpMetrics(); - - virtual bool IsPathDead(StringPiece s) const { - Node* n = state_.LookupNode(s); - if (n && n->in_edge()) - return false; - // Just checking n isn't enough: If an old output is both in the build log - // and in the deps log, it will have a Node object in state_. (It will also - // have an in edge if one of its inputs is another output that's in the deps - // log, but having a deps edge product an output that's input to another deps - // edge is rare, and the first recompaction will delete all old outputs from - // the deps log, and then a second recompaction will clear the build log, - // which seems good enough for this corner case.) - // Do keep entries around for files which still exist on disk, for - // generators that want to use this information. - string err; - TimeStamp mtime = disk_interface_.Stat(s.AsString(), &err); - if (mtime == -1) - Error("%s", err.c_str()); // Log and ignore Stat() errors. - return mtime == 0; - } -}; - -/// Subtools, accessible via "-t foo". -struct Tool { - /// Short name of the tool. - const char* name; - - /// Description (shown in "-t list"). - const char* desc; - - /// When to run the tool. - enum { - /// Run after parsing the command-line flags and potentially changing - /// the current working directory (as early as possible). - RUN_AFTER_FLAGS, - - /// Run after loading build.ninja. - RUN_AFTER_LOAD, - - /// Run after loading the build/deps logs. - RUN_AFTER_LOGS, - } when; - - /// Implementation of the tool. - NinjaMain::ToolFunc func; -}; - -/// Print usage information. 
-void Usage(const BuildConfig& config) { - fprintf(stderr, -"usage: ninja [options] [targets...]\n" -"\n" -"if targets are unspecified, builds the 'default' target (see manual).\n" -"\n" -"options:\n" -" --version print ninja version (\"%s\")\n" -" -v, --verbose show all command lines while building\n" -"\n" -" -C DIR change to DIR before doing anything else\n" -" -f FILE specify input build file [default=build.ninja]\n" -"\n" -" -j N run N jobs in parallel (0 means infinity) [default=%d on this system]\n" -" -k N keep going until N jobs fail (0 means infinity) [default=1]\n" -" -l N do not start new jobs if the load average is greater than N\n" -" -n dry run (don't run commands but act like they succeeded)\n" -"\n" -" -d MODE enable debugging (use '-d list' to list modes)\n" -" -t TOOL run a subtool (use '-t list' to list subtools)\n" -" terminates toplevel options; further flags are passed to the tool\n" -" -w FLAG adjust warnings (use '-w list' to list warnings)\n", - kNinjaVersion, config.parallelism); -} - -/// Choose a default value for the -j (parallelism) flag. -int GuessParallelism() { - switch (int processors = GetProcessorCount()) { - case 0: - case 1: - return 2; - case 2: - return 3; - default: - return processors + 2; - } -} -#if 0 -/// Rebuild the build manifest, if necessary. -/// Returns true if the manifest was rebuilt. -bool NinjaMain::RebuildManifest(const char* input_file, string* err) { - string path = input_file; - uint64_t slash_bits; // Unused because this path is only used for lookup. - if (!CanonicalizePath(&path, &slash_bits, err)) - return false; - Node* node = state_.LookupNode(path); - if (!node) - return false; - - Builder builder(&state_, config_, &build_log_, &deps_log_, &disk_interface_, NULL); - if (!builder.AddTarget(node, err)) - return false; - - if (builder.AlreadyUpToDate()) - return false; // Not an error, but we didn't rebuild. 
- - if (!builder.Build(err)) - return false; - - // The manifest was only rebuilt if it is now dirty (it may have been cleaned - // by a restat). - if (!node->dirty()) { - // Reset the state to prevent problems like - // https://github.com/ninja-build/ninja/issues/874 - state_.Reset(); - return false; - } - - return true; -} -#endif -Node* NinjaMain::CollectTarget(const char* cpath, string* err) { - string path = cpath; - uint64_t slash_bits; - if (!CanonicalizePath(&path, &slash_bits, err)) - return NULL; - - // Special syntax: "foo.cc^" means "the first output of foo.cc". - bool first_dependent = false; - if (!path.empty() && path[path.size() - 1] == '^') { - path.resize(path.size() - 1); - first_dependent = true; - } - - Node* node = state_.LookupNode(path); - if (node) { - if (first_dependent) { - if (node->out_edges().empty()) { - *err = "'" + path + "' has no out edge"; - return NULL; - } - Edge* edge = node->out_edges()[0]; - if (edge->outputs_.empty()) { - edge->Dump(); - Fatal("edge has no outputs"); - } - node = edge->outputs_[0]; - } - return node; - } else { - *err = - "unknown target '" + Node::PathDecanonicalized(path, slash_bits) + "'"; - if (path == "clean") { - *err += ", did you mean 'ninja -t clean'?"; - } else if (path == "help") { - *err += ", did you mean 'ninja -h'?"; - } else { - Node* suggestion = state_.SpellcheckNode(path); - if (suggestion) { - *err += ", did you mean '" + suggestion->path() + "'?"; - } - } - return NULL; - } -} - -bool NinjaMain::CollectTargetsFromArgs(int argc, char* argv[], - vector* targets, string* err) { - if (argc == 0) { - *targets = state_.DefaultNodes(err); - return err->empty(); - } - - for (int i = 0; i < argc; ++i) { - Node* node = CollectTarget(argv[i], err); - if (node == NULL) - return false; - targets->push_back(node); - } - return true; -} -#if 0 -int NinjaMain::ToolGraph(const Options* options, int argc, char* argv[]) { - vector nodes; - string err; - if (!CollectTargetsFromArgs(argc, argv, &nodes, 
&err)) { - Error("%s", err.c_str()); - return 1; - } - - GraphViz graph(&state_, &disk_interface_); - graph.Start(); - for (vector::const_iterator n = nodes.begin(); n != nodes.end(); ++n) - graph.AddTarget(*n); - graph.Finish(); - - return 0; -} - -int NinjaMain::ToolQuery(const Options* options, int argc, char* argv[]) { - if (argc == 0) { - Error("expected a target to query"); - return 1; - } - - DyndepLoader dyndep_loader(&state_, &disk_interface_); - - for (int i = 0; i < argc; ++i) { - string err; - Node* node = CollectTarget(argv[i], &err); - if (!node) { - Error("%s", err.c_str()); - return 1; - } - - printf("%s:\n", node->path().c_str()); - if (Edge* edge = node->in_edge()) { - if (edge->dyndep_ && edge->dyndep_->dyndep_pending()) { - if (!dyndep_loader.LoadDyndeps(edge->dyndep_, &err)) { - Warning("%s\n", err.c_str()); - } - } - printf(" input: %s\n", edge->rule_->name().c_str()); - for (int in = 0; in < (int)edge->inputs_.size(); in++) { - const char* label = ""; - if (edge->is_implicit(in)) - label = "| "; - else if (edge->is_order_only(in)) - label = "|| "; - printf(" %s%s\n", label, edge->inputs_[in]->path().c_str()); - } - } - printf(" outputs:\n"); - for (vector::const_iterator edge = node->out_edges().begin(); - edge != node->out_edges().end(); ++edge) { - for (vector::iterator out = (*edge)->outputs_.begin(); - out != (*edge)->outputs_.end(); ++out) { - printf(" %s\n", (*out)->path().c_str()); - } - } - } - return 0; -} -#endif -#if defined(NINJA_HAVE_BROWSE) -int NinjaMain::ToolBrowse(const Options* options, int argc, char* argv[]) { - RunBrowsePython(&state_, ninja_command_, options->input_file, argc, argv); - // If we get here, the browse failed. 
- return 1; -} -#elif 0 -int NinjaMain::ToolBrowse(const Options*, int, char**) { - Fatal("browse tool not supported on this platform"); - return 1; -} -#endif - -#if 0 -int NinjaMain::ToolMSVC(const Options* options, int argc, char* argv[]) { - // Reset getopt: push one argument onto the front of argv, reset optind. - argc++; - argv--; - optind = 0; - return MSVCHelperMain(argc, argv); -} - - -int ToolTargetsList(const vector& nodes, int depth, int indent) { - for (vector::const_iterator n = nodes.begin(); - n != nodes.end(); - ++n) { - for (int i = 0; i < indent; ++i) - printf(" "); - const char* target = (*n)->path().c_str(); - if ((*n)->in_edge()) { - // printf("%s: %s\n", target, (*n)->in_edge()->rule_->name().c_str()); - printf("%s\n", target); - if (depth > 1 || depth <= 0) - ToolTargetsList((*n)->in_edge()->inputs_, depth - 1, indent + 1); - } else { - printf("%s\n", target); - } - } - return 0; -} - -int ToolTargetsSourceList(State* state) { - for (vector::iterator e = state->edges_.begin(); - e != state->edges_.end(); ++e) { - for (vector::iterator inps = (*e)->inputs_.begin(); - inps != (*e)->inputs_.end(); ++inps) { - if (!(*inps)->in_edge()) - printf("%s\n", (*inps)->path().c_str()); - } - } - return 0; -} - -int ToolTargetsList(State* state, const string& rule_name) { - set rules; - - // Gather the outputs. - for (vector::iterator e = state->edges_.begin(); - e != state->edges_.end(); ++e) { - if ((*e)->rule_->name() == rule_name) { - for (vector::iterator out_node = (*e)->outputs_.begin(); - out_node != (*e)->outputs_.end(); ++out_node) { - rules.insert((*out_node)->path()); - } - } - } - - // Print them. 
- for (set::const_iterator i = rules.begin(); - i != rules.end(); ++i) { - printf("%s\n", (*i).c_str()); - } - - return 0; -} -#endif -int ToolTargetsList(State* state) { - for (vector::iterator e = state->edges_.begin(); - e != state->edges_.end(); ++e) { - for (vector::iterator out_node = (*e)->outputs_.begin(); - out_node != (*e)->outputs_.end(); ++out_node) { - // printf("%s: %s\n", - // (*out_node)->path().c_str(), - // (*e)->rule_->name().c_str()); - printf("%s\n", (*out_node)->path().c_str()); - } - } - return 0; -} -#if 0 -int NinjaMain::ToolDeps(const Options* options, int argc, char** argv) { - vector nodes; - if (argc == 0) { - for (vector::const_iterator ni = deps_log_.nodes().begin(); - ni != deps_log_.nodes().end(); ++ni) { - if (deps_log_.IsDepsEntryLiveFor(*ni)) - nodes.push_back(*ni); - } - } else { - string err; - if (!CollectTargetsFromArgs(argc, argv, &nodes, &err)) { - Error("%s", err.c_str()); - return 1; - } - } - - RealDiskInterface disk_interface; - for (vector::iterator it = nodes.begin(), end = nodes.end(); - it != end; ++it) { - DepsLog::Deps* deps = deps_log_.GetDeps(*it); - if (!deps) { - printf("%s: deps not found\n", (*it)->path().c_str()); - continue; - } - - string err; - TimeStamp mtime = disk_interface.Stat((*it)->path(), &err); - if (mtime == -1) - Error("%s", err.c_str()); // Log and ignore Stat() errors; - printf("%s: #deps %d, deps mtime %" PRId64 " (%s)\n", - (*it)->path().c_str(), deps->node_count, deps->mtime, - (!mtime || mtime > deps->mtime ? 
"STALE":"VALID")); - for (int i = 0; i < deps->node_count; ++i) - printf(" %s\n", deps->nodes[i]->path().c_str()); - printf("\n"); - } - - return 0; -} -#endif -int NinjaMain::ToolTargets(const Options* options, int argc, char* argv[]) { - return ToolTargetsList(&state_); -/* - int depth = 1; - if (argc >= 1) { - string mode = argv[0]; - if (mode == "rule") { - string rule; - if (argc > 1) - rule = argv[1]; - if (rule.empty()) - return ToolTargetsSourceList(&state_); - else - return ToolTargetsList(&state_, rule); - } else if (mode == "depth") { - if (argc > 1) - depth = atoi(argv[1]); - } else if (mode == "all") { - return ToolTargetsList(&state_); - } else { - const char* suggestion = - SpellcheckString(mode.c_str(), "rule", "depth", "all", NULL); - if (suggestion) { - Error("unknown target tool mode '%s', did you mean '%s'?", - mode.c_str(), suggestion); - } else { - Error("unknown target tool mode '%s'", mode.c_str()); - } - return 1; - } - } - - string err; - vector root_nodes = state_.RootNodes(&err); - if (err.empty()) { - return ToolTargetsList(root_nodes, depth, 0); - } else { - Error("%s", err.c_str()); - return 1; - } -*/ -} - -int NinjaMain::ToolRules(const Options* options, int argc, char* argv[]) { - // Parse options. - - // The rules tool uses getopt, and expects argv[0] to contain the name of - // the tool, i.e. "rules". 
- argc++; - argv--; - - bool print_description = false; - - optind = 1; - int opt; - while ((opt = getopt(argc, argv, const_cast("hd"))) != -1) { - switch (opt) { - case 'd': - print_description = true; - break; - case 'h': - default: - printf("usage: ninja -t rules [options]\n" - "\n" - "options:\n" - " -d also print the description of the rule\n" - " -h print this message\n" - ); - return 1; - } - } - argv += optind; - argc -= optind; - - // Print rules - - typedef map Rules; - const Rules& rules = state_.bindings_.GetRules(); - for (Rules::const_iterator i = rules.begin(); i != rules.end(); ++i) { - printf("%s", i->first.c_str()); - if (print_description) { - const Rule* rule = i->second; - const EvalString* description = rule->GetBinding("description"); - if (description != NULL) { - printf(": %s", description->Unparse().c_str()); - } - } - printf("\n"); - } - return 0; -} - -enum PrintCommandMode { PCM_Single, PCM_All }; -void PrintCommands(Edge* edge, set* seen, PrintCommandMode mode) { - if (!edge) - return; - if (!seen->insert(edge).second) - return; - - if (mode == PCM_All) { - for (vector::iterator in = edge->inputs_.begin(); - in != edge->inputs_.end(); ++in) - PrintCommands((*in)->in_edge(), seen, mode); - } - - if (!edge->is_phony()) - puts(edge->EvaluateCommand().c_str()); -} - -int NinjaMain::ToolCommands(const Options* options, int argc, char* argv[]) { - // The clean tool uses getopt, and expects argv[0] to contain the name of - // the tool, i.e. "commands". 
- ++argc; - --argv; - - PrintCommandMode mode = PCM_All; - - optind = 1; - int opt; - while ((opt = getopt(argc, argv, const_cast("hs"))) != -1) { - switch (opt) { - case 's': - mode = PCM_Single; - break; - case 'h': - default: - printf("usage: ninja -t commands [options] [targets]\n" -"\n" -"options:\n" -" -s only print the final command to build [target], not the whole chain\n" - ); - return 1; - } - } - argv += optind; - argc -= optind; - - vector nodes; - string err; - if (!CollectTargetsFromArgs(argc, argv, &nodes, &err)) { - Error("%s", err.c_str()); - return 1; - } - - set seen; - for (vector::iterator in = nodes.begin(); in != nodes.end(); ++in) - PrintCommands((*in)->in_edge(), &seen, mode); - - return 0; -} - -int NinjaMain::ToolClean(const Options* options, int argc, char* argv[]) { - // The clean tool uses getopt, and expects argv[0] to contain the name of - // the tool, i.e. "clean". - argc++; - argv--; - - bool generator = false; - bool clean_rules = false; - - optind = 1; - int opt; - while ((opt = getopt(argc, argv, const_cast("hgr"))) != -1) { - switch (opt) { - case 'g': - generator = true; - break; - case 'r': - clean_rules = true; - break; - case 'h': - default: - printf("usage: ninja -t clean [options] [targets]\n" -"\n" -"options:\n" -" -g also clean files marked as ninja generator output\n" -" -r interpret targets as a list of rules to clean instead\n" - ); - return 1; - } - } - argv += optind; - argc -= optind; - - if (clean_rules && argc == 0) { - Error("expected a rule to clean"); - return 1; - } - - Cleaner cleaner(&state_, config_, &disk_interface_); - if (argc >= 1) { - if (clean_rules) - return cleaner.CleanRules(argc, argv); - else - return cleaner.CleanTargets(argc, argv); - } else { - return cleaner.CleanAll(generator); - } -} - -int NinjaMain::ToolCleanDead(const Options* options, int argc, char* argv[]) { - Cleaner cleaner(&state_, config_, &disk_interface_); - cleaner.CleanDead(build_log_.entries()); - return 0; -} - -void 
EncodeJSONString(const char *str) { - while (*str) { - if (*str == '"' || *str == '\\') - putchar('\\'); - putchar(*str); - str++; - } -} - -enum EvaluateCommandMode { - ECM_NORMAL, - ECM_EXPAND_RSPFILE -}; -string EvaluateCommandWithRspfile(Edge* edge, EvaluateCommandMode mode) { - string command = edge->EvaluateCommand(); - if (mode == ECM_NORMAL) - return command; - - string rspfile = edge->GetUnescapedRspfile(); - if (rspfile.empty()) - return command; - - size_t index = command.find(rspfile); - if (index == 0 || index == string::npos || command[index - 1] != '@') - return command; - - string rspfile_content = edge->GetBinding("rspfile_content"); - size_t newline_index = 0; - while ((newline_index = rspfile_content.find('\n', newline_index)) != - string::npos) { - rspfile_content.replace(newline_index, 1, 1, ' '); - ++newline_index; - } - command.replace(index - 1, rspfile.length() + 1, rspfile_content); - return command; -} - -int NinjaMain::ToolCompilationDatabase(const Options* options, int argc, - char* argv[]) { - // The compdb tool uses getopt, and expects argv[0] to contain the name of - // the tool, i.e. "compdb". 
- argc++; - argv--; - - EvaluateCommandMode eval_mode = ECM_NORMAL; - - optind = 1; - int opt; - while ((opt = getopt(argc, argv, const_cast("hx"))) != -1) { - switch(opt) { - case 'x': - eval_mode = ECM_EXPAND_RSPFILE; - break; - - case 'h': - default: - printf( - "usage: ninja -t compdb [options] [rules]\n" - "\n" - "options:\n" - " -x expand @rspfile style response file invocations\n" - ); - return 1; - } - } - argv += optind; - argc -= optind; - - bool first = true; - vector cwd; - - do { - cwd.resize(cwd.size() + 1024); - errno = 0; - } while (!getcwd(&cwd[0], cwd.size()) && errno == ERANGE); - if (errno != 0 && errno != ERANGE) { - Error("cannot determine working directory: %s", strerror(errno)); - return 1; - } - - putchar('['); - for (vector::iterator e = state_.edges_.begin(); - e != state_.edges_.end(); ++e) { - if ((*e)->inputs_.empty()) - continue; - for (int i = 0; i != argc; ++i) { - if ((*e)->rule_->name() == argv[i]) { - if (!first) - putchar(','); - - printf("\n {\n \"directory\": \""); - EncodeJSONString(&cwd[0]); - printf("\",\n \"command\": \""); - EncodeJSONString(EvaluateCommandWithRspfile(*e, eval_mode).c_str()); - printf("\",\n \"file\": \""); - EncodeJSONString((*e)->inputs_[0]->path().c_str()); - printf("\",\n \"output\": \""); - EncodeJSONString((*e)->outputs_[0]->path().c_str()); - printf("\"\n }"); - - first = false; - } - } - } - - puts("\n]"); - return 0; -} - -int NinjaMain::ToolRecompact(const Options* options, int argc, char* argv[]) { - if (!EnsureBuildDirExists()) - return 1; - - if (!OpenBuildLog(/*recompact_only=*/true) || - !OpenDepsLog(/*recompact_only=*/true)) - return 1; - - return 0; -} - -#if 0 -int NinjaMain::ToolUrtle(const Options* options, int argc, char** argv) { - // RLE encoded. - const char* urtle = -" 13 ,3;2!2;\n8 ,;<11!;\n5 `'<10!(2`'2!\n11 ,6;, `\\. `\\9 .,c13$ec,.\n6 " -",2;11!>; `. ,;!2> .e8$2\".2 \"?7$e.\n <:<8!'` 2.3,.2` ,3!' ;,(?7\";2!2'<" -"; `?6$PF ,;,\n2 `'4!8;<3;5! 
J2$b,`!>;2!:2!`,d?b`!>\n26 `'-;,(<9!> $F3 )3.:!.2 d\"" -"2 ) !>\n30 7`2'<3!- \"=-='5 .2 `2-=\",!>\n25 .ze9$er2 .,cd16$bc.'\n22 .e" -"14$,26$.\n21 z45$c .\n20 J50$c\n20 14$P\"`?34$b\n20 14$ dbc `2\"?22$?7$c" -"\n20 ?18$c.6 4\"8?4\" c8$P\n9 .2,.8 \"20$c.3 ._14 J9$\n .2,2c9$bec,.2 `?" -"21$c.3`4%,3%,3 c8$P\"\n22$c2 2\"?21$bc2,.2` .2,c7$P2\",cb\n23$b bc,.2\"2" -"?14$2F2\"5?2\",J5$P\" ,zd3$\n24$ ?$3?%3 `2\"2?12$bcucd3$P3\"2 2=7$\n23$P" -"\" ,3;<5!>2;,. `4\"6?2\"2 ,9;, `\"?2$\n"; - int count = 0; - for (const char* p = urtle; *p; p++) { - if ('0' <= *p && *p <= '9') { - count = count*10 + *p - '0'; - } else { - for (int i = 0; i < max(count, 1); ++i) - printf("%c", *p); - count = 0; - } - } - return 0; -} -#endif - -/// Find the function to execute for \a tool_name and return it via \a func. -/// Returns a Tool, or NULL if Ninja should exit. -const Tool* ChooseTool(const string& tool_name) { - static const Tool kTools[] = { -#if 0 - { "browse", "browse dependency graph in a web browser", - Tool::RUN_AFTER_LOAD, &NinjaMain::ToolBrowse }, - { "msvc", "build helper for MSVC cl.exe (EXPERIMENTAL)", - Tool::RUN_AFTER_FLAGS, &NinjaMain::ToolMSVC }, -#endif - { "clean", "clean built files", - Tool::RUN_AFTER_LOAD, &NinjaMain::ToolClean }, - { "commands", "list all commands required to rebuild given targets", - Tool::RUN_AFTER_LOAD, &NinjaMain::ToolCommands }, -#if 0 - { "deps", "show dependencies stored in the deps log", - Tool::RUN_AFTER_LOGS, &NinjaMain::ToolDeps }, - { "graph", "output graphviz dot file for targets", - Tool::RUN_AFTER_LOAD, &NinjaMain::ToolGraph }, - - { "query", "show inputs/outputs for a path", - Tool::RUN_AFTER_LOGS, &NinjaMain::ToolQuery }, -#endif - { "targets", "list targets by their rule or depth in the DAG", - Tool::RUN_AFTER_LOAD, &NinjaMain::ToolTargets }, - { "compdb", "dump JSON compilation database to stdout", - Tool::RUN_AFTER_LOAD, &NinjaMain::ToolCompilationDatabase }, - { "recompact", "recompacts ninja-internal data structures", - 
Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRecompact }, - { "rules", "list all rules", - Tool::RUN_AFTER_LOAD, &NinjaMain::ToolRules }, - { "cleandead", "clean built files that are no longer produced by the manifest", - Tool::RUN_AFTER_LOGS, &NinjaMain::ToolCleanDead }, -#if 0 - { "urtle", NULL, - Tool::RUN_AFTER_FLAGS, &NinjaMain::ToolUrtle }, -#endif - { NULL, NULL, Tool::RUN_AFTER_FLAGS, NULL } - }; - - if (tool_name == "list") { - printf("ninja subtools:\n"); - for (const Tool* tool = &kTools[0]; tool->name; ++tool) { - if (tool->desc) - printf("%10s %s\n", tool->name, tool->desc); - } - return NULL; - } - - for (const Tool* tool = &kTools[0]; tool->name; ++tool) { - if (tool->name == tool_name) - return tool; - } - - vector words; - for (const Tool* tool = &kTools[0]; tool->name; ++tool) - words.push_back(tool->name); - const char* suggestion = SpellcheckStringV(tool_name, words); - if (suggestion) { - Fatal("unknown tool '%s', did you mean '%s'?", - tool_name.c_str(), suggestion); - } else { - Fatal("unknown tool '%s'", tool_name.c_str()); - } - return NULL; // Not reached. -} - -/// Enable a debugging mode. Returns false if Ninja should exit instead -/// of continuing. 
-bool DebugEnable(const string& name) { - if (name == "list") { - printf("debugging modes:\n" -" stats print operation counts/timing info\n" -" explain explain what caused a command to execute\n" -" keepdepfile don't delete depfiles after they're read by ninja\n" -" keeprsp don't delete @response files on success\n" -#ifdef _WIN32 -" nostatcache don't batch stat() calls per directory and cache them\n" -#endif -"multiple modes can be enabled via -d FOO -d BAR\n"); - return false; - } else if (name == "stats") { - g_metrics = new Metrics; - return true; - } else if (name == "explain") { - g_explaining = true; - return true; - } else if (name == "keepdepfile") { - g_keep_depfile = true; - return true; - } else if (name == "keeprsp") { - g_keep_rsp = true; - return true; - } else if (name == "nostatcache") { - g_experimental_statcache = false; - return true; - } else { - const char* suggestion = - SpellcheckString(name.c_str(), - "stats", "explain", "keepdepfile", "keeprsp", - "nostatcache", NULL); - if (suggestion) { - Error("unknown debug setting '%s', did you mean '%s'?", - name.c_str(), suggestion); - } else { - Error("unknown debug setting '%s'", name.c_str()); - } - return false; - } -} - -/// Set a warning flag. Returns false if Ninja should exit instead of -/// continuing. 
-bool WarningEnable(const string& name, Options* options) { - if (name == "list") { - printf("warning flags:\n" -" dupbuild={err,warn} multiple build lines for one target\n" -" phonycycle={err,warn} phony build statement references itself\n" -" depfilemulti={err,warn} depfile has multiple output paths on separate lines\n" - ); - return false; - } else if (name == "dupbuild=err") { - options->dupe_edges_should_err = true; - return true; - } else if (name == "dupbuild=warn") { - options->dupe_edges_should_err = false; - return true; - } else if (name == "phonycycle=err") { - options->phony_cycle_should_err = true; - return true; - } else if (name == "phonycycle=warn") { - options->phony_cycle_should_err = false; - return true; - } else if (name == "depfilemulti=err") { - options->depfile_distinct_target_lines_should_err = true; - return true; - } else if (name == "depfilemulti=warn") { - options->depfile_distinct_target_lines_should_err = false; - return true; - } else { - const char* suggestion = - SpellcheckString(name.c_str(), "dupbuild=err", "dupbuild=warn", - "phonycycle=err", "phonycycle=warn", NULL); - if (suggestion) { - Error("unknown warning flag '%s', did you mean '%s'?", - name.c_str(), suggestion); - } else { - Error("unknown warning flag '%s'", name.c_str()); - } - return false; - } -} - -bool NinjaMain::OpenBuildLog(bool recompact_only) { - string log_path = ".ninja_log"; - if (!build_dir_.empty()) - log_path = build_dir_ + "/" + log_path; - - string err; - if (!build_log_.Load(log_path, &err)) { - Error("loading build log %s: %s", log_path.c_str(), err.c_str()); - return false; - } - if (!err.empty()) { - // Hack: Load() can return a warning via err by returning true. 
- Warning("%s", err.c_str()); - err.clear(); - } - - if (recompact_only) { - bool success = build_log_.Recompact(log_path, *this, &err); - if (!success) - Error("failed recompaction: %s", err.c_str()); - return success; - } - - if (!config_.dry_run) { - if (!build_log_.OpenForWrite(log_path, *this, &err)) { - Error("opening build log: %s", err.c_str()); - return false; - } - } - - return true; -} - -/// Open the deps log: load it, then open for writing. -/// @return false on error. -bool NinjaMain::OpenDepsLog(bool recompact_only) { -#if 0 - string path = ".ninja_deps"; - if (!build_dir_.empty()) - path = build_dir_ + "/" + path; - - string err; - if (!deps_log_.Load(path, &state_, &err)) { - Error("loading deps log %s: %s", path.c_str(), err.c_str()); - return false; - } - if (!err.empty()) { - // Hack: Load() can return a warning via err by returning true. - Warning("%s", err.c_str()); - err.clear(); - } - - if (recompact_only) { - bool success = deps_log_.Recompact(path, &err); - if (!success) - Error("failed recompaction: %s", err.c_str()); - return success; - } - - if (!config_.dry_run) { - if (!deps_log_.OpenForWrite(path, &err)) { - Error("opening deps log: %s", err.c_str()); - return false; - } - } -#endif - return true; -} - -void NinjaMain::DumpMetrics() { - g_metrics->Report(); - - printf("\n"); - int count = (int)state_.paths_.size(); - int buckets = (int)state_.paths_.bucket_count(); - printf("path->node hash load %.2f (%d entries / %d buckets)\n", - count / (double) buckets, count, buckets); -} - -bool NinjaMain::EnsureBuildDirExists() { - build_dir_ = state_.bindings_.LookupVariable("builddir"); - if (!build_dir_.empty() && !config_.dry_run) { - if (!disk_interface_.MakeDirs(build_dir_ + "/.") && errno != EEXIST) { - Error("creating build directory %s: %s", - build_dir_.c_str(), strerror(errno)); - return false; - } - } - return true; -} - -int NinjaMain::RunBuild(int argc, char** argv) { - string err; - vector targets; - if 
(!CollectTargetsFromArgs(argc, argv, &targets, &err)) { - Error("%s", err.c_str()); - return 1; - } - - disk_interface_.AllowStatCache(g_experimental_statcache); - FILE* compiler_log_ = NULL; - if (!config_.dry_run) { - string compiler_log_path = ".compiler.log"; - if (!build_dir_.empty()) { - compiler_log_path = build_dir_ + "/" + compiler_log_path; - } - compiler_log_ = fopen(compiler_log_path.c_str(), "w"); - fprintf(compiler_log_,"#Start(%" PRId64 ")\n", GetTimeMillis()); - setvbuf(compiler_log_, NULL, _IOLBF, BUFSIZ); - SetCloseOnExec(fileno(compiler_log_)); - } - Builder builder(&state_, config_, &build_log_, &deps_log_, &disk_interface_,compiler_log_); - - for (size_t i = 0; i < targets.size(); ++i) { - if (!builder.AddTarget(targets[i], &err)) { - if (!err.empty()) { - Error("%s", err.c_str()); - return 1; - } else { - // Added a target that is already up-to-date; not really - // an error. - } - } - } - - // Make sure restat rules do not see stale timestamps. - disk_interface_.AllowStatCache(false); - - if (builder.AlreadyUpToDate()) { - if (config_.verbosity == BuildConfig::VERBOSE) { - printf("rescript: no work to do.\n"); - } - fprintf(compiler_log_, "#Done(%" PRId64 ")\n", GetTimeMillis()); - fclose(compiler_log_); - return 0; - } - - if (!builder.Build(&err)) { - - if (ShouldBeColorFul(false)) { - printf("\x1b[31m" "FAILED:" "\x1b[0m" " %s.\n", err.c_str()); - } else { - printf("FAILED: %s.\n", err.c_str()); - } - fprintf(compiler_log_, "FAILED: %s.\n", err.c_str()); - - fprintf(compiler_log_, "#Done(%" PRId64 ")\n", GetTimeMillis()); - fclose(compiler_log_); - - if (err.find("interrupted by user") != string::npos) { - return 2; - } - return 1; - } - - fprintf(compiler_log_, "#Done(%" PRId64 ")\n", GetTimeMillis()); - fclose(compiler_log_); - - return 0; -} - -#ifdef _MSC_VER - -/// This handler processes fatal crashes that you can't catch -/// Test example: C++ exception in a stack-unwind-block -/// Real-world example: ninja launched a compiler to 
process a tricky -/// C++ input file. The compiler got itself into a state where it -/// generated 3 GB of output and caused ninja to crash. -void TerminateHandler() { - CreateWin32MiniDump(NULL); - Fatal("terminate handler called"); -} - -/// On Windows, we want to prevent error dialogs in case of exceptions. -/// This function handles the exception, and writes a minidump. -int ExceptionFilter(unsigned int code, struct _EXCEPTION_POINTERS *ep) { - Error("exception: 0x%X", code); // e.g. EXCEPTION_ACCESS_VIOLATION - fflush(stderr); - CreateWin32MiniDump(ep); - return EXCEPTION_EXECUTE_HANDLER; -} - -#endif // _MSC_VER - -/// Parse argv for command-line options. -/// Returns an exit code, or -1 if Ninja should continue. -int ReadFlags(int* argc, char*** argv, - Options* options, BuildConfig* config) { - config->parallelism = GuessParallelism(); - - enum { OPT_VERSION = 1 }; - const option kLongOptions[] = { - { "help", no_argument, NULL, 'h' }, - { "version", no_argument, NULL, OPT_VERSION }, - { "verbose", no_argument, NULL, 'v' }, - { NULL, 0, NULL, 0 } - }; - - int opt; - while (!options->tool && - (opt = getopt_long(*argc, *argv, "d:f:j:k:l:nt:vw:C:h", kLongOptions, - NULL)) != -1) { - switch (opt) { - case 'd': - if (!DebugEnable(optarg)) - return 1; - break; - case 'f': - options->input_file = optarg; - break; - case 'j': { - char* end; - int value = strtol(optarg, &end, 10); - if (*end != 0 || value < 0) - Fatal("invalid -j parameter"); - - // We want to run N jobs in parallel. For N = 0, INT_MAX - // is close enough to infinite for most sane builds. - config->parallelism = value > 0 ? value : INT_MAX; - break; - } - case 'k': { - char* end; - int value = strtol(optarg, &end, 10); - if (*end != 0) - Fatal("-k parameter not numeric; did you mean -k 0?"); - - // We want to go until N jobs fail, which means we should allow - // N failures and then stop. For N <= 0, INT_MAX is close enough - // to infinite for most sane builds. 
- config->failures_allowed = value > 0 ? value : INT_MAX; - break; - } - case 'l': { - char* end; - double value = strtod(optarg, &end); - if (end == optarg) - Fatal("-l parameter not numeric: did you mean -l 0.0?"); - config->max_load_average = value; - break; - } - case 'n': - config->dry_run = true; - break; - case 't': - options->tool = ChooseTool(optarg); - if (!options->tool) - return 0; - break; - case 'v': - config->verbosity = BuildConfig::VERBOSE; - break; - case 'w': - if (!WarningEnable(optarg, options)) - return 1; - break; - case 'C': - options->working_dir = optarg; - break; - case OPT_VERSION: - printf("%s\n", kNinjaVersion); - return 0; - case 'h': - default: - Usage(*config); - return 1; - } - } - *argv += optind; - *argc -= optind; - - return -1; -} - -NORETURN void real_main(int argc, char** argv) { - // Use exit() instead of return in this function to avoid potentially - // expensive cleanup when destructing NinjaMain. - BuildConfig config; - Options options = {}; - options.input_file = "build.ninja"; - options.dupe_edges_should_err = true; - - setvbuf(stdout, NULL, _IOLBF, BUFSIZ); - const char* ninja_command = argv[0]; - - int exit_code = ReadFlags(&argc, &argv, &options, &config); - if (exit_code >= 0) - exit(exit_code); - - if (options.depfile_distinct_target_lines_should_err) { - config.depfile_parser_options.depfile_distinct_target_lines_action_ = - kDepfileDistinctTargetLinesActionError; - } - - if (options.working_dir) { - // The formatting of this string, complete with funny quotes, is - // so Emacs can properly identify that the cwd has changed for - // subsequent commands. - // Don't print this if a tool is being used, so that tool output - // can be piped into a file without this string showing up. 
- if (!options.tool && config.verbosity == BuildConfig::VERBOSE) - printf("rescript: Entering directory `%s'\n", options.working_dir); - if (chdir(options.working_dir) < 0) { - Fatal("chdir to '%s' - %s", options.working_dir, strerror(errno)); - } - } -#if 0 - if (options.tool && options.tool->when == Tool::RUN_AFTER_FLAGS) { - // None of the RUN_AFTER_FLAGS actually use a NinjaMain, but it's needed - // by other tools. - NinjaMain ninja(ninja_command, config); - exit((ninja.*options.tool->func)(&options, argc, argv)); - } -#endif - // Limit number of rebuilds, to prevent infinite loops. - const int kCycleLimit = 100; - for (int cycle = 1; cycle <= kCycleLimit; ++cycle) { - NinjaMain ninja(ninja_command, config); - - ManifestParserOptions parser_opts; - if (options.dupe_edges_should_err) { - parser_opts.dupe_edge_action_ = kDupeEdgeActionError; - } - if (options.phony_cycle_should_err) { - parser_opts.phony_cycle_action_ = kPhonyCycleActionError; - } - ManifestParser parser(&ninja.state_, &ninja.disk_interface_, parser_opts); - string err; - if (!parser.Load(options.input_file, &err)) { - Error("%s", err.c_str()); - exit(1); - } - - if (options.tool && options.tool->when == Tool::RUN_AFTER_LOAD) - exit((ninja.*options.tool->func)(&options, argc, argv)); - - if (!ninja.EnsureBuildDirExists()) - exit(1); - - if (!ninja.OpenBuildLog() || !ninja.OpenDepsLog()) - exit(1); - - if (options.tool && options.tool->when == Tool::RUN_AFTER_LOGS) - exit((ninja.*options.tool->func)(&options, argc, argv)); - - if (ninja.state_.rescript_mode_) { - ninja.ToolCleanDead(&options, argc, argv); - } - -#if 0 - // Attempt to rebuild the manifest before building anything else - if (ninja.RebuildManifest(options.input_file, &err)) { - // In dry_run mode the regeneration will succeed without changing the - // manifest forever. Better to return immediately. - if (config.dry_run) - exit(0); - // Start the build over with the new manifest. 
- continue; - } else if (!err.empty()) { - Error("rebuilding '%s': %s", options.input_file, err.c_str()); - exit(1); - } -#endif - int result = ninja.RunBuild(argc, argv); - if (g_metrics) - ninja.DumpMetrics(); - exit(result); - } - - Error("manifest '%s' still dirty after %d tries\n", - options.input_file, kCycleLimit); - exit(1); -} - -} // anonymous namespace - -int main(int argc, char** argv) { -#if defined(_MSC_VER) - // Set a handler to catch crashes not caught by the __try..__except - // block (e.g. an exception in a stack-unwind-block). - std::set_terminate(TerminateHandler); - __try { - // Running inside __try ... __except suppresses any Windows error - // dialogs for errors such as bad_alloc. - real_main(argc, argv); - } - __except(ExceptionFilter(GetExceptionCode(), GetExceptionInformation())) { - // Common error situations return exitCode=1. 2 was chosen to - // indicate a more serious problem. - return 2; - } -#else - real_main(argc, argv); -#endif -} diff --git a/ninja/src/ninja_test.cc b/ninja/src/ninja_test.cc deleted file mode 100644 index d642c5c90d2..00000000000 --- a/ninja/src/ninja_test.cc +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include -#include -#include - -#ifdef _WIN32 -#include "getopt.h" -#elif defined(_AIX) -#include "getopt.h" -#include -#else -#include -#endif - -#include "test.h" -#include "line_printer.h" - -struct RegisteredTest { - testing::Test* (*factory)(); - const char *name; - bool should_run; -}; -// This can't be a vector because tests call RegisterTest from static -// initializers and the order static initializers run it isn't specified. So -// the vector constructor isn't guaranteed to run before all of the -// RegisterTest() calls. -static RegisteredTest tests[10000]; -testing::Test* g_current_test; -static int ntests; -static LinePrinter printer; - -void RegisterTest(testing::Test* (*factory)(), const char* name) { - tests[ntests].factory = factory; - tests[ntests++].name = name; -} - -namespace { -string StringPrintf(const char* format, ...) { - const int N = 1024; - char buf[N]; - - va_list ap; - va_start(ap, format); - vsnprintf(buf, N, format, ap); - va_end(ap); - - return buf; -} - -void Usage() { - fprintf(stderr, -"usage: ninja_tests [options]\n" -"\n" -"options:\n" -" --gtest_filter=POSTIVE_PATTERN[-NEGATIVE_PATTERN]\n" -" Run tests whose names match the positive but not the negative pattern.\n" -" '*' matches any substring. (gtest's ':', '?' are not implemented).\n"); -} - -bool PatternMatchesString(const char* pattern, const char* str) { - switch (*pattern) { - case '\0': - case '-': return *str == '\0'; - case '*': return (*str != '\0' && PatternMatchesString(pattern, str + 1)) || - PatternMatchesString(pattern + 1, str); - default: return *pattern == *str && - PatternMatchesString(pattern + 1, str + 1); - } -} - -bool TestMatchesFilter(const char* test, const char* filter) { - // Split --gtest_filter at '-' into positive and negative filters. - const char* const dash = strchr(filter, '-'); - const char* pos = dash == filter ? "*" : filter; //Treat '-test1' as '*-test1' - const char* neg = dash ? 
dash + 1 : ""; - return PatternMatchesString(pos, test) && !PatternMatchesString(neg, test); -} - -bool ReadFlags(int* argc, char*** argv, const char** test_filter) { - enum { OPT_GTEST_FILTER = 1 }; - const option kLongOptions[] = { - { "gtest_filter", required_argument, NULL, OPT_GTEST_FILTER }, - { NULL, 0, NULL, 0 } - }; - - int opt; - while ((opt = getopt_long(*argc, *argv, "h", kLongOptions, NULL)) != -1) { - switch (opt) { - case OPT_GTEST_FILTER: - if (strchr(optarg, '?') == NULL && strchr(optarg, ':') == NULL) { - *test_filter = optarg; - break; - } // else fall through. - default: - Usage(); - return false; - } - } - *argv += optind; - *argc -= optind; - return true; -} - -} // namespace - -bool testing::Test::Check(bool condition, const char* file, int line, - const char* error) { - if (!condition) { - printer.PrintOnNewLine( - StringPrintf("*** Failure in %s:%d\n%s\n", file, line, error)); - failed_ = true; - } - return condition; -} - -int main(int argc, char **argv) { - int tests_started = 0; - - const char* test_filter = "*"; - if (!ReadFlags(&argc, &argv, &test_filter)) - return 1; - - int nactivetests = 0; - for (int i = 0; i < ntests; i++) - if ((tests[i].should_run = TestMatchesFilter(tests[i].name, test_filter))) - ++nactivetests; - - bool passed = true; - for (int i = 0; i < ntests; i++) { - if (!tests[i].should_run) continue; - - ++tests_started; - testing::Test* test = tests[i].factory(); - printer.Print( - StringPrintf("[%d/%d] %s", tests_started, nactivetests, tests[i].name), - LinePrinter::ELIDE); - test->SetUp(); - test->Run(); - test->TearDown(); - if (test->Failed()) - passed = false; - delete test; - } - - printer.PrintOnNewLine(passed ? "passed\n" : "failed\n"); - return passed ? EXIT_SUCCESS : EXIT_FAILURE; -} diff --git a/ninja/src/parser.cc b/ninja/src/parser.cc deleted file mode 100644 index 745c532d72c..00000000000 --- a/ninja/src/parser.cc +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "parser.h" - -#include "disk_interface.h" -#include "metrics.h" - -bool Parser::Load(const string& filename, string* err, Lexer* parent) { - METRIC_RECORD(".ninja parse"); - string contents; - string read_err; - if (file_reader_->ReadFile(filename, &contents, &read_err) != - FileReader::Okay) { - *err = "loading '" + filename + "': " + read_err; - if (parent) - parent->Error(string(*err), err); - return false; - } - - // The lexer needs a nul byte at the end of its input, to know when it's done. - // It takes a StringPiece, and StringPiece's string constructor uses - // string::data(). data()'s return value isn't guaranteed to be - // null-terminated (although in practice - libc++, libstdc++, msvc's stl -- - // it is, and C++11 demands that too), so add an explicit nul byte. 
- contents.resize(contents.size() + 1); - - return Parse(filename, contents, err); -} - -bool Parser::ExpectToken(Lexer::Token expected, string* err) { - Lexer::Token token = lexer_.ReadToken(); - if (token != expected) { - string message = string("expected ") + Lexer::TokenName(expected); - message += string(", got ") + Lexer::TokenName(token); - message += Lexer::TokenErrorHint(expected); - return lexer_.Error(message, err); - } - return true; -} diff --git a/ninja/src/parser.h b/ninja/src/parser.h deleted file mode 100644 index 84694404f68..00000000000 --- a/ninja/src/parser.h +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2018 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_PARSER_H_ -#define NINJA_PARSER_H_ - -#include - -using namespace std; - -#include "lexer.h" - -struct DiskInterface; -struct State; - -/// Base class for parsers. -struct Parser { - Parser(State* state, DiskInterface* file_reader) - : state_(state), file_reader_(file_reader) {} - - /// Load and parse a file. - bool Load(const string& filename, string* err, Lexer* parent = NULL); - -protected: - /// If the next token is not \a expected, produce an error string - /// saying "expected foo, got bar". - bool ExpectToken(Lexer::Token expected, string* err); - - State* state_; - DiskInterface* file_reader_; - Lexer lexer_; - -private: - /// Parse a file, given its contents as a string. 
- virtual bool Parse(const string& filename, const string& input, - string* err) = 0; -}; - -#endif // NINJA_PARSER_H_ diff --git a/ninja/src/state.cc b/ninja/src/state.cc deleted file mode 100644 index 8bd295b656e..00000000000 --- a/ninja/src/state.cc +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "state.h" - -#include -#include - -#include "edit_distance.h" -#include "graph.h" -#include "metrics.h" -#include "util.h" - - -void Pool::EdgeScheduled(const Edge& edge) { - if (depth_ != 0) - current_use_ += edge.weight(); -} - -void Pool::EdgeFinished(const Edge& edge) { - if (depth_ != 0) - current_use_ -= edge.weight(); -} - -void Pool::DelayEdge(Edge* edge) { - assert(depth_ != 0); - delayed_.insert(edge); -} - -void Pool::RetrieveReadyEdges(set* ready_queue) { - DelayedEdges::iterator it = delayed_.begin(); - while (it != delayed_.end()) { - Edge* edge = *it; - if (current_use_ + edge->weight() > depth_) - break; - ready_queue->insert(edge); - EdgeScheduled(*edge); - ++it; - } - delayed_.erase(delayed_.begin(), it); -} - -void Pool::Dump() const { - printf("%s (%d/%d) ->\n", name_.c_str(), current_use_, depth_); - for (DelayedEdges::const_iterator it = delayed_.begin(); - it != delayed_.end(); ++it) - { - printf("\t"); - (*it)->Dump(); - } -} - -// static -bool Pool::WeightedEdgeCmp(const Edge* a, const Edge* b) { - if (!a) return b; - if (!b) return false; - 
int weight_diff = a->weight() - b->weight(); - return ((weight_diff < 0) || (weight_diff == 0 && a < b)); -} - -Pool State::kDefaultPool("", 0); -Pool State::kConsolePool("console", 1); -const Rule State::kPhonyRule("phony"); - -State::State() { - bindings_.AddRule(&kPhonyRule); - AddPool(&kDefaultPool); - AddPool(&kConsolePool); - rescript_mode_ = false; -} - -void State::AddPool(Pool* pool) { - assert(LookupPool(pool->name()) == NULL); - pools_[pool->name()] = pool; -} - -Pool* State::LookupPool(const string& pool_name) { - map::iterator i = pools_.find(pool_name); - if (i == pools_.end()) - return NULL; - return i->second; -} - -Edge* State::AddEdge(const Rule* rule) { - Edge* edge = new Edge(); - edge->rule_ = rule; - edge->pool_ = &State::kDefaultPool; - edge->env_ = &bindings_; - edges_.push_back(edge); - return edge; -} - -Node* State::GetNode(StringPiece path, uint64_t slash_bits) { - Node* node = LookupNode(path); - if (node) - return node; - node = new Node(path.AsString(), slash_bits); - paths_[node->path()] = node; - return node; -} - -Node* State::LookupNode(StringPiece path) const { - METRIC_RECORD("lookup node"); - Paths::const_iterator i = paths_.find(path); - if (i != paths_.end()) - return i->second; - return NULL; -} - -Node* State::SpellcheckNode(const string& path) { - const bool kAllowReplacements = true; - const int kMaxValidEditDistance = 3; - - int min_distance = kMaxValidEditDistance + 1; - Node* result = NULL; - for (Paths::iterator i = paths_.begin(); i != paths_.end(); ++i) { - int distance = EditDistance( - i->first, path, kAllowReplacements, kMaxValidEditDistance); - if (distance < min_distance && i->second) { - min_distance = distance; - result = i->second; - } - } - return result; -} - -void State::AddIn(Edge* edge, StringPiece path, uint64_t slash_bits) { - Node* node = GetNode(path, slash_bits); - edge->inputs_.push_back(node); - node->AddOutEdge(edge); -} - -bool State::AddOut(Edge* edge, StringPiece path, uint64_t slash_bits) { 
- Node* node = GetNode(path, slash_bits); - if (node->in_edge()) - return false; - edge->outputs_.push_back(node); - node->set_in_edge(edge); - return true; -} - -bool State::AddDefault(StringPiece path, string* err) { - Node* node = LookupNode(path); - if (!node) { - *err = "unknown target '" + path.AsString() + "'"; - return false; - } - defaults_.push_back(node); - return true; -} - -vector State::RootNodes(string* err) const { - vector root_nodes; - // Search for nodes with no output. - for (vector::const_iterator e = edges_.begin(); - e != edges_.end(); ++e) { - for (vector::const_iterator out = (*e)->outputs_.begin(); - out != (*e)->outputs_.end(); ++out) { - if ((*out)->out_edges().empty()) - root_nodes.push_back(*out); - } - } - - if (!edges_.empty() && root_nodes.empty()) - *err = "could not determine root nodes of build graph"; - - return root_nodes; -} - -vector State::DefaultNodes(string* err) const { - return defaults_.empty() ? RootNodes(err) : defaults_; -} - -void State::Reset() { - for (Paths::iterator i = paths_.begin(); i != paths_.end(); ++i) - i->second->ResetState(); - for (vector::iterator e = edges_.begin(); e != edges_.end(); ++e) { - (*e)->outputs_ready_ = false; - (*e)->deps_loaded_ = false; - (*e)->mark_ = Edge::VisitNone; - } -} - -void State::Dump() { - for (Paths::iterator i = paths_.begin(); i != paths_.end(); ++i) { - Node* node = i->second; - printf("%s %s [id:%d]\n", - node->path().c_str(), - node->status_known() ? (node->dirty() ? "dirty" : "clean") - : "unknown", - 0/*node->id()*/); - } - if (!pools_.empty()) { - printf("resource_pools:\n"); - for (map::const_iterator it = pools_.begin(); - it != pools_.end(); ++it) - { - if (!it->second->name().empty()) { - it->second->Dump(); - } - } - } -} diff --git a/ninja/src/state.h b/ninja/src/state.h deleted file mode 100644 index b050390bb67..00000000000 --- a/ninja/src/state.h +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_STATE_H_ -#define NINJA_STATE_H_ - -#include -#include -#include -#include -using namespace std; - -#include "eval_env.h" -#include "hash_map.h" -#include "util.h" - -struct Edge; -struct Node; -struct Rule; - -/// A pool for delayed edges. -/// Pools are scoped to a State. Edges within a State will share Pools. A Pool -/// will keep a count of the total 'weight' of the currently scheduled edges. If -/// a Plan attempts to schedule an Edge which would cause the total weight to -/// exceed the depth of the Pool, the Pool will enqueue the Edge instead of -/// allowing the Plan to schedule it. The Pool will relinquish queued Edges when -/// the total scheduled weight diminishes enough (i.e. when a scheduled edge -/// completes). -struct Pool { - Pool(const string& name, int depth) - : name_(name), current_use_(0), depth_(depth), delayed_(&WeightedEdgeCmp) {} - - // A depth of 0 is infinite - bool is_valid() const { return depth_ >= 0; } - int depth() const { return depth_; } - const string& name() const { return name_; } - int current_use() const { return current_use_; } - - /// true if the Pool might delay this edge - bool ShouldDelayEdge() const { return depth_ != 0; } - - /// informs this Pool that the given edge is committed to be run. - /// Pool will count this edge as using resources from this pool. 
- void EdgeScheduled(const Edge& edge); - - /// informs this Pool that the given edge is no longer runnable, and should - /// relinquish its resources back to the pool - void EdgeFinished(const Edge& edge); - - /// adds the given edge to this Pool to be delayed. - void DelayEdge(Edge* edge); - - /// Pool will add zero or more edges to the ready_queue - void RetrieveReadyEdges(set* ready_queue); - - /// Dump the Pool and its edges (useful for debugging). - void Dump() const; - - private: - string name_; - - /// |current_use_| is the total of the weights of the edges which are - /// currently scheduled in the Plan (i.e. the edges in Plan::ready_). - int current_use_; - int depth_; - - static bool WeightedEdgeCmp(const Edge* a, const Edge* b); - - typedef set DelayedEdges; - DelayedEdges delayed_; -}; - -/// Global state (file status) for a single run. -struct State { - static Pool kDefaultPool; - static Pool kConsolePool; - static const Rule kPhonyRule; - - State(); - - void AddPool(Pool* pool); - Pool* LookupPool(const string& pool_name); - - Edge* AddEdge(const Rule* rule); - - Node* GetNode(StringPiece path, uint64_t slash_bits); - Node* LookupNode(StringPiece path) const; - Node* SpellcheckNode(const string& path); - - void AddIn(Edge* edge, StringPiece path, uint64_t slash_bits); - bool AddOut(Edge* edge, StringPiece path, uint64_t slash_bits); - bool AddDefault(StringPiece path, string* error); - - /// Reset state. Keeps all nodes and edges, but restores them to the - /// state where we haven't yet examined the disk for dirty state. - void Reset(); - - /// Dump the nodes and Pools (useful for debugging). - void Dump(); - - /// @return the root node(s) of the graph. (Root nodes have no output edges). - /// @param error where to write the error message if somethings went wrong. - vector RootNodes(string* error) const; - vector DefaultNodes(string* error) const; - - /// Mapping of path -> Node. 
- typedef ExternalStringHashMap::Type Paths; - Paths paths_; - - /// All the pools used in the graph. - map pools_; - - /// All the edges of the graph. - vector edges_; - - BindingEnv bindings_; - vector defaults_; - - bool rescript_mode_; - string cleaner ; -}; - -#endif // NINJA_STATE_H_ diff --git a/ninja/src/state_test.cc b/ninja/src/state_test.cc deleted file mode 100644 index 458b5196c34..00000000000 --- a/ninja/src/state_test.cc +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "graph.h" -#include "state.h" -#include "test.h" - -namespace { - -TEST(State, Basic) { - State state; - - EvalString command; - command.AddText("cat "); - command.AddSpecial("in"); - command.AddText(" > "); - command.AddSpecial("out"); - - Rule* rule = new Rule("cat"); - rule->AddBinding("command", command); - state.bindings_.AddRule(rule); - - Edge* edge = state.AddEdge(rule); - state.AddIn(edge, "in1", 0); - state.AddIn(edge, "in2", 0); - state.AddOut(edge, "out", 0); - - EXPECT_EQ("cat in1 in2 > out", edge->EvaluateCommand()); - - EXPECT_FALSE(state.GetNode("in1", 0)->dirty()); - EXPECT_FALSE(state.GetNode("in2", 0)->dirty()); - EXPECT_FALSE(state.GetNode("out", 0)->dirty()); -} - -} // namespace diff --git a/ninja/src/string_piece.h b/ninja/src/string_piece.h deleted file mode 100644 index 7bf9d13e0f7..00000000000 --- a/ninja/src/string_piece.h +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_STRINGPIECE_H_ -#define NINJA_STRINGPIECE_H_ - -#include - -using namespace std; - -#include - -/// StringPiece represents a slice of a string whose memory is managed -/// externally. It is useful for reducing the number of std::strings -/// we need to allocate. -struct StringPiece { - typedef const char* const_iterator; - - StringPiece() : str_(NULL), len_(0) {} - - /// The constructors intentionally allow for implicit conversions. 
- StringPiece(const string& str) : str_(str.data()), len_(str.size()) {} - StringPiece(const char* str) : str_(str), len_(strlen(str)) {} - - StringPiece(const char* str, size_t len) : str_(str), len_(len) {} - - bool operator==(const StringPiece& other) const { - return len_ == other.len_ && memcmp(str_, other.str_, len_) == 0; - } - bool operator!=(const StringPiece& other) const { - return !(*this == other); - } - bool IsSuffix(const string& other) const{ - size_t other_len_ = other.length(); - return other_len_ >= len_ && - memcmp(str_, other.c_str() + other_len_ - len_ , len_) == 0; - } - - bool IsSuffix(const StringPiece& other) const { - size_t other_len_ = other.len_; - return other_len_ >= len_ && - memcmp(str_, other.str_ + other_len_ - len_, len_) == 0; - } - /// Convert the slice into a full-fledged std::string, copying the - /// data into a new string. - string AsString() const { - return len_ ? string(str_, len_) : string(); - } - - const_iterator begin() const { - return str_; - } - - const_iterator end() const { - return str_ + len_; - } - - char operator[](size_t pos) const { - return str_[pos]; - } - - static const StringPiece& getJsSuffix(){ - static const StringPiece js(".js", sizeof(".js") - 1 ); - return js; - } - static const StringPiece& getMjsSuffix() { - static const StringPiece js(".mjs", sizeof(".mjs") - 1); - return js; - } - static const StringPiece& getCjsSuffix() { - static const StringPiece js(".cjs", sizeof(".cjs") - 1); - return js; - } - static const StringPiece& getCmjSuffix() { - static const StringPiece js(".cmj", sizeof(".cmj") - 1); - return js; - } - size_t size() const { - return len_; - } - - const char* str_; - size_t len_; -}; - -#endif // NINJA_STRINGPIECE_H_ diff --git a/ninja/src/string_piece_util.cc b/ninja/src/string_piece_util.cc deleted file mode 100644 index 8e1ecfddd73..00000000000 --- a/ninja/src/string_piece_util.cc +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "string_piece_util.h" - -#include -#include -#include -using namespace std; - -vector SplitStringPiece(StringPiece input, char sep) { - vector elems; - elems.reserve(count(input.begin(), input.end(), sep) + 1); - - StringPiece::const_iterator pos = input.begin(); - - for (;;) { - const char* next_pos = find(pos, input.end(), sep); - if (next_pos == input.end()) { - elems.push_back(StringPiece(pos, input.end() - pos)); - break; - } - elems.push_back(StringPiece(pos, next_pos - pos)); - pos = next_pos + 1; - } - - return elems; -} - -string JoinStringPiece(const vector& list, char sep) { - if (list.size() == 0){ - return ""; - } - - string ret; - - { - size_t cap = list.size() - 1; - for (size_t i = 0; i < list.size(); ++i) { - cap += list[i].len_; - } - ret.reserve(cap); - } - - for (size_t i = 0; i < list.size(); ++i) { - if (i != 0) { - ret += sep; - } - ret.append(list[i].str_, list[i].len_); - } - - return ret; -} - -bool EqualsCaseInsensitiveASCII(StringPiece a, StringPiece b) { - if (a.len_ != b.len_) { - return false; - } - - for (size_t i = 0; i < a.len_; ++i) { - if (ToLowerASCII(a.str_[i]) != ToLowerASCII(b.str_[i])) { - return false; - } - } - - return true; -} diff --git a/ninja/src/string_piece_util.h b/ninja/src/string_piece_util.h deleted file mode 100644 index 2e40b9f3b28..00000000000 --- a/ninja/src/string_piece_util.h +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2017 Google Inc. 
All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_STRINGPIECE_UTIL_H_ -#define NINJA_STRINGPIECE_UTIL_H_ - -#include -#include - -#include "string_piece.h" -using namespace std; - -vector SplitStringPiece(StringPiece input, char sep); - -string JoinStringPiece(const vector& list, char sep); - -inline char ToLowerASCII(char c) { - return (c >= 'A' && c <= 'Z') ? (c + ('a' - 'A')) : c; -} - -bool EqualsCaseInsensitiveASCII(StringPiece a, StringPiece b); - -#endif // NINJA_STRINGPIECE_UTIL_H_ diff --git a/ninja/src/string_piece_util_test.cc b/ninja/src/string_piece_util_test.cc deleted file mode 100644 index 648c6479180..00000000000 --- a/ninja/src/string_piece_util_test.cc +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2017 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "string_piece_util.h" - -#include "test.h" - -TEST(StringPieceUtilTest, SplitStringPiece) { - { - string input("a:b:c"); - vector list = SplitStringPiece(input, ':'); - - EXPECT_EQ(list.size(), 3); - - EXPECT_EQ(list[0], "a"); - EXPECT_EQ(list[1], "b"); - EXPECT_EQ(list[2], "c"); - } - - { - string empty(""); - vector list = SplitStringPiece(empty, ':'); - - EXPECT_EQ(list.size(), 1); - - EXPECT_EQ(list[0], ""); - } - - { - string one("a"); - vector list = SplitStringPiece(one, ':'); - - EXPECT_EQ(list.size(), 1); - - EXPECT_EQ(list[0], "a"); - } - - { - string sep_only(":"); - vector list = SplitStringPiece(sep_only, ':'); - - EXPECT_EQ(list.size(), 2); - - EXPECT_EQ(list[0], ""); - EXPECT_EQ(list[1], ""); - } - - { - string sep(":a:b:c:"); - vector list = SplitStringPiece(sep, ':'); - - EXPECT_EQ(list.size(), 5); - - EXPECT_EQ(list[0], ""); - EXPECT_EQ(list[1], "a"); - EXPECT_EQ(list[2], "b"); - EXPECT_EQ(list[3], "c"); - EXPECT_EQ(list[4], ""); - } -} - -TEST(StringPieceUtilTest, JoinStringPiece) { - { - string input("a:b:c"); - vector list = SplitStringPiece(input, ':'); - - EXPECT_EQ("a:b:c", JoinStringPiece(list, ':')); - EXPECT_EQ("a/b/c", JoinStringPiece(list, '/')); - } - - { - string empty(""); - vector list = SplitStringPiece(empty, ':'); - - EXPECT_EQ("", JoinStringPiece(list, ':')); - } - - { - vector empty_list; - - EXPECT_EQ("", JoinStringPiece(empty_list, ':')); - } - - { - string one("a"); - vector single_list = SplitStringPiece(one, ':'); - - EXPECT_EQ("a", JoinStringPiece(single_list, ':')); - } - - { - string sep(":a:b:c:"); - vector list = SplitStringPiece(sep, ':'); - - EXPECT_EQ(":a:b:c:", JoinStringPiece(list, ':')); - } -} - -TEST(StringPieceUtilTest, ToLowerASCII) { - EXPECT_EQ('a', ToLowerASCII('A')); - EXPECT_EQ('z', ToLowerASCII('Z')); - EXPECT_EQ('a', ToLowerASCII('a')); - EXPECT_EQ('z', ToLowerASCII('z')); - EXPECT_EQ('/', ToLowerASCII('/')); - EXPECT_EQ('1', ToLowerASCII('1')); -} - -TEST(StringPieceUtilTest, 
EqualsCaseInsensitiveASCII) { - EXPECT_TRUE(EqualsCaseInsensitiveASCII("abc", "abc")); - EXPECT_TRUE(EqualsCaseInsensitiveASCII("abc", "ABC")); - EXPECT_TRUE(EqualsCaseInsensitiveASCII("abc", "aBc")); - EXPECT_TRUE(EqualsCaseInsensitiveASCII("AbC", "aBc")); - EXPECT_TRUE(EqualsCaseInsensitiveASCII("", "")); - - EXPECT_FALSE(EqualsCaseInsensitiveASCII("a", "ac")); - EXPECT_FALSE(EqualsCaseInsensitiveASCII("/", "\\")); - EXPECT_FALSE(EqualsCaseInsensitiveASCII("1", "10")); -} diff --git a/ninja/src/subprocess-posix.cc b/ninja/src/subprocess-posix.cc deleted file mode 100644 index fc5543e85f7..00000000000 --- a/ninja/src/subprocess-posix.cc +++ /dev/null @@ -1,351 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "subprocess.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -extern char** environ; - -#include "util.h" - -Subprocess::Subprocess(bool use_console) : fd_(-1), pid_(-1), - use_console_(use_console) { -} - -Subprocess::~Subprocess() { - if (fd_ >= 0) - close(fd_); - // Reap child if forgotten. 
- if (pid_ != -1) - Finish(); -} - -bool Subprocess::Start(SubprocessSet* set, const string& command) { - int output_pipe[2]; - if (pipe(output_pipe) < 0) - Fatal("pipe: %s", strerror(errno)); - fd_ = output_pipe[0]; -#if !defined(USE_PPOLL) - // If available, we use ppoll in DoWork(); otherwise we use pselect - // and so must avoid overly-large FDs. - if (fd_ >= static_cast(FD_SETSIZE)) - Fatal("pipe: %s", strerror(EMFILE)); -#endif // !USE_PPOLL - SetCloseOnExec(fd_); - - posix_spawn_file_actions_t action; - int err = posix_spawn_file_actions_init(&action); - if (err != 0) - Fatal("posix_spawn_file_actions_init: %s", strerror(err)); - - err = posix_spawn_file_actions_addclose(&action, output_pipe[0]); - if (err != 0) - Fatal("posix_spawn_file_actions_addclose: %s", strerror(err)); - - posix_spawnattr_t attr; - err = posix_spawnattr_init(&attr); - if (err != 0) - Fatal("posix_spawnattr_init: %s", strerror(err)); - - short flags = 0; - - flags |= POSIX_SPAWN_SETSIGMASK; - err = posix_spawnattr_setsigmask(&attr, &set->old_mask_); - if (err != 0) - Fatal("posix_spawnattr_setsigmask: %s", strerror(err)); - // Signals which are set to be caught in the calling process image are set to - // default action in the new process image, so no explicit - // POSIX_SPAWN_SETSIGDEF parameter is needed. - - if (!use_console_) { - // Put the child in its own process group, so ctrl-c won't reach it. - flags |= POSIX_SPAWN_SETPGROUP; - // No need to posix_spawnattr_setpgroup(&attr, 0), it's the default. - - // Open /dev/null over stdin. 
- err = posix_spawn_file_actions_addopen(&action, 0, "/dev/null", O_RDONLY, - 0); - if (err != 0) { - Fatal("posix_spawn_file_actions_addopen: %s", strerror(err)); - } - - err = posix_spawn_file_actions_adddup2(&action, output_pipe[1], 1); - if (err != 0) - Fatal("posix_spawn_file_actions_adddup2: %s", strerror(err)); - err = posix_spawn_file_actions_adddup2(&action, output_pipe[1], 2); - if (err != 0) - Fatal("posix_spawn_file_actions_adddup2: %s", strerror(err)); - err = posix_spawn_file_actions_addclose(&action, output_pipe[1]); - if (err != 0) - Fatal("posix_spawn_file_actions_addclose: %s", strerror(err)); - // In the console case, output_pipe is still inherited by the child and - // closed when the subprocess finishes, which then notifies ninja. - } -#ifdef POSIX_SPAWN_USEVFORK - flags |= POSIX_SPAWN_USEVFORK; -#endif - - err = posix_spawnattr_setflags(&attr, flags); - if (err != 0) - Fatal("posix_spawnattr_setflags: %s", strerror(err)); - - const char* spawned_args[] = { "/bin/sh", "-c", command.c_str(), NULL }; - err = posix_spawn(&pid_, "/bin/sh", &action, &attr, - const_cast(spawned_args), environ); - if (err != 0) - Fatal("posix_spawn: %s", strerror(err)); - - err = posix_spawnattr_destroy(&attr); - if (err != 0) - Fatal("posix_spawnattr_destroy: %s", strerror(err)); - err = posix_spawn_file_actions_destroy(&action); - if (err != 0) - Fatal("posix_spawn_file_actions_destroy: %s", strerror(err)); - - close(output_pipe[1]); - return true; -} - -void Subprocess::OnPipeReady() { - char buf[4 << 10]; - ssize_t len = read(fd_, buf, sizeof(buf)); - if (len > 0) { - buf_.append(buf, len); - } else { - if (len < 0) - Fatal("read: %s", strerror(errno)); - close(fd_); - fd_ = -1; - } -} - -ExitStatus Subprocess::Finish() { - assert(pid_ != -1); - int status; - if (waitpid(pid_, &status, 0) < 0) - Fatal("waitpid(%d): %s", pid_, strerror(errno)); - pid_ = -1; - - if (WIFEXITED(status)) { - int exit = WEXITSTATUS(status); - if (exit == 0) - return ExitSuccess; - } 
else if (WIFSIGNALED(status)) { - if (WTERMSIG(status) == SIGINT || WTERMSIG(status) == SIGTERM - || WTERMSIG(status) == SIGHUP) - return ExitInterrupted; - } - return ExitFailure; -} - -bool Subprocess::Done() const { - return fd_ == -1; -} - -const string& Subprocess::GetOutput() const { - return buf_; -} - -int SubprocessSet::interrupted_; - -void SubprocessSet::SetInterruptedFlag(int signum) { - interrupted_ = signum; -} - -void SubprocessSet::HandlePendingInterruption() { - sigset_t pending; - sigemptyset(&pending); - if (sigpending(&pending) == -1) { - perror("ninja: sigpending"); - return; - } - if (sigismember(&pending, SIGINT)) - interrupted_ = SIGINT; - else if (sigismember(&pending, SIGTERM)) - interrupted_ = SIGTERM; - else if (sigismember(&pending, SIGHUP)) - interrupted_ = SIGHUP; -} - -SubprocessSet::SubprocessSet() { - sigset_t set; - sigemptyset(&set); - sigaddset(&set, SIGINT); - sigaddset(&set, SIGTERM); - sigaddset(&set, SIGHUP); - if (sigprocmask(SIG_BLOCK, &set, &old_mask_) < 0) - Fatal("sigprocmask: %s", strerror(errno)); - - struct sigaction act; - memset(&act, 0, sizeof(act)); - act.sa_handler = SetInterruptedFlag; - if (sigaction(SIGINT, &act, &old_int_act_) < 0) - Fatal("sigaction: %s", strerror(errno)); - if (sigaction(SIGTERM, &act, &old_term_act_) < 0) - Fatal("sigaction: %s", strerror(errno)); - if (sigaction(SIGHUP, &act, &old_hup_act_) < 0) - Fatal("sigaction: %s", strerror(errno)); -} - -SubprocessSet::~SubprocessSet() { - Clear(); - - if (sigaction(SIGINT, &old_int_act_, 0) < 0) - Fatal("sigaction: %s", strerror(errno)); - if (sigaction(SIGTERM, &old_term_act_, 0) < 0) - Fatal("sigaction: %s", strerror(errno)); - if (sigaction(SIGHUP, &old_hup_act_, 0) < 0) - Fatal("sigaction: %s", strerror(errno)); - if (sigprocmask(SIG_SETMASK, &old_mask_, 0) < 0) - Fatal("sigprocmask: %s", strerror(errno)); -} - -Subprocess *SubprocessSet::Add(const string& command, bool use_console) { - Subprocess *subprocess = new Subprocess(use_console); - 
if (!subprocess->Start(this, command)) { - delete subprocess; - return 0; - } - running_.push_back(subprocess); - return subprocess; -} - -#ifdef USE_PPOLL -bool SubprocessSet::DoWork() { - vector fds; - nfds_t nfds = 0; - - for (vector::iterator i = running_.begin(); - i != running_.end(); ++i) { - int fd = (*i)->fd_; - if (fd < 0) - continue; - pollfd pfd = { fd, POLLIN | POLLPRI, 0 }; - fds.push_back(pfd); - ++nfds; - } - - interrupted_ = 0; - int ret = ppoll(&fds.front(), nfds, NULL, &old_mask_); - if (ret == -1) { - if (errno != EINTR) { - perror("ninja: ppoll"); - return false; - } - return IsInterrupted(); - } - - HandlePendingInterruption(); - if (IsInterrupted()) - return true; - - nfds_t cur_nfd = 0; - for (vector::iterator i = running_.begin(); - i != running_.end(); ) { - int fd = (*i)->fd_; - if (fd < 0) - continue; - assert(fd == fds[cur_nfd].fd); - if (fds[cur_nfd++].revents) { - (*i)->OnPipeReady(); - if ((*i)->Done()) { - finished_.push(*i); - i = running_.erase(i); - continue; - } - } - ++i; - } - - return IsInterrupted(); -} - -#else // !defined(USE_PPOLL) -bool SubprocessSet::DoWork() { - fd_set set; - int nfds = 0; - FD_ZERO(&set); - - for (vector::iterator i = running_.begin(); - i != running_.end(); ++i) { - int fd = (*i)->fd_; - if (fd >= 0) { - FD_SET(fd, &set); - if (nfds < fd+1) - nfds = fd+1; - } - } - - interrupted_ = 0; - int ret = pselect(nfds, &set, 0, 0, 0, &old_mask_); - if (ret == -1) { - if (errno != EINTR) { - perror("ninja: pselect"); - return false; - } - return IsInterrupted(); - } - - HandlePendingInterruption(); - if (IsInterrupted()) - return true; - - for (vector::iterator i = running_.begin(); - i != running_.end(); ) { - int fd = (*i)->fd_; - if (fd >= 0 && FD_ISSET(fd, &set)) { - (*i)->OnPipeReady(); - if ((*i)->Done()) { - finished_.push(*i); - i = running_.erase(i); - continue; - } - } - ++i; - } - - return IsInterrupted(); -} -#endif // !defined(USE_PPOLL) - -Subprocess* SubprocessSet::NextFinished() { - if 
(finished_.empty()) - return NULL; - Subprocess* subproc = finished_.front(); - finished_.pop(); - return subproc; -} - -void SubprocessSet::Clear() { - for (vector::iterator i = running_.begin(); - i != running_.end(); ++i) - // Since the foreground process is in our process group, it will receive - // the interruption signal (i.e. SIGINT or SIGTERM) at the same time as us. - if (!(*i)->use_console_) - kill(-(*i)->pid_, interrupted_); - for (vector::iterator i = running_.begin(); - i != running_.end(); ++i) - delete *i; - running_.clear(); -} diff --git a/ninja/src/subprocess-win32.cc b/ninja/src/subprocess-win32.cc deleted file mode 100644 index a4a76695248..00000000000 --- a/ninja/src/subprocess-win32.cc +++ /dev/null @@ -1,297 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "subprocess.h" - -#include -#include - -#include - -#include "util.h" - -Subprocess::Subprocess(bool use_console) : child_(NULL) , overlapped_(), - is_reading_(false), - use_console_(use_console) { -} - -Subprocess::~Subprocess() { - if (pipe_) { - if (!CloseHandle(pipe_)) - Win32Fatal("CloseHandle"); - } - // Reap child if forgotten. 
- if (child_) - Finish(); -} - -HANDLE Subprocess::SetupPipe(HANDLE ioport) { - char pipe_name[100]; - snprintf(pipe_name, sizeof(pipe_name), - "\\\\.\\pipe\\ninja_pid%lu_sp%p", GetCurrentProcessId(), this); - - pipe_ = ::CreateNamedPipeA(pipe_name, - PIPE_ACCESS_INBOUND | FILE_FLAG_OVERLAPPED, - PIPE_TYPE_BYTE, - PIPE_UNLIMITED_INSTANCES, - 0, 0, INFINITE, NULL); - if (pipe_ == INVALID_HANDLE_VALUE) - Win32Fatal("CreateNamedPipe"); - - if (!CreateIoCompletionPort(pipe_, ioport, (ULONG_PTR)this, 0)) - Win32Fatal("CreateIoCompletionPort"); - - memset(&overlapped_, 0, sizeof(overlapped_)); - if (!ConnectNamedPipe(pipe_, &overlapped_) && - GetLastError() != ERROR_IO_PENDING) { - Win32Fatal("ConnectNamedPipe"); - } - - // Get the write end of the pipe as a handle inheritable across processes. - HANDLE output_write_handle = - CreateFileA(pipe_name, GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, NULL); - HANDLE output_write_child; - if (!DuplicateHandle(GetCurrentProcess(), output_write_handle, - GetCurrentProcess(), &output_write_child, - 0, TRUE, DUPLICATE_SAME_ACCESS)) { - Win32Fatal("DuplicateHandle"); - } - CloseHandle(output_write_handle); - - return output_write_child; -} - -bool Subprocess::Start(SubprocessSet* set, const string& command) { - HANDLE child_pipe = SetupPipe(set->ioport_); - - SECURITY_ATTRIBUTES security_attributes; - memset(&security_attributes, 0, sizeof(SECURITY_ATTRIBUTES)); - security_attributes.nLength = sizeof(SECURITY_ATTRIBUTES); - security_attributes.bInheritHandle = TRUE; - // Must be inheritable so subprocesses can dup to children. 
- HANDLE nul = - CreateFileA("NUL", GENERIC_READ, - FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, - &security_attributes, OPEN_EXISTING, 0, NULL); - if (nul == INVALID_HANDLE_VALUE) - Fatal("couldn't open nul"); - - STARTUPINFOA startup_info; - memset(&startup_info, 0, sizeof(startup_info)); - startup_info.cb = sizeof(STARTUPINFO); - if (!use_console_) { - startup_info.dwFlags = STARTF_USESTDHANDLES; - startup_info.hStdInput = nul; - startup_info.hStdOutput = child_pipe; - startup_info.hStdError = child_pipe; - } - // In the console case, child_pipe is still inherited by the child and closed - // when the subprocess finishes, which then notifies ninja. - - PROCESS_INFORMATION process_info; - memset(&process_info, 0, sizeof(process_info)); - - // Ninja handles ctrl-c, except for subprocesses in console pools. - DWORD process_flags = use_console_ ? 0 : CREATE_NEW_PROCESS_GROUP; - - // Do not prepend 'cmd /c' on Windows, this breaks command - // lines greater than 8,191 chars. - if (!CreateProcessA(NULL, (char*)command.c_str(), NULL, NULL, - /* inherit handles */ TRUE, process_flags, - NULL, NULL, - &startup_info, &process_info)) { - DWORD error = GetLastError(); - if (error == ERROR_FILE_NOT_FOUND) { - // File (program) not found error is treated as a normal build - // action failure. - if (child_pipe) - CloseHandle(child_pipe); - CloseHandle(pipe_); - CloseHandle(nul); - pipe_ = NULL; - // child_ is already NULL; - buf_ = "CreateProcess failed: The system cannot find the file " - "specified.\n"; - return true; - } else if (error == ERROR_INVALID_PARAMETER) { - // This generally means that the command line was too long. Give extra - // context for this case. - Win32Fatal("CreateProcess", "is the command line too long?"); - } else { - Win32Fatal("CreateProcess"); // pass all other errors to Win32Fatal - } - } - - // Close pipe channel only used by the child. 
- if (child_pipe) - CloseHandle(child_pipe); - CloseHandle(nul); - - CloseHandle(process_info.hThread); - child_ = process_info.hProcess; - - return true; -} - -void Subprocess::OnPipeReady() { - DWORD bytes; - if (!GetOverlappedResult(pipe_, &overlapped_, &bytes, TRUE)) { - if (GetLastError() == ERROR_BROKEN_PIPE) { - CloseHandle(pipe_); - pipe_ = NULL; - return; - } - Win32Fatal("GetOverlappedResult"); - } - - if (is_reading_ && bytes) - buf_.append(overlapped_buf_, bytes); - - memset(&overlapped_, 0, sizeof(overlapped_)); - is_reading_ = true; - if (!::ReadFile(pipe_, overlapped_buf_, sizeof(overlapped_buf_), - &bytes, &overlapped_)) { - if (GetLastError() == ERROR_BROKEN_PIPE) { - CloseHandle(pipe_); - pipe_ = NULL; - return; - } - if (GetLastError() != ERROR_IO_PENDING) - Win32Fatal("ReadFile"); - } - - // Even if we read any bytes in the readfile call, we'll enter this - // function again later and get them at that point. -} - -ExitStatus Subprocess::Finish() { - if (!child_) - return ExitFailure; - - // TODO: add error handling for all of these. - WaitForSingleObject(child_, INFINITE); - - DWORD exit_code = 0; - GetExitCodeProcess(child_, &exit_code); - - CloseHandle(child_); - child_ = NULL; - - return exit_code == 0 ? ExitSuccess : - exit_code == CONTROL_C_EXIT ? 
ExitInterrupted : - ExitFailure; -} - -bool Subprocess::Done() const { - return pipe_ == NULL; -} - -const string& Subprocess::GetOutput() const { - return buf_; -} - -HANDLE SubprocessSet::ioport_; - -SubprocessSet::SubprocessSet() { - ioport_ = ::CreateIoCompletionPort(INVALID_HANDLE_VALUE, NULL, 0, 1); - if (!ioport_) - Win32Fatal("CreateIoCompletionPort"); - if (!SetConsoleCtrlHandler(NotifyInterrupted, TRUE)) - Win32Fatal("SetConsoleCtrlHandler"); -} - -SubprocessSet::~SubprocessSet() { - Clear(); - - SetConsoleCtrlHandler(NotifyInterrupted, FALSE); - CloseHandle(ioport_); -} - -BOOL WINAPI SubprocessSet::NotifyInterrupted(DWORD dwCtrlType) { - if (dwCtrlType == CTRL_C_EVENT || dwCtrlType == CTRL_BREAK_EVENT) { - if (!PostQueuedCompletionStatus(ioport_, 0, 0, NULL)) - Win32Fatal("PostQueuedCompletionStatus"); - return TRUE; - } - - return FALSE; -} - -Subprocess *SubprocessSet::Add(const string& command, bool use_console) { - Subprocess *subprocess = new Subprocess(use_console); - if (!subprocess->Start(this, command)) { - delete subprocess; - return 0; - } - if (subprocess->child_) - running_.push_back(subprocess); - else - finished_.push(subprocess); - return subprocess; -} - -bool SubprocessSet::DoWork() { - DWORD bytes_read; - Subprocess* subproc; - OVERLAPPED* overlapped; - - if (!GetQueuedCompletionStatus(ioport_, &bytes_read, (PULONG_PTR)&subproc, - &overlapped, INFINITE)) { - if (GetLastError() != ERROR_BROKEN_PIPE) - Win32Fatal("GetQueuedCompletionStatus"); - } - - if (!subproc) // A NULL subproc indicates that we were interrupted and is - // delivered by NotifyInterrupted above. 
- return true; - - subproc->OnPipeReady(); - - if (subproc->Done()) { - vector::iterator end = - remove(running_.begin(), running_.end(), subproc); - if (running_.end() != end) { - finished_.push(subproc); - running_.resize(end - running_.begin()); - } - } - - return false; -} - -Subprocess* SubprocessSet::NextFinished() { - if (finished_.empty()) - return NULL; - Subprocess* subproc = finished_.front(); - finished_.pop(); - return subproc; -} - -void SubprocessSet::Clear() { - for (vector::iterator i = running_.begin(); - i != running_.end(); ++i) { - // Since the foreground process is in our process group, it will receive a - // CTRL_C_EVENT or CTRL_BREAK_EVENT at the same time as us. - if ((*i)->child_ && !(*i)->use_console_) { - if (!GenerateConsoleCtrlEvent(CTRL_BREAK_EVENT, - GetProcessId((*i)->child_))) { - Win32Fatal("GenerateConsoleCtrlEvent"); - } - } - } - for (vector::iterator i = running_.begin(); - i != running_.end(); ++i) - delete *i; - running_.clear(); -} diff --git a/ninja/src/subprocess.h b/ninja/src/subprocess.h deleted file mode 100644 index b2d486ca400..00000000000 --- a/ninja/src/subprocess.h +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef NINJA_SUBPROCESS_H_ -#define NINJA_SUBPROCESS_H_ - -#include -#include -#include -using namespace std; - -#ifdef _WIN32 -#include -#else -#include -#endif - -// ppoll() exists on FreeBSD, but only on newer versions. -#ifdef __FreeBSD__ -# include -# if defined USE_PPOLL && __FreeBSD_version < 1002000 -# undef USE_PPOLL -# endif -#endif - -#include "exit_status.h" - -/// Subprocess wraps a single async subprocess. It is entirely -/// passive: it expects the caller to notify it when its fds are ready -/// for reading, as well as call Finish() to reap the child once done() -/// is true. -struct Subprocess { - ~Subprocess(); - - /// Returns ExitSuccess on successful process exit, ExitInterrupted if - /// the process was interrupted, ExitFailure if it otherwise failed. - ExitStatus Finish(); - - bool Done() const; - - const string& GetOutput() const; - - private: - Subprocess(bool use_console); - bool Start(struct SubprocessSet* set, const string& command); - void OnPipeReady(); - - string buf_; - -#ifdef _WIN32 - /// Set up pipe_ as the parent-side pipe of the subprocess; return the - /// other end of the pipe, usable in the child process. - HANDLE SetupPipe(HANDLE ioport); - - HANDLE child_; - HANDLE pipe_; - OVERLAPPED overlapped_; - char overlapped_buf_[4 << 10]; - bool is_reading_; -#else - int fd_; - pid_t pid_; -#endif - bool use_console_; - - friend struct SubprocessSet; -}; - -/// SubprocessSet runs a ppoll/pselect() loop around a set of Subprocesses. -/// DoWork() waits for any state change in subprocesses; finished_ -/// is a queue of subprocesses as they finish. 
-struct SubprocessSet { - SubprocessSet(); - ~SubprocessSet(); - - Subprocess* Add(const string& command, bool use_console = false); - bool DoWork(); - Subprocess* NextFinished(); - void Clear(); - - vector running_; - queue finished_; - -#ifdef _WIN32 - static BOOL WINAPI NotifyInterrupted(DWORD dwCtrlType); - static HANDLE ioport_; -#else - static void SetInterruptedFlag(int signum); - static void HandlePendingInterruption(); - /// Store the signal number that causes the interruption. - /// 0 if not interruption. - static int interrupted_; - - static bool IsInterrupted() { return interrupted_ != 0; } - - struct sigaction old_int_act_; - struct sigaction old_term_act_; - struct sigaction old_hup_act_; - sigset_t old_mask_; -#endif -}; - -#endif // NINJA_SUBPROCESS_H_ diff --git a/ninja/src/subprocess_test.cc b/ninja/src/subprocess_test.cc deleted file mode 100644 index 6e487dbde80..00000000000 --- a/ninja/src/subprocess_test.cc +++ /dev/null @@ -1,261 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "subprocess.h" - -#include "test.h" - -#ifndef _WIN32 -// SetWithLots need setrlimit. 
-#include -#include -#include -#include -#endif - -namespace { - -#ifdef _WIN32 -const char* kSimpleCommand = "cmd /c dir \\"; -#else -const char* kSimpleCommand = "ls /"; -#endif - -struct SubprocessTest : public testing::Test { - SubprocessSet subprocs_; -}; - -} // anonymous namespace - -// Run a command that fails and emits to stderr. -TEST_F(SubprocessTest, BadCommandStderr) { - Subprocess* subproc = subprocs_.Add("cmd /c ninja_no_such_command"); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - // Pretend we discovered that stderr was ready for writing. - subprocs_.DoWork(); - } - - EXPECT_EQ(ExitFailure, subproc->Finish()); - EXPECT_NE("", subproc->GetOutput()); -} - -// Run a command that does not exist -TEST_F(SubprocessTest, NoSuchCommand) { - Subprocess* subproc = subprocs_.Add("ninja_no_such_command"); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - // Pretend we discovered that stderr was ready for writing. - subprocs_.DoWork(); - } - - EXPECT_EQ(ExitFailure, subproc->Finish()); - EXPECT_NE("", subproc->GetOutput()); -#ifdef _WIN32 - ASSERT_EQ("CreateProcess failed: The system cannot find the file " - "specified.\n", subproc->GetOutput()); -#endif -} - -#ifndef _WIN32 - -TEST_F(SubprocessTest, InterruptChild) { - Subprocess* subproc = subprocs_.Add("kill -INT $$"); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - subprocs_.DoWork(); - } - - EXPECT_EQ(ExitInterrupted, subproc->Finish()); -} - -TEST_F(SubprocessTest, InterruptParent) { - Subprocess* subproc = subprocs_.Add("kill -INT $PPID ; sleep 1"); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - bool interrupted = subprocs_.DoWork(); - if (interrupted) - return; - } - - ASSERT_FALSE("We should have been interrupted"); -} - -TEST_F(SubprocessTest, InterruptChildWithSigTerm) { - Subprocess* subproc = subprocs_.Add("kill -TERM $$"); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - 
subprocs_.DoWork(); - } - - EXPECT_EQ(ExitInterrupted, subproc->Finish()); -} - -TEST_F(SubprocessTest, InterruptParentWithSigTerm) { - Subprocess* subproc = subprocs_.Add("kill -TERM $PPID ; sleep 1"); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - bool interrupted = subprocs_.DoWork(); - if (interrupted) - return; - } - - ASSERT_FALSE("We should have been interrupted"); -} - -TEST_F(SubprocessTest, InterruptChildWithSigHup) { - Subprocess* subproc = subprocs_.Add("kill -HUP $$"); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - subprocs_.DoWork(); - } - - EXPECT_EQ(ExitInterrupted, subproc->Finish()); -} - -TEST_F(SubprocessTest, InterruptParentWithSigHup) { - Subprocess* subproc = subprocs_.Add("kill -HUP $PPID ; sleep 1"); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - bool interrupted = subprocs_.DoWork(); - if (interrupted) - return; - } - - ASSERT_FALSE("We should have been interrupted"); -} - -TEST_F(SubprocessTest, Console) { - // Skip test if we don't have the console ourselves. 
- if (isatty(0) && isatty(1) && isatty(2)) { - Subprocess* subproc = - subprocs_.Add("test -t 0 -a -t 1 -a -t 2", /*use_console=*/true); - ASSERT_NE((Subprocess*)0, subproc); - - while (!subproc->Done()) { - subprocs_.DoWork(); - } - - EXPECT_EQ(ExitSuccess, subproc->Finish()); - } -} - -#endif - -TEST_F(SubprocessTest, SetWithSingle) { - Subprocess* subproc = subprocs_.Add(kSimpleCommand); - ASSERT_NE((Subprocess *) 0, subproc); - - while (!subproc->Done()) { - subprocs_.DoWork(); - } - ASSERT_EQ(ExitSuccess, subproc->Finish()); - ASSERT_NE("", subproc->GetOutput()); - - ASSERT_EQ(1u, subprocs_.finished_.size()); -} - -TEST_F(SubprocessTest, SetWithMulti) { - Subprocess* processes[3]; - const char* kCommands[3] = { - kSimpleCommand, -#ifdef _WIN32 - "cmd /c echo hi", - "cmd /c time /t", -#else - "id -u", - "pwd", -#endif - }; - - for (int i = 0; i < 3; ++i) { - processes[i] = subprocs_.Add(kCommands[i]); - ASSERT_NE((Subprocess *) 0, processes[i]); - } - - ASSERT_EQ(3u, subprocs_.running_.size()); - for (int i = 0; i < 3; ++i) { - ASSERT_FALSE(processes[i]->Done()); - ASSERT_EQ("", processes[i]->GetOutput()); - } - - while (!processes[0]->Done() || !processes[1]->Done() || - !processes[2]->Done()) { - ASSERT_GT(subprocs_.running_.size(), 0u); - subprocs_.DoWork(); - } - - ASSERT_EQ(0u, subprocs_.running_.size()); - ASSERT_EQ(3u, subprocs_.finished_.size()); - - for (int i = 0; i < 3; ++i) { - ASSERT_EQ(ExitSuccess, processes[i]->Finish()); - ASSERT_NE("", processes[i]->GetOutput()); - delete processes[i]; - } -} - -#if defined(USE_PPOLL) -TEST_F(SubprocessTest, SetWithLots) { - // Arbitrary big number; needs to be over 1024 to confirm we're no longer - // hostage to pselect. - const unsigned kNumProcs = 1025; - - // Make sure [ulimit -n] isn't going to stop us from working. 
- rlimit rlim; - ASSERT_EQ(0, getrlimit(RLIMIT_NOFILE, &rlim)); - if (rlim.rlim_cur < kNumProcs) { - printf("Raise [ulimit -n] above %u (currently %lu) to make this test go\n", - kNumProcs, rlim.rlim_cur); - return; - } - - vector procs; - for (size_t i = 0; i < kNumProcs; ++i) { - Subprocess* subproc = subprocs_.Add("/bin/echo"); - ASSERT_NE((Subprocess *) 0, subproc); - procs.push_back(subproc); - } - while (!subprocs_.running_.empty()) - subprocs_.DoWork(); - for (size_t i = 0; i < procs.size(); ++i) { - ASSERT_EQ(ExitSuccess, procs[i]->Finish()); - ASSERT_NE("", procs[i]->GetOutput()); - } - ASSERT_EQ(kNumProcs, subprocs_.finished_.size()); -} -#endif // !__APPLE__ && !_WIN32 - -// TODO: this test could work on Windows, just not sure how to simply -// read stdin. -#ifndef _WIN32 -// Verify that a command that attempts to read stdin correctly thinks -// that stdin is closed. -TEST_F(SubprocessTest, ReadStdin) { - Subprocess* subproc = subprocs_.Add("cat -"); - while (!subproc->Done()) { - subprocs_.DoWork(); - } - ASSERT_EQ(ExitSuccess, subproc->Finish()); - ASSERT_EQ(1u, subprocs_.finished_.size()); -} -#endif // _WIN32 diff --git a/ninja/src/test.cc b/ninja/src/test.cc deleted file mode 100644 index a9816bc232d..00000000000 --- a/ninja/src/test.cc +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifdef _WIN32 -#include // Has to be before util.h is included. 
-#endif - -#include "test.h" - -#include - -#include -#include -#ifdef _WIN32 -#include -#else -#include -#endif - -#include "build_log.h" -#include "graph.h" -#include "manifest_parser.h" -#include "util.h" - -namespace { - -#ifdef _WIN32 -#ifndef _mktemp_s -/// mingw has no mktemp. Implement one with the same type as the one -/// found in the Windows API. -int _mktemp_s(char* templ) { - char* ofs = strchr(templ, 'X'); - sprintf(ofs, "%d", rand() % 1000000); - return 0; -} -#endif - -/// Windows has no mkdtemp. Implement it in terms of _mktemp_s. -char* mkdtemp(char* name_template) { - int err = _mktemp_s(name_template); - if (err < 0) { - perror("_mktemp_s"); - return NULL; - } - - err = _mkdir(name_template); - if (err < 0) { - perror("mkdir"); - return NULL; - } - - return name_template; -} -#endif // _WIN32 - -string GetSystemTempDir() { -#ifdef _WIN32 - char buf[1024]; - if (!GetTempPath(sizeof(buf), buf)) - return ""; - return buf; -#else - const char* tempdir = getenv("TMPDIR"); - if (tempdir) - return tempdir; - return "/tmp"; -#endif -} - -} // anonymous namespace - -StateTestWithBuiltinRules::StateTestWithBuiltinRules() { - AddCatRule(&state_); -} - -void StateTestWithBuiltinRules::AddCatRule(State* state) { - AssertParse(state, -"rule cat\n" -" command = cat $in > $out\n"); -} - -Node* StateTestWithBuiltinRules::GetNode(const string& path) { - EXPECT_FALSE(strpbrk(path.c_str(), "/\\")); - return state_.GetNode(path, 0); -} - -void AssertParse(State* state, const char* input, - ManifestParserOptions opts) { - ManifestParser parser(state, NULL, opts); - string err; - EXPECT_TRUE(parser.ParseTest(input, &err)); - ASSERT_EQ("", err); - VerifyGraph(*state); -} - -void AssertHash(const char* expected, uint64_t actual) { - ASSERT_EQ(BuildLog::LogEntry::HashCommand(expected), actual); -} - -void VerifyGraph(const State& state) { - for (vector::const_iterator e = state.edges_.begin(); - e != state.edges_.end(); ++e) { - // All edges need at least one output. 
- EXPECT_FALSE((*e)->outputs_.empty()); - // Check that the edge's inputs have the edge as out-edge. - for (vector::const_iterator in_node = (*e)->inputs_.begin(); - in_node != (*e)->inputs_.end(); ++in_node) { - const vector& out_edges = (*in_node)->out_edges(); - EXPECT_NE(find(out_edges.begin(), out_edges.end(), *e), - out_edges.end()); - } - // Check that the edge's outputs have the edge as in-edge. - for (vector::const_iterator out_node = (*e)->outputs_.begin(); - out_node != (*e)->outputs_.end(); ++out_node) { - EXPECT_EQ((*out_node)->in_edge(), *e); - } - } - - // The union of all in- and out-edges of each nodes should be exactly edges_. - set node_edge_set; - for (State::Paths::const_iterator p = state.paths_.begin(); - p != state.paths_.end(); ++p) { - const Node* n = p->second; - if (n->in_edge()) - node_edge_set.insert(n->in_edge()); - node_edge_set.insert(n->out_edges().begin(), n->out_edges().end()); - } - set edge_set(state.edges_.begin(), state.edges_.end()); - EXPECT_EQ(node_edge_set, edge_set); -} - -void VirtualFileSystem::Create(const string& path, - const string& contents) { - files_[path].mtime = now_; - files_[path].contents = contents; - files_created_.insert(path); -} - -TimeStamp VirtualFileSystem::Stat(const string& path, string* err) const { - FileMap::const_iterator i = files_.find(path); - if (i != files_.end()) { - *err = i->second.stat_error; - return i->second.mtime; - } - return 0; -} - -bool VirtualFileSystem::WriteFile(const string& path, const string& contents) { - Create(path, contents); - return true; -} - -bool VirtualFileSystem::MakeDir(const string& path) { - directories_made_.push_back(path); - return true; // success -} - -FileReader::Status VirtualFileSystem::ReadFile(const string& path, - string* contents, - string* err) { - files_read_.push_back(path); - FileMap::iterator i = files_.find(path); - if (i != files_.end()) { - *contents = i->second.contents; - return Okay; - } - *err = strerror(ENOENT); - return NotFound; 
-} - -int VirtualFileSystem::RemoveFile(const string& path) { - if (find(directories_made_.begin(), directories_made_.end(), path) - != directories_made_.end()) - return -1; - FileMap::iterator i = files_.find(path); - if (i != files_.end()) { - files_.erase(i); - files_removed_.insert(path); - return 0; - } else { - return 1; - } -} - -void ScopedTempDir::CreateAndEnter(const string& name) { - // First change into the system temp dir and save it for cleanup. - start_dir_ = GetSystemTempDir(); - if (start_dir_.empty()) - Fatal("couldn't get system temp dir"); - if (chdir(start_dir_.c_str()) < 0) - Fatal("chdir: %s", strerror(errno)); - - // Create a temporary subdirectory of that. - char name_template[1024]; - strcpy(name_template, name.c_str()); - strcat(name_template, "-XXXXXX"); - char* tempname = mkdtemp(name_template); - if (!tempname) - Fatal("mkdtemp: %s", strerror(errno)); - temp_dir_name_ = tempname; - - // chdir into the new temporary directory. - if (chdir(temp_dir_name_.c_str()) < 0) - Fatal("chdir: %s", strerror(errno)); -} - -void ScopedTempDir::Cleanup() { - if (temp_dir_name_.empty()) - return; // Something went wrong earlier. - - // Move out of the directory we're about to clobber. - if (chdir(start_dir_.c_str()) < 0) - Fatal("chdir: %s", strerror(errno)); - -#ifdef _WIN32 - string command = "rmdir /s /q " + temp_dir_name_; -#else - string command = "rm -rf " + temp_dir_name_; -#endif - if (system(command.c_str()) < 0) - Fatal("system: %s", strerror(errno)); - - temp_dir_name_.clear(); -} diff --git a/ninja/src/test.h b/ninja/src/test.h deleted file mode 100644 index 6af17b3f90a..00000000000 --- a/ninja/src/test.h +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_TEST_H_ -#define NINJA_TEST_H_ - -#include "disk_interface.h" -#include "manifest_parser.h" -#include "state.h" -#include "util.h" - -// A tiny testing framework inspired by googletest, but much simpler and -// faster to compile. It supports most things commonly used from googltest. The -// most noticeable things missing: EXPECT_* and ASSERT_* don't support -// streaming notes to them with operator<<, and for failing tests the lhs and -// rhs are not printed. That's so that this header does not have to include -// sstream, which slows down building ninja_test almost 20%. -namespace testing { -class Test { - bool failed_; - int assertion_failures_; - public: - Test() : failed_(false), assertion_failures_(0) {} - virtual ~Test() {} - virtual void SetUp() {} - virtual void TearDown() {} - virtual void Run() = 0; - - bool Failed() const { return failed_; } - int AssertionFailures() const { return assertion_failures_; } - void AddAssertionFailure() { assertion_failures_++; } - bool Check(bool condition, const char* file, int line, const char* error); -}; -} - -void RegisterTest(testing::Test* (*)(), const char*); - -extern testing::Test* g_current_test; -#define TEST_F_(x, y, name) \ - struct y : public x { \ - static testing::Test* Create() { return g_current_test = new y; } \ - virtual void Run(); \ - }; \ - struct Register##y { \ - Register##y() { RegisterTest(y::Create, name); } \ - }; \ - Register##y g_register_##y; \ - void y::Run() - -#define TEST_F(x, y) TEST_F_(x, x##y, #x "." 
#y) -#define TEST(x, y) TEST_F_(testing::Test, x##y, #x "." #y) - -#define EXPECT_EQ(a, b) \ - g_current_test->Check(a == b, __FILE__, __LINE__, #a " == " #b) -#define EXPECT_NE(a, b) \ - g_current_test->Check(a != b, __FILE__, __LINE__, #a " != " #b) -#define EXPECT_GT(a, b) \ - g_current_test->Check(a > b, __FILE__, __LINE__, #a " > " #b) -#define EXPECT_LT(a, b) \ - g_current_test->Check(a < b, __FILE__, __LINE__, #a " < " #b) -#define EXPECT_GE(a, b) \ - g_current_test->Check(a >= b, __FILE__, __LINE__, #a " >= " #b) -#define EXPECT_LE(a, b) \ - g_current_test->Check(a <= b, __FILE__, __LINE__, #a " <= " #b) -#define EXPECT_TRUE(a) \ - g_current_test->Check(static_cast(a), __FILE__, __LINE__, #a) -#define EXPECT_FALSE(a) \ - g_current_test->Check(!static_cast(a), __FILE__, __LINE__, #a) - -#define ASSERT_EQ(a, b) \ - if (!EXPECT_EQ(a, b)) { g_current_test->AddAssertionFailure(); return; } -#define ASSERT_NE(a, b) \ - if (!EXPECT_NE(a, b)) { g_current_test->AddAssertionFailure(); return; } -#define ASSERT_GT(a, b) \ - if (!EXPECT_GT(a, b)) { g_current_test->AddAssertionFailure(); return; } -#define ASSERT_LT(a, b) \ - if (!EXPECT_LT(a, b)) { g_current_test->AddAssertionFailure(); return; } -#define ASSERT_GE(a, b) \ - if (!EXPECT_GE(a, b)) { g_current_test->AddAssertionFailure(); return; } -#define ASSERT_LE(a, b) \ - if (!EXPECT_LE(a, b)) { g_current_test->AddAssertionFailure(); return; } -#define ASSERT_TRUE(a) \ - if (!EXPECT_TRUE(a)) { g_current_test->AddAssertionFailure(); return; } -#define ASSERT_FALSE(a) \ - if (!EXPECT_FALSE(a)) { g_current_test->AddAssertionFailure(); return; } -#define ASSERT_NO_FATAL_FAILURE(a) \ - { \ - int fail_count = g_current_test->AssertionFailures(); \ - a; \ - if (fail_count != g_current_test->AssertionFailures()) { \ - g_current_test->AddAssertionFailure(); \ - return; \ - } \ - } - -// Support utilities for tests. - -struct Node; - -/// A base test fixture that includes a State object with a -/// builtin "cat" rule. 
-struct StateTestWithBuiltinRules : public testing::Test { - StateTestWithBuiltinRules(); - - /// Add a "cat" rule to \a state. Used by some tests; it's - /// otherwise done by the ctor to state_. - void AddCatRule(State* state); - - /// Short way to get a Node by its path from state_. - Node* GetNode(const string& path); - - State state_; -}; - -void AssertParse(State* state, const char* input, - ManifestParserOptions = ManifestParserOptions()); -void AssertHash(const char* expected, uint64_t actual); -void VerifyGraph(const State& state); - -/// An implementation of DiskInterface that uses an in-memory representation -/// of disk state. It also logs file accesses and directory creations -/// so it can be used by tests to verify disk access patterns. -struct VirtualFileSystem : public DiskInterface { - VirtualFileSystem() : now_(1) {} - - /// "Create" a file with contents. - void Create(const string& path, const string& contents); - - /// Tick "time" forwards; subsequent file operations will be newer than - /// previous ones. - int Tick() { - return ++now_; - } - - // DiskInterface - virtual TimeStamp Stat(const string& path, string* err) const; - virtual bool WriteFile(const string& path, const string& contents); - virtual bool MakeDir(const string& path); - virtual Status ReadFile(const string& path, string* contents, string* err); - virtual int RemoveFile(const string& path); - - /// An entry for a single in-memory file. - struct Entry { - int mtime; - string stat_error; // If mtime is -1. - string contents; - }; - - vector directories_made_; - vector files_read_; - typedef map FileMap; - FileMap files_; - set files_removed_; - set files_created_; - - /// A simple fake timestamp for file operations. - int now_; -}; - -struct ScopedTempDir { - /// Create a temporary directory and chdir into it. - void CreateAndEnter(const string& name); - - /// Clean up the temporary directory. - void Cleanup(); - - /// The temp directory containing our dir. 
- string start_dir_; - /// The subdirectory name for our dir, or empty if it hasn't been set up. - string temp_dir_name_; -}; - -#endif // NINJA_TEST_H_ diff --git a/ninja/src/timestamp.h b/ninja/src/timestamp.h deleted file mode 100644 index 6a7ccd0b068..00000000000 --- a/ninja/src/timestamp.h +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_TIMESTAMP_H_ -#define NINJA_TIMESTAMP_H_ - -#ifdef _WIN32 -#include "win32port.h" -#else -#ifndef __STDC_FORMAT_MACROS -#define __STDC_FORMAT_MACROS -#endif -#include -#endif - -// When considering file modification times we only care to compare -// them against one another -- we never convert them to an absolute -// real time. On POSIX we use timespec (seconds&nanoseconds since epoch) -// and on Windows we use a different value. Both fit in an int64. -typedef int64_t TimeStamp; - -#endif // NINJA_TIMESTAMP_H_ diff --git a/ninja/src/util.cc b/ninja/src/util.cc deleted file mode 100644 index faf43cc3c96..00000000000 --- a/ninja/src/util.cc +++ /dev/null @@ -1,684 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "util.h" - -#ifdef __CYGWIN__ -#include -#include -#elif defined( _WIN32) -#include -#include -#include -#endif - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#ifndef _WIN32 -#include -#include -#endif - -#include - -#if defined(__APPLE__) || defined(__FreeBSD__) -#include -#elif defined(__SVR4) && defined(__sun) -#include -#include -#elif defined(_AIX) -#include -#elif defined(linux) || defined(__GLIBC__) -#include -#endif - -#include "edit_distance.h" -#include "metrics.h" - - -AnsiType GetAnsiType(){ - static int initialized = -1; - if(initialized != -1){ - return (AnsiType) initialized; - } else { - const char* tmp = getenv("NINJA_ANSI_FORCED"); - if(tmp) { - if(strcmp(tmp,"0") == 0 || strcmp(tmp,"false") == 0){ - initialized = COLOR_NO; - } else { - initialized = COLOR_FORCE; - } - } else { - initialized = COLOR_UNKOWN; - } - return (AnsiType) initialized; - } -} - -bool ShouldBeColorFul(bool terminal){ - switch(GetAnsiType()){ - case COLOR_NO: return false; - case COLOR_FORCE: return true; - default: return terminal; - } -} - -bool IgnoreGenerator(){ - static int initialized = -1; - if(initialized != -1){ - return initialized == 0; - } else { - const char* tmp = getenv("NINJA_IGNORE_GENERATOR"); - if(tmp){ - if(strcmp(tmp,"0")==0 || strcmp(tmp,"false")==0){ - initialized = 1; - } else { - initialized = 0; - } - } else { - initialized = 1; - } - return initialized == 0; - } -} -void Fatal(const char* msg, ...) 
{ - va_list ap; - if (GetAnsiType()){ - fprintf(stderr, "\x1b[31m" "ninja: fatal: " "\x1b[0m"); - } else { - fprintf(stderr, "ninja: fatal: "); - } - - va_start(ap, msg); - vfprintf(stderr, msg, ap); - va_end(ap); - fprintf(stderr, "\n"); -#ifdef _WIN32 - // On Windows, some tools may inject extra threads. - // exit() may block on locks held by those threads, so forcibly exit. - fflush(stderr); - fflush(stdout); - ExitProcess(1); -#else - exit(1); -#endif -} - -void Warning(const char* msg, ...) { - va_list ap; - if (GetAnsiType()){ - fprintf(stderr, "\x1b[31m" "ninja: warning: " "\x1b[0m"); - } else { - fprintf(stderr, "ninja: warning: "); - } - va_start(ap, msg); - vfprintf(stderr, msg, ap); - va_end(ap); - fprintf(stderr, "\n"); -} - -void Error(const char* msg, ...) { - va_list ap; - if (GetAnsiType()){ - fprintf(stderr, "\x1b[31m" "rescript: error: " "\x1b[0m"); - } else { - fprintf(stderr, "rescript: error: "); - } - va_start(ap, msg); - vfprintf(stderr, msg, ap); - va_end(ap); - fprintf(stderr, "\n"); -} - -bool CanonicalizePath(string* path, uint64_t* slash_bits, string* err) { - METRIC_RECORD("canonicalize str"); - size_t len = path->size(); - char* str = 0; - if (len > 0) - str = &(*path)[0]; - if (!CanonicalizePath(str, &len, slash_bits, err)) - return false; - path->resize(len); - return true; -} - -static bool IsPathSeparator(char c) { -#ifdef _WIN32 - return c == '/' || c == '\\'; -#else - return c == '/'; -#endif -} - -bool CanonicalizePath(char* path, size_t* len, uint64_t* slash_bits, - string* err) { - // WARNING: this function is performance-critical; please benchmark - // any changes you make to it. 
- METRIC_RECORD("canonicalize path"); - if (*len == 0) { - *err = "empty path"; - return false; - } - - const int kMaxPathComponents = 60; - char* components[kMaxPathComponents]; - int component_count = 0; - - char* start = path; - char* dst = start; - const char* src = start; - const char* end = start + *len; - - if (IsPathSeparator(*src)) { -#ifdef _WIN32 - - // network path starts with // - if (*len > 1 && IsPathSeparator(*(src + 1))) { - src += 2; - dst += 2; - } else { - ++src; - ++dst; - } -#else - ++src; - ++dst; -#endif - } - - while (src < end) { - if (*src == '.') { - if (src + 1 == end || IsPathSeparator(src[1])) { - // '.' component; eliminate. - src += 2; - continue; - } else if (src[1] == '.' && (src + 2 == end || IsPathSeparator(src[2]))) { - // '..' component. Back up if possible. - if (component_count > 0) { - dst = components[component_count - 1]; - src += 3; - --component_count; - } else { - *dst++ = *src++; - *dst++ = *src++; - *dst++ = *src++; - } - continue; - } - } - - if (IsPathSeparator(*src)) { - src++; - continue; - } - - if (component_count == kMaxPathComponents) - Fatal("path has too many components : %s", path); - components[component_count] = dst; - ++component_count; - - while (src != end && !IsPathSeparator(*src)) - *dst++ = *src++; - *dst++ = *src++; // Copy '/' or final \0 character as well. 
- } - - if (dst == start) { - *dst++ = '.'; - *dst++ = '\0'; - } - - *len = dst - start - 1; -#ifdef _WIN32 - uint64_t bits = 0; - uint64_t bits_mask = 1; - - for (char* c = start; c < start + *len; ++c) { - switch (*c) { - case '\\': - bits |= bits_mask; - *c = '/'; - NINJA_FALLTHROUGH; - case '/': - bits_mask <<= 1; - } - } - - *slash_bits = bits; -#else - *slash_bits = 0; -#endif - return true; -} - -static inline bool IsKnownShellSafeCharacter(char ch) { - if ('A' <= ch && ch <= 'Z') return true; - if ('a' <= ch && ch <= 'z') return true; - if ('0' <= ch && ch <= '9') return true; - - switch (ch) { - case '_': - case '+': - case '-': - case '.': - case '/': - return true; - default: - return false; - } -} - -static inline bool IsKnownWin32SafeCharacter(char ch) { - switch (ch) { - case ' ': - case '"': - return false; - default: - return true; - } -} - -static inline bool StringNeedsShellEscaping(const string& input) { - for (size_t i = 0; i < input.size(); ++i) { - if (!IsKnownShellSafeCharacter(input[i])) return true; - } - return false; -} - -static inline bool StringNeedsWin32Escaping(const string& input) { - for (size_t i = 0; i < input.size(); ++i) { - if (!IsKnownWin32SafeCharacter(input[i])) return true; - } - return false; -} - -void GetShellEscapedString(const string& input, string* result) { - assert(result); - - if (!StringNeedsShellEscaping(input)) { - result->append(input); - return; - } - - const char kQuote = '\''; - const char kEscapeSequence[] = "'\\'"; - - result->push_back(kQuote); - - string::const_iterator span_begin = input.begin(); - for (string::const_iterator it = input.begin(), end = input.end(); it != end; - ++it) { - if (*it == kQuote) { - result->append(span_begin, it); - result->append(kEscapeSequence); - span_begin = it; - } - } - result->append(span_begin, input.end()); - result->push_back(kQuote); -} - - -void GetWin32EscapedString(const string& input, string* result) { - assert(result); - if (!StringNeedsWin32Escaping(input)) 
{ - result->append(input); - return; - } - - const char kQuote = '"'; - const char kBackslash = '\\'; - - result->push_back(kQuote); - size_t consecutive_backslash_count = 0; - string::const_iterator span_begin = input.begin(); - for (string::const_iterator it = input.begin(), end = input.end(); it != end; - ++it) { - switch (*it) { - case kBackslash: - ++consecutive_backslash_count; - break; - case kQuote: - result->append(span_begin, it); - result->append(consecutive_backslash_count + 1, kBackslash); - span_begin = it; - consecutive_backslash_count = 0; - break; - default: - consecutive_backslash_count = 0; - break; - } - } - result->append(span_begin, input.end()); - result->append(consecutive_backslash_count, kBackslash); - result->push_back(kQuote); -} - -int ReadFile(const string& path, string* contents, string* err) { -#ifdef _WIN32 - // This makes a ninja run on a set of 1500 manifest files about 4% faster - // than using the generic fopen code below. - err->clear(); - HANDLE f = ::CreateFileA(path.c_str(), GENERIC_READ, FILE_SHARE_READ, NULL, - OPEN_EXISTING, FILE_FLAG_SEQUENTIAL_SCAN, NULL); - if (f == INVALID_HANDLE_VALUE) { - err->assign(GetLastErrorString()); - return -ENOENT; - } - - for (;;) { - DWORD len; - char buf[64 << 10]; - if (!::ReadFile(f, buf, sizeof(buf), &len, NULL)) { - err->assign(GetLastErrorString()); - contents->clear(); - return -1; - } - if (len == 0) - break; - contents->append(buf, len); - } - ::CloseHandle(f); - return 0; -#else - FILE* f = fopen(path.c_str(), "rb"); - if (!f) { - err->assign(strerror(errno)); - return -errno; - } - - struct stat st; - if (fstat(fileno(f), &st) < 0) { - err->assign(strerror(errno)); - fclose(f); - return -errno; - } - - // +1 is for the resize in ManifestParser::Load - contents->reserve(st.st_size + 1); - - char buf[64 << 10]; - size_t len; - while (!feof(f) && (len = fread(buf, 1, sizeof(buf), f)) > 0) { - contents->append(buf, len); - } - if (ferror(f)) { - err->assign(strerror(errno)); // XXX 
errno? - contents->clear(); - fclose(f); - return -errno; - } - fclose(f); - return 0; -#endif -} - -void SetCloseOnExec(int fd) { -#ifndef _WIN32 - int flags = fcntl(fd, F_GETFD); - if (flags < 0) { - perror("fcntl(F_GETFD)"); - } else { - if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) - perror("fcntl(F_SETFD)"); - } -#else - HANDLE hd = (HANDLE) _get_osfhandle(fd); - if (! SetHandleInformation(hd, HANDLE_FLAG_INHERIT, 0)) { - fprintf(stderr, "SetHandleInformation(): %s", GetLastErrorString().c_str()); - } -#endif // ! _WIN32 -} - - -const char* SpellcheckStringV(const string& text, - const vector& words) { - const bool kAllowReplacements = true; - const int kMaxValidEditDistance = 3; - - int min_distance = kMaxValidEditDistance + 1; - const char* result = NULL; - for (vector::const_iterator i = words.begin(); - i != words.end(); ++i) { - int distance = EditDistance(*i, text, kAllowReplacements, - kMaxValidEditDistance); - if (distance < min_distance) { - min_distance = distance; - result = *i; - } - } - return result; -} - -const char* SpellcheckString(const char* text, ...) { - // Note: This takes a const char* instead of a string& because using - // va_start() with a reference parameter is undefined behavior. 
- va_list ap; - va_start(ap, text); - vector words; - const char* word; - while ((word = va_arg(ap, const char*))) - words.push_back(word); - va_end(ap); - return SpellcheckStringV(text, words); -} - -#ifdef _WIN32 -string GetLastErrorString() { - DWORD err = GetLastError(); - - char* msg_buf; - FormatMessageA( - FORMAT_MESSAGE_ALLOCATE_BUFFER | - FORMAT_MESSAGE_FROM_SYSTEM | - FORMAT_MESSAGE_IGNORE_INSERTS, - NULL, - err, - MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), - (char*)&msg_buf, - 0, - NULL); - string msg = msg_buf; - LocalFree(msg_buf); - return msg; -} - -void Win32Fatal(const char* function, const char* hint) { - if (hint) { - Fatal("%s: %s (%s)", function, GetLastErrorString().c_str(), hint); - } else { - Fatal("%s: %s", function, GetLastErrorString().c_str()); - } -} -#endif - -bool islatinalpha(int c) { - // isalpha() is locale-dependent. - return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'); -} - -string StripAnsiEscapeCodes(const string& in) { - string stripped; - stripped.reserve(in.size()); - - for (size_t i = 0; i < in.size(); ++i) { - if (in[i] != '\33') { - // Not an escape code. - stripped.push_back(in[i]); - continue; - } - - // Only strip CSIs for now. - if (i + 1 >= in.size()) break; - if (in[i + 1] != '[') continue; // Not a CSI. - i += 2; - - // Skip everything up to and including the next [a-zA-Z]. - while (i < in.size() && !islatinalpha(in[i])) - ++i; - } - return stripped; -} - -int GetProcessorCount() { -#ifdef _WIN32 - SYSTEM_INFO info; - GetNativeSystemInfo(&info); - return info.dwNumberOfProcessors; -#else -#ifdef CPU_COUNT - // The number of exposed processors might not represent the actual number of - // processors threads can run on. 
This happens when a CPU set limitation is - // active, see https://github.com/ninja-build/ninja/issues/1278 - cpu_set_t set; - if (sched_getaffinity(getpid(), sizeof(set), &set) == 0) { - return CPU_COUNT(&set); - } -#endif - return sysconf(_SC_NPROCESSORS_ONLN); -#endif -} - -#if defined(_WIN32) || defined(__CYGWIN__) -static double CalculateProcessorLoad(uint64_t idle_ticks, uint64_t total_ticks) -{ - static uint64_t previous_idle_ticks = 0; - static uint64_t previous_total_ticks = 0; - static double previous_load = -0.0; - - uint64_t idle_ticks_since_last_time = idle_ticks - previous_idle_ticks; - uint64_t total_ticks_since_last_time = total_ticks - previous_total_ticks; - - bool first_call = (previous_total_ticks == 0); - bool ticks_not_updated_since_last_call = (total_ticks_since_last_time == 0); - - double load; - if (first_call || ticks_not_updated_since_last_call) { - load = previous_load; - } else { - // Calculate load. - double idle_to_total_ratio = - ((double)idle_ticks_since_last_time) / total_ticks_since_last_time; - double load_since_last_call = 1.0 - idle_to_total_ratio; - - // Filter/smooth result when possible. - if(previous_load > 0) { - load = 0.9 * previous_load + 0.1 * load_since_last_call; - } else { - load = load_since_last_call; - } - } - - previous_load = load; - previous_total_ticks = total_ticks; - previous_idle_ticks = idle_ticks; - - return load; -} - -static uint64_t FileTimeToTickCount(const FILETIME & ft) -{ - uint64_t high = (((uint64_t)(ft.dwHighDateTime)) << 32); - uint64_t low = ft.dwLowDateTime; - return (high | low); -} - -double GetLoadAverage() { - FILETIME idle_time, kernel_time, user_time; - BOOL get_system_time_succeeded = - GetSystemTimes(&idle_time, &kernel_time, &user_time); - - double posix_compatible_load; - if (get_system_time_succeeded) { - uint64_t idle_ticks = FileTimeToTickCount(idle_time); - - // kernel_time from GetSystemTimes already includes idle_time. 
- uint64_t total_ticks = - FileTimeToTickCount(kernel_time) + FileTimeToTickCount(user_time); - - double processor_load = CalculateProcessorLoad(idle_ticks, total_ticks); - posix_compatible_load = processor_load * GetProcessorCount(); - - } else { - posix_compatible_load = -0.0; - } - - return posix_compatible_load; -} -#elif defined(_AIX) -double GetLoadAverage() { - perfstat_cpu_total_t cpu_stats; - if (perfstat_cpu_total(NULL, &cpu_stats, sizeof(cpu_stats), 1) < 0) { - return -0.0f; - } - - // Calculation taken from comment in libperfstats.h - return double(cpu_stats.loadavg[0]) / double(1 << SBITS); -} -#elif defined(__UCLIBC__) -double GetLoadAverage() { - struct sysinfo si; - if (sysinfo(&si) != 0) - return -0.0f; - return 1.0 / (1 << SI_LOAD_SHIFT) * si.loads[0]; -} -#else -double GetLoadAverage() { - double loadavg[3] = { 0.0f, 0.0f, 0.0f }; - if (getloadavg(loadavg, 3) < 0) { - // Maybe we should return an error here or the availability of - // getloadavg(3) should be checked when ninja is configured. - return -0.0f; - } - return loadavg[0]; -} -#endif // _WIN32 - -string ElideMiddle(const string& str, size_t width) { - const int kMargin = 3; // Space for "...". - string result = str; - if (result.size() > width) { - size_t elide_size = (width - kMargin) / 2; - result = result.substr(0, elide_size) - + "..." - + result.substr(result.size() - elide_size, elide_size); - } - return result; -} - -bool Truncate(const string& path, size_t size, string* err) { -#ifdef _WIN32 - int fh = _sopen(path.c_str(), _O_RDWR | _O_CREAT, _SH_DENYNO, - _S_IREAD | _S_IWRITE); - int success = _chsize(fh, size); - _close(fh); -#else - int success = truncate(path.c_str(), size); -#endif - // Both truncate() and _chsize() return 0 on success and set errno and return - // -1 on failure. 
- if (success < 0) { - *err = strerror(errno); - return false; - } - return true; -} - diff --git a/ninja/src/util.h b/ninja/src/util.h deleted file mode 100644 index ac5a08de0ac..00000000000 --- a/ninja/src/util.h +++ /dev/null @@ -1,133 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_UTIL_H_ -#define NINJA_UTIL_H_ - -#ifdef _WIN32 -#include "win32port.h" -#else -#include -#endif - -#include -#include -using namespace std; - -#ifdef _MSC_VER -#define NORETURN __declspec(noreturn) -#else -#define NORETURN __attribute__((noreturn)) -#endif - -/// Log a fatal message and exit. -NORETURN void Fatal(const char* msg, ...); - -// Have a generic fall-through for different versions of C/C++. -#if defined(__cplusplus) && __cplusplus >= 201703L -#define NINJA_FALLTHROUGH [[fallthrough]] -#elif defined(__cplusplus) && __cplusplus >= 201103L && defined(__clang__) -#define NINJA_FALLTHROUGH [[clang::fallthrough]] -#elif defined(__cplusplus) && __cplusplus >= 201103L && defined(__GNUC__) && \ - __GNUC__ >= 7 -#define NINJA_FALLTHROUGH [[gnu::fallthrough]] -#elif defined(__GNUC__) && __GNUC__ >= 7 // gcc 7 -#define NINJA_FALLTHROUGH __attribute__ ((fallthrough)) -#else // C++11 on gcc 6, and all other cases -#define NINJA_FALLTHROUGH -#endif - -/// Log a warning message. -void Warning(const char* msg, ...); - -/// Log an error message. 
-void Error(const char* msg, ...); - -/// Canonicalize a path like "foo/../bar.h" into just "bar.h". -/// |slash_bits| has bits set starting from lowest for a backslash that was -/// normalized to a forward slash. (only used on Windows) -bool CanonicalizePath(string* path, uint64_t* slash_bits, string* err); -bool CanonicalizePath(char* path, size_t* len, uint64_t* slash_bits, - string* err); - -/// Appends |input| to |*result|, escaping according to the whims of either -/// Bash, or Win32's CommandLineToArgvW(). -/// Appends the string directly to |result| without modification if we can -/// determine that it contains no problematic characters. -void GetShellEscapedString(const string& input, string* result); -void GetWin32EscapedString(const string& input, string* result); - -/// Read a file to a string (in text mode: with CRLF conversion -/// on Windows). -/// Returns -errno and fills in \a err on error. -int ReadFile(const string& path, string* contents, string* err); - -/// Mark a file descriptor to not be inherited on exec()s. -void SetCloseOnExec(int fd); - -/// Given a misspelled string and a list of correct spellings, returns -/// the closest match or NULL if there is no close enough match. -const char* SpellcheckStringV(const string& text, - const vector& words); - -/// Like SpellcheckStringV, but takes a NULL-terminated list. -const char* SpellcheckString(const char* text, ...); - -bool islatinalpha(int c); - -/// Removes all Ansi escape codes (http://www.termsys.demon.co.uk/vtansi.htm). -string StripAnsiEscapeCodes(const string& in); - -/// @return the number of processors on the machine. Useful for an initial -/// guess for how many jobs to run in parallel. @return 0 on error. -int GetProcessorCount(); - -/// @return the load average of the machine. A negative value is returned -/// on error. -double GetLoadAverage(); - -/// Elide the given string @a str with '...' in the middle if the length -/// exceeds @a width. 
-string ElideMiddle(const string& str, size_t width); - -/// Truncates a file to the given size. -bool Truncate(const string& path, size_t size, string* err); - -enum AnsiType { - COLOR_FORCE, - COLOR_NO, - COLOR_UNKOWN -}; - -bool ShouldBeColorFul(bool terminal); -bool IgnoreGenerator(); -#ifdef _MSC_VER -#define snprintf _snprintf -#define fileno _fileno -#define unlink _unlink -#define chdir _chdir -#define strtoull _strtoui64 -#define getcwd _getcwd -#define PATH_MAX _MAX_PATH -#endif - -#ifdef _WIN32 -/// Convert the value returned by GetLastError() into a string. -string GetLastErrorString(); - -/// Calls Fatal() with a function name and GetLastErrorString. -NORETURN void Win32Fatal(const char* function, const char* hint = NULL); -#endif - -#endif // NINJA_UTIL_H_ diff --git a/ninja/src/util_test.cc b/ninja/src/util_test.cc deleted file mode 100644 index d97b48ccc25..00000000000 --- a/ninja/src/util_test.cc +++ /dev/null @@ -1,430 +0,0 @@ -// Copyright 2011 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "util.h" - -#include "test.h" - -namespace { - -bool CanonicalizePath(string* path, string* err) { - uint64_t unused; - return ::CanonicalizePath(path, &unused, err); -} - -} // namespace - -TEST(CanonicalizePath, PathSamples) { - string path; - string err; - - EXPECT_FALSE(CanonicalizePath(&path, &err)); - EXPECT_EQ("empty path", err); - - path = "foo.h"; err = ""; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo.h", path); - - path = "./foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo.h", path); - - path = "./foo/./bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo/bar.h", path); - - path = "./x/foo/../bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("x/bar.h", path); - - path = "./x/foo/../../bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("bar.h", path); - - path = "foo//bar"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo/bar", path); - - path = "foo//.//..///bar"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("bar", path); - - path = "./x/../foo/../../bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("../bar.h", path); - - path = "foo/./."; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo", path); - - path = "foo/bar/.."; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo", path); - - path = "foo/.hidden_bar"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo/.hidden_bar", path); - - path = "/foo"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("/foo", path); - - path = "//foo"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); -#ifdef _WIN32 - EXPECT_EQ("//foo", path); -#else - EXPECT_EQ("/foo", path); -#endif - - path = "/"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("", path); - - path = "/foo/.."; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("", path); - - path = "."; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ(".", path); - 
- path = "./."; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ(".", path); - - path = "foo/.."; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ(".", path); -} - -#ifdef _WIN32 -TEST(CanonicalizePath, PathSamplesWindows) { - string path; - string err; - - EXPECT_FALSE(CanonicalizePath(&path, &err)); - EXPECT_EQ("empty path", err); - - path = "foo.h"; err = ""; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo.h", path); - - path = ".\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo.h", path); - - path = ".\\foo\\.\\bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo/bar.h", path); - - path = ".\\x\\foo\\..\\bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("x/bar.h", path); - - path = ".\\x\\foo\\..\\..\\bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("bar.h", path); - - path = "foo\\\\bar"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo/bar", path); - - path = "foo\\\\.\\\\..\\\\\\bar"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("bar", path); - - path = ".\\x\\..\\foo\\..\\..\\bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("../bar.h", path); - - path = "foo\\.\\."; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo", path); - - path = "foo\\bar\\.."; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo", path); - - path = "foo\\.hidden_bar"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("foo/.hidden_bar", path); - - path = "\\foo"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("/foo", path); - - path = "\\\\foo"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("//foo", path); - - path = "\\"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("", path); -} - -TEST(CanonicalizePath, SlashTracking) { - string path; - string err; - uint64_t slash_bits; - - path = "foo.h"; err = ""; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - 
EXPECT_EQ("foo.h", path); - EXPECT_EQ(0, slash_bits); - - path = "a\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/foo.h", path); - EXPECT_EQ(1, slash_bits); - - path = "a/bcd/efh\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/bcd/efh/foo.h", path); - EXPECT_EQ(4, slash_bits); - - path = "a\\bcd/efh\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/bcd/efh/foo.h", path); - EXPECT_EQ(5, slash_bits); - - path = "a\\bcd\\efh\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/bcd/efh/foo.h", path); - EXPECT_EQ(7, slash_bits); - - path = "a/bcd/efh/foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/bcd/efh/foo.h", path); - EXPECT_EQ(0, slash_bits); - - path = "a\\./efh\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/efh/foo.h", path); - EXPECT_EQ(3, slash_bits); - - path = "a\\../efh\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("efh/foo.h", path); - EXPECT_EQ(1, slash_bits); - - path = "a\\b\\c\\d\\e\\f\\g\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/b/c/d/e/f/g/foo.h", path); - EXPECT_EQ(127, slash_bits); - - path = "a\\b\\c\\..\\..\\..\\g\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("g/foo.h", path); - EXPECT_EQ(1, slash_bits); - - path = "a\\b/c\\../../..\\g\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("g/foo.h", path); - EXPECT_EQ(1, slash_bits); - - path = "a\\b/c\\./../..\\g\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/g/foo.h", path); - EXPECT_EQ(3, slash_bits); - - path = "a\\b/c\\./../..\\g/foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/g/foo.h", path); - EXPECT_EQ(1, slash_bits); - - path = "a\\\\\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, 
&slash_bits, &err)); - EXPECT_EQ("a/foo.h", path); - EXPECT_EQ(1, slash_bits); - - path = "a/\\\\foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/foo.h", path); - EXPECT_EQ(0, slash_bits); - - path = "a\\//foo.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ("a/foo.h", path); - EXPECT_EQ(1, slash_bits); -} - -TEST(CanonicalizePath, CanonicalizeNotExceedingLen) { - // Make sure searching \/ doesn't go past supplied len. - char buf[] = "foo/bar\\baz.h\\"; // Last \ past end. - uint64_t slash_bits; - string err; - size_t size = 13; - EXPECT_TRUE(::CanonicalizePath(buf, &size, &slash_bits, &err)); - EXPECT_EQ(0, strncmp("foo/bar/baz.h", buf, size)); - EXPECT_EQ(2, slash_bits); // Not including the trailing one. -} - -TEST(CanonicalizePath, TooManyComponents) { - string path; - string err; - uint64_t slash_bits; - - // 64 is OK. - path = "a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./" - "a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./x.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ(slash_bits, 0x0); - - // Backslashes version. - path = - "a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\" - "a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\" - "a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\" - "a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\x.h"; - - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ(slash_bits, 0xffffffff); - - // 65 is OK if #component is less than 60 after path canonicalization. - err = ""; - path = "a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./" - "a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./a/./x/y.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ(slash_bits, 0x0); - - // Backslashes version. 
- err = ""; - path = - "a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\" - "a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\" - "a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\" - "a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\a\\.\\x\\y.h"; - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ(slash_bits, 0x1ffffffff); - - - // 59 after canonicalization is OK. - err = ""; - path = "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/" - "a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/a/x/y.h"; - EXPECT_EQ(58, std::count(path.begin(), path.end(), '/')); - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ(slash_bits, 0x0); - - // Backslashes version. - err = ""; - path = - "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" - "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" - "a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\a\\" - "a\\a\\a\\a\\a\\a\\a\\a\\a\\x\\y.h"; - EXPECT_EQ(58, std::count(path.begin(), path.end(), '\\')); - EXPECT_TRUE(CanonicalizePath(&path, &slash_bits, &err)); - EXPECT_EQ(slash_bits, 0x3ffffffffffffff); -} -#endif - -TEST(CanonicalizePath, UpDir) { - string path, err; - path = "../../foo/bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("../../foo/bar.h", path); - - path = "test/../../foo/bar.h"; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("../foo/bar.h", path); -} - -TEST(CanonicalizePath, AbsolutePath) { - string path = "/usr/include/stdio.h"; - string err; - EXPECT_TRUE(CanonicalizePath(&path, &err)); - EXPECT_EQ("/usr/include/stdio.h", path); -} - -TEST(CanonicalizePath, NotNullTerminated) { - string path; - string err; - size_t len; - uint64_t unused; - - path = "foo/. bar/."; - len = strlen("foo/."); // Canonicalize only the part before the space. - EXPECT_TRUE(CanonicalizePath(&path[0], &len, &unused, &err)); - EXPECT_EQ(strlen("foo"), len); - EXPECT_EQ("foo/. 
bar/.", string(path)); - - path = "foo/../file bar/."; - len = strlen("foo/../file"); - EXPECT_TRUE(CanonicalizePath(&path[0], &len, &unused, &err)); - EXPECT_EQ(strlen("file"), len); - EXPECT_EQ("file ./file bar/.", string(path)); -} - -TEST(PathEscaping, TortureTest) { - string result; - - GetWin32EscapedString("foo bar\\\"'$@d!st!c'\\path'\\", &result); - EXPECT_EQ("\"foo bar\\\\\\\"'$@d!st!c'\\path'\\\\\"", result); - result.clear(); - - GetShellEscapedString("foo bar\"/'$@d!st!c'/path'", &result); - EXPECT_EQ("'foo bar\"/'\\''$@d!st!c'\\''/path'\\'''", result); -} - -TEST(PathEscaping, SensiblePathsAreNotNeedlesslyEscaped) { - const char* path = "some/sensible/path/without/crazy/characters.c++"; - string result; - - GetWin32EscapedString(path, &result); - EXPECT_EQ(path, result); - result.clear(); - - GetShellEscapedString(path, &result); - EXPECT_EQ(path, result); -} - -TEST(PathEscaping, SensibleWin32PathsAreNotNeedlesslyEscaped) { - const char* path = "some\\sensible\\path\\without\\crazy\\characters.c++"; - string result; - - GetWin32EscapedString(path, &result); - EXPECT_EQ(path, result); -} - -TEST(StripAnsiEscapeCodes, EscapeAtEnd) { - string stripped = StripAnsiEscapeCodes("foo\33"); - EXPECT_EQ("foo", stripped); - - stripped = StripAnsiEscapeCodes("foo\33["); - EXPECT_EQ("foo", stripped); -} - -TEST(StripAnsiEscapeCodes, StripColors) { - // An actual clang warning. - string input = "\33[1maffixmgr.cxx:286:15: \33[0m\33[0;1;35mwarning: " - "\33[0m\33[1musing the result... [-Wparentheses]\33[0m"; - string stripped = StripAnsiEscapeCodes(input); - EXPECT_EQ("affixmgr.cxx:286:15: warning: using the result... 
[-Wparentheses]", - stripped); -} - -TEST(ElideMiddle, NothingToElide) { - string input = "Nothing to elide in this short string."; - EXPECT_EQ(input, ElideMiddle(input, 80)); - EXPECT_EQ(input, ElideMiddle(input, 38)); -} - -TEST(ElideMiddle, ElideInTheMiddle) { - string input = "01234567890123456789"; - string elided = ElideMiddle(input, 10); - EXPECT_EQ("012...789", elided); - EXPECT_EQ("01234567...23456789", ElideMiddle(input, 19)); -} diff --git a/ninja/src/version.cc b/ninja/src/version.cc deleted file mode 100644 index d0b52ead4b5..00000000000 --- a/ninja/src/version.cc +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "version.h" - -#include - -#include "util.h" - -const char* kNinjaVersion = "1.9.0.git"; -#if 0 -void ParseVersion(const string& version, int* major, int* minor) { - size_t end = version.find('.'); - *major = atoi(version.substr(0, end).c_str()); - *minor = 0; - if (end != string::npos) { - size_t start = end + 1; - end = version.find('.', start); - *minor = atoi(version.substr(start, end).c_str()); - } -} - -void CheckNinjaVersion(const string& version) { - int bin_major, bin_minor; - ParseVersion(kNinjaVersion, &bin_major, &bin_minor); - int file_major, file_minor; - ParseVersion(version, &file_major, &file_minor); - - if (bin_major > file_major) { - Warning("ninja executable version (%s) greater than build file " - "ninja_required_version (%s); versions may be incompatible.", - kNinjaVersion, version.c_str()); - return; - } - - if ((bin_major == file_major && bin_minor < file_minor) || - bin_major < file_major) { - Fatal("ninja version (%s) incompatible with build file " - "ninja_required_version version (%s).", - kNinjaVersion, version.c_str()); - } -} -#endif \ No newline at end of file diff --git a/ninja/src/version.h b/ninja/src/version.h deleted file mode 100644 index 5a8b1af68ac..00000000000 --- a/ninja/src/version.h +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2013 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#ifndef NINJA_VERSION_H_ -#define NINJA_VERSION_H_ - -#include -using namespace std; - -/// The version number of the current Ninja release. This will always -/// be "git" on trunk. -extern const char* kNinjaVersion; -#if 0 -/// Parse the major/minor components of a version string. -void ParseVersion(const string& version, int* major, int* minor); - -/// Check whether \a version is compatible with the current Ninja version, -/// aborting if not. -void CheckNinjaVersion(const string& required_version); -#endif -#endif // NINJA_VERSION_H_ diff --git a/ninja/src/win32port.h b/ninja/src/win32port.h deleted file mode 100644 index e542536cc76..00000000000 --- a/ninja/src/win32port.h +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2012 Google Inc. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef NINJA_WIN32PORT_H_ -#define NINJA_WIN32PORT_H_ - -#if defined(__MINGW32__) || defined(__MINGW64__) -#ifndef __STDC_FORMAT_MACROS -#define __STDC_FORMAT_MACROS -#endif -#include -#endif - -typedef signed short int16_t; -typedef unsigned short uint16_t; -/// A 64-bit integer type -typedef signed long long int64_t; -typedef unsigned long long uint64_t; - -// printf format specifier for uint64_t, from C99. 
-#ifndef PRIu64 -#define PRId64 "I64d" -#define PRIu64 "I64u" -#define PRIx64 "I64x" -#endif - -#endif // NINJA_WIN32PORT_H_ - diff --git a/package.json b/package.json index b0c9dd25b3f..92eed3b3366 100644 --- a/package.json +++ b/package.json @@ -43,14 +43,12 @@ }, "bin": { "bsc": "cli/bsc.js", - "bstracing": "cli/bstracing.js", "rescript": "cli/rescript.js", - "rescript-legacy": "cli/rescript-legacy.js", "rescript-tools": "cli/rescript-tools.js" }, "scripts": { "test": "node scripts/test.js -all", - "test-bsb": "node scripts/test.js -bsb", + "test-build": "node scripts/test.js -build", "test-ocaml": "node scripts/test.js -ounit", "check": "biome check --no-errors-on-unmatched .", "check:all": "biome check .", @@ -64,7 +62,6 @@ "COPYING", "COPYING.LESSER", "CREDITS.md", - "ninja.COPYING", "docs/docson/build-schema.json", "cli" ], diff --git a/packages/@rescript/darwin-arm64/bin.d.ts b/packages/@rescript/darwin-arm64/bin.d.ts index e64a217712d..f6fa8daaca5 100644 --- a/packages/@rescript/darwin-arm64/bin.d.ts +++ b/packages/@rescript/darwin-arm64/bin.d.ts @@ -3,10 +3,7 @@ export const binDir: string; export const binPaths: BinaryPaths; export type BinaryPaths = { - bsb_helper_exe: string; bsc_exe: string; - ninja_exe: string; - rescript_legacy_exe: string; rescript_tools_exe: string; rescript_editor_analysis_exe: string; rescript_exe: string; diff --git a/packages/@rescript/darwin-arm64/bin.js b/packages/@rescript/darwin-arm64/bin.js index 7b6f5bed04e..aff7c9c9d93 100644 --- a/packages/@rescript/darwin-arm64/bin.js +++ b/packages/@rescript/darwin-arm64/bin.js @@ -5,10 +5,7 @@ import * as path from "node:path"; export const binDir = path.join(import.meta.dirname, "bin"); export const binPaths = { - bsb_helper_exe: path.join(binDir, "bsb_helper.exe"), bsc_exe: path.join(binDir, "bsc.exe"), - ninja_exe: path.join(binDir, "ninja.exe"), - rescript_legacy_exe: path.join(binDir, "rescript-legacy.exe"), rescript_tools_exe: path.join(binDir, "rescript-tools.exe"), 
rescript_editor_analysis_exe: path.join( binDir, diff --git a/packages/@rescript/darwin-arm64/package.json b/packages/@rescript/darwin-arm64/package.json index c848f1650dd..3b2cf1a1e04 100644 --- a/packages/@rescript/darwin-arm64/package.json +++ b/packages/@rescript/darwin-arm64/package.json @@ -30,10 +30,7 @@ "access": "public", "provenance": true, "executableFiles": [ - "./bin/bsb_helper.exe", "./bin/bsc.exe", - "./bin/ninja.exe", - "./bin/rescript-legacy.exe", "./bin/rescript-editor-analysis.exe", "./bin/rescript-tools.exe", "./bin/rescript.exe" diff --git a/packages/@rescript/darwin-x64/bin.d.ts b/packages/@rescript/darwin-x64/bin.d.ts index e64a217712d..f6fa8daaca5 100644 --- a/packages/@rescript/darwin-x64/bin.d.ts +++ b/packages/@rescript/darwin-x64/bin.d.ts @@ -3,10 +3,7 @@ export const binDir: string; export const binPaths: BinaryPaths; export type BinaryPaths = { - bsb_helper_exe: string; bsc_exe: string; - ninja_exe: string; - rescript_legacy_exe: string; rescript_tools_exe: string; rescript_editor_analysis_exe: string; rescript_exe: string; diff --git a/packages/@rescript/darwin-x64/bin.js b/packages/@rescript/darwin-x64/bin.js index 7b6f5bed04e..aff7c9c9d93 100644 --- a/packages/@rescript/darwin-x64/bin.js +++ b/packages/@rescript/darwin-x64/bin.js @@ -5,10 +5,7 @@ import * as path from "node:path"; export const binDir = path.join(import.meta.dirname, "bin"); export const binPaths = { - bsb_helper_exe: path.join(binDir, "bsb_helper.exe"), bsc_exe: path.join(binDir, "bsc.exe"), - ninja_exe: path.join(binDir, "ninja.exe"), - rescript_legacy_exe: path.join(binDir, "rescript-legacy.exe"), rescript_tools_exe: path.join(binDir, "rescript-tools.exe"), rescript_editor_analysis_exe: path.join( binDir, diff --git a/packages/@rescript/darwin-x64/package.json b/packages/@rescript/darwin-x64/package.json index 813fb778791..06e9f65d80d 100644 --- a/packages/@rescript/darwin-x64/package.json +++ b/packages/@rescript/darwin-x64/package.json @@ -30,10 +30,7 @@ 
"access": "public", "provenance": true, "executableFiles": [ - "./bin/bsb_helper.exe", "./bin/bsc.exe", - "./bin/ninja.exe", - "./bin/rescript-legacy.exe", "./bin/rescript-editor-analysis.exe", "./bin/rescript-tools.exe", "./bin/rescript.exe" diff --git a/packages/@rescript/linux-arm64/bin.d.ts b/packages/@rescript/linux-arm64/bin.d.ts index e64a217712d..f6fa8daaca5 100644 --- a/packages/@rescript/linux-arm64/bin.d.ts +++ b/packages/@rescript/linux-arm64/bin.d.ts @@ -3,10 +3,7 @@ export const binDir: string; export const binPaths: BinaryPaths; export type BinaryPaths = { - bsb_helper_exe: string; bsc_exe: string; - ninja_exe: string; - rescript_legacy_exe: string; rescript_tools_exe: string; rescript_editor_analysis_exe: string; rescript_exe: string; diff --git a/packages/@rescript/linux-arm64/bin.js b/packages/@rescript/linux-arm64/bin.js index 7b6f5bed04e..aff7c9c9d93 100644 --- a/packages/@rescript/linux-arm64/bin.js +++ b/packages/@rescript/linux-arm64/bin.js @@ -5,10 +5,7 @@ import * as path from "node:path"; export const binDir = path.join(import.meta.dirname, "bin"); export const binPaths = { - bsb_helper_exe: path.join(binDir, "bsb_helper.exe"), bsc_exe: path.join(binDir, "bsc.exe"), - ninja_exe: path.join(binDir, "ninja.exe"), - rescript_legacy_exe: path.join(binDir, "rescript-legacy.exe"), rescript_tools_exe: path.join(binDir, "rescript-tools.exe"), rescript_editor_analysis_exe: path.join( binDir, diff --git a/packages/@rescript/linux-arm64/package.json b/packages/@rescript/linux-arm64/package.json index 6aca498b1ae..2a7e7be2d39 100644 --- a/packages/@rescript/linux-arm64/package.json +++ b/packages/@rescript/linux-arm64/package.json @@ -30,10 +30,7 @@ "access": "public", "provenance": true, "executableFiles": [ - "./bin/bsb_helper.exe", "./bin/bsc.exe", - "./bin/ninja.exe", - "./bin/rescript-legacy.exe", "./bin/rescript-editor-analysis.exe", "./bin/rescript-tools.exe", "./bin/rescript.exe" diff --git a/packages/@rescript/linux-x64/bin.d.ts 
b/packages/@rescript/linux-x64/bin.d.ts index e64a217712d..f6fa8daaca5 100644 --- a/packages/@rescript/linux-x64/bin.d.ts +++ b/packages/@rescript/linux-x64/bin.d.ts @@ -3,10 +3,7 @@ export const binDir: string; export const binPaths: BinaryPaths; export type BinaryPaths = { - bsb_helper_exe: string; bsc_exe: string; - ninja_exe: string; - rescript_legacy_exe: string; rescript_tools_exe: string; rescript_editor_analysis_exe: string; rescript_exe: string; diff --git a/packages/@rescript/linux-x64/bin.js b/packages/@rescript/linux-x64/bin.js index 7b6f5bed04e..aff7c9c9d93 100644 --- a/packages/@rescript/linux-x64/bin.js +++ b/packages/@rescript/linux-x64/bin.js @@ -5,10 +5,7 @@ import * as path from "node:path"; export const binDir = path.join(import.meta.dirname, "bin"); export const binPaths = { - bsb_helper_exe: path.join(binDir, "bsb_helper.exe"), bsc_exe: path.join(binDir, "bsc.exe"), - ninja_exe: path.join(binDir, "ninja.exe"), - rescript_legacy_exe: path.join(binDir, "rescript-legacy.exe"), rescript_tools_exe: path.join(binDir, "rescript-tools.exe"), rescript_editor_analysis_exe: path.join( binDir, diff --git a/packages/@rescript/linux-x64/package.json b/packages/@rescript/linux-x64/package.json index 212b83a8617..3049f2086f1 100644 --- a/packages/@rescript/linux-x64/package.json +++ b/packages/@rescript/linux-x64/package.json @@ -30,10 +30,7 @@ "access": "public", "provenance": true, "executableFiles": [ - "./bin/bsb_helper.exe", "./bin/bsc.exe", - "./bin/ninja.exe", - "./bin/rescript-legacy.exe", "./bin/rescript-editor-analysis.exe", "./bin/rescript-tools.exe", "./bin/rescript.exe" diff --git a/packages/@rescript/win32-x64/bin.d.ts b/packages/@rescript/win32-x64/bin.d.ts index e64a217712d..f6fa8daaca5 100644 --- a/packages/@rescript/win32-x64/bin.d.ts +++ b/packages/@rescript/win32-x64/bin.d.ts @@ -3,10 +3,7 @@ export const binDir: string; export const binPaths: BinaryPaths; export type BinaryPaths = { - bsb_helper_exe: string; bsc_exe: string; - ninja_exe: 
string; - rescript_legacy_exe: string; rescript_tools_exe: string; rescript_editor_analysis_exe: string; rescript_exe: string; diff --git a/packages/@rescript/win32-x64/bin.js b/packages/@rescript/win32-x64/bin.js index 7b6f5bed04e..aff7c9c9d93 100644 --- a/packages/@rescript/win32-x64/bin.js +++ b/packages/@rescript/win32-x64/bin.js @@ -5,10 +5,7 @@ import * as path from "node:path"; export const binDir = path.join(import.meta.dirname, "bin"); export const binPaths = { - bsb_helper_exe: path.join(binDir, "bsb_helper.exe"), bsc_exe: path.join(binDir, "bsc.exe"), - ninja_exe: path.join(binDir, "ninja.exe"), - rescript_legacy_exe: path.join(binDir, "rescript-legacy.exe"), rescript_tools_exe: path.join(binDir, "rescript-tools.exe"), rescript_editor_analysis_exe: path.join( binDir, diff --git a/packages/@rescript/win32-x64/package.json b/packages/@rescript/win32-x64/package.json index 835501f3f84..b0f73b5296b 100644 --- a/packages/@rescript/win32-x64/package.json +++ b/packages/@rescript/win32-x64/package.json @@ -30,10 +30,7 @@ "access": "public", "provenance": true, "executableFiles": [ - "./bin/bsb_helper.exe", "./bin/bsc.exe", - "./bin/ninja.exe", - "./bin/rescript-legacy.exe", "./bin/rescript-editor-analysis.exe", "./bin/rescript-tools.exe", "./bin/rescript.exe" diff --git a/packages/artifacts.json b/packages/artifacts.json index e7a445b3efe..54f54b241c2 100644 --- a/packages/artifacts.json +++ b/packages/artifacts.json @@ -7,19 +7,13 @@ "LICENSE", "README.md", "cli/bsc.js", - "cli/bstracing.js", "cli/common/args.js", "cli/common/bins.js", - "cli/common/bsb.js", "cli/common/minisocket.js", "cli/common/runtime.js", - "cli/rescript-legacy.js", - "cli/rescript-legacy/dump.js", - "cli/rescript-legacy/format.js", "cli/rescript-tools.js", "cli/rescript.js", "docs/docson/build-schema.json", - "ninja/COPYING", "package.json" ], "@rescript/runtime": [ diff --git a/packages/playground/.gitignore b/packages/playground/.gitignore index 18ad2e07225..988f87fb160 100644 --- 
a/packages/playground/.gitignore +++ b/packages/playground/.gitignore @@ -23,7 +23,6 @@ lib/bs *.mliast .vscode .merlin -.bsb.lock /node_modules/ /lib/ .DS_Store diff --git a/rewatch/src/cli.rs b/rewatch/src/cli.rs index e0a0132773b..c59fd3918bb 100644 --- a/rewatch/src/cli.rs +++ b/rewatch/src/cli.rs @@ -40,8 +40,7 @@ pub enum FileExtension { #[command(version)] #[command(after_help = "Notes: - If no command is provided, the build command is run by default. See `rescript help build` for more information. - - To create a new ReScript project, or to add ReScript to an existing project, use https://github.com/rescript-lang/create-rescript-app. - - For the legacy (pre-v12) build system, run `rescript-legacy`.")] + - To create a new ReScript project, or to add ReScript to an existing project, use https://github.com/rescript-lang/create-rescript-app.")] pub struct Cli { /// Verbosity: /// -v -> Debug diff --git a/rewatch/testrepo/.gitignore b/rewatch/testrepo/.gitignore index d9170dc4bba..9d483a77b40 100644 --- a/rewatch/testrepo/.gitignore +++ b/rewatch/testrepo/.gitignore @@ -1,7 +1,6 @@ .DS_Store **/node_modules/ **/lib/ -.bsb.lock .merlin .DS_Store .cmi diff --git a/scripts/buildNinjaBinary.js b/scripts/buildNinjaBinary.js deleted file mode 100755 index 36f7e9cf155..00000000000 --- a/scripts/buildNinjaBinary.js +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env node - -// @ts-check - -import { execSync } from "node:child_process"; -import { ninjaDir } from "#dev/paths"; - -const platform = process.platform; -const buildCommand = "python3 configure.py --bootstrap --verbose"; - -if (platform === "win32") { - // On Windows, the build uses the MSVC compiler which needs to be on the path. 
- execSync(buildCommand, { cwd: ninjaDir }); -} else { - if (process.platform === "darwin") { - process.env.CXXFLAGS = "-flto"; - } - execSync(buildCommand, { stdio: [0, 1, 2], cwd: ninjaDir }); -} diff --git a/scripts/copyExes.js b/scripts/copyExes.js index 7e5ea182a25..a7e2aadd92b 100755 --- a/scripts/copyExes.js +++ b/scripts/copyExes.js @@ -9,7 +9,7 @@ import * as fs from "node:fs"; import * as path from "node:path"; import { parseArgs } from "node:util"; import { binDir } from "#cli/bins"; -import { compilerBinDir, ninjaDir, rewatchDir } from "#dev/paths"; +import { compilerBinDir, rewatchDir } from "#dev/paths"; const args = parseArgs({ args: process.argv.slice(2), @@ -20,9 +20,6 @@ const args = parseArgs({ compiler: { type: "boolean", }, - ninja: { - type: "boolean", - }, rewatch: { type: "boolean", }, @@ -30,19 +27,12 @@ const args = parseArgs({ }); const shouldCopyCompiler = args.values.all || args.values.compiler; -const shouldCopyNinja = args.values.all || args.values.ninja; const shouldCopyRewatch = args.values.all || args.values.rewatch; if (shouldCopyCompiler) { - copyExe(compilerBinDir, "rescript-legacy"); copyExe(compilerBinDir, "rescript-editor-analysis"); copyExe(compilerBinDir, "rescript-tools"); copyExe(compilerBinDir, "bsc"); - copyExe(compilerBinDir, "bsb_helper"); -} - -if (shouldCopyNinja) { - copyExe(ninjaDir, "ninja"); } if (shouldCopyRewatch) { diff --git a/scripts/test.js b/scripts/test.js index c0a4537b393..9e62182b718 100644 --- a/scripts/test.js +++ b/scripts/test.js @@ -23,7 +23,7 @@ import { let ounitTest = false; let mochaTest = false; -let bsbTest = false; +let buildTest = false; let formatTest = false; let runtimeDocstrings = false; @@ -35,8 +35,8 @@ if (process.argv.includes("-mocha")) { mochaTest = true; } -if (process.argv.includes("-bsb")) { - bsbTest = true; +if (process.argv.includes("-build")) { + buildTest = true; } if (process.argv.includes("-format")) { @@ -50,7 +50,7 @@ if (process.argv.includes("-docstrings")) { if 
(process.argv.includes("-all")) { ounitTest = true; mochaTest = true; - bsbTest = true; + buildTest = true; formatTest = true; runtimeDocstrings = true; } @@ -108,7 +108,7 @@ if (mochaTest) { }); } -if (bsbTest) { +if (buildTest) { console.log("Doing build_tests"); const files = fs.readdirSync(buildTestDir); diff --git a/tests/build_tests/cli_help/input.js b/tests/build_tests/cli_help/input.js index 479effb4e7f..8c7828e34ec 100755 --- a/tests/build_tests/cli_help/input.js +++ b/tests/build_tests/cli_help/input.js @@ -28,8 +28,7 @@ const cliHelp = "\n" + "Notes:\n" + " - If no command is provided, the build command is run by default. See `rescript help build` for more information.\n" + - " - To create a new ReScript project, or to add ReScript to an existing project, use https://github.com/rescript-lang/create-rescript-app.\n" + - " - For the legacy (pre-v12) build system, run `rescript-legacy`.\n"; + " - To create a new ReScript project, or to add ReScript to an existing project, use https://github.com/rescript-lang/create-rescript-app.\n"; const buildHelp = "Build the project (default command)\n" + diff --git a/tests/ounit_tests/dune b/tests/ounit_tests/dune index 56ac24bd1fd..e0bcd078bcb 100644 --- a/tests/ounit_tests/dune +++ b/tests/ounit_tests/dune @@ -11,4 +11,4 @@ (<> %{profile} browser)) (flags (:standard -w +a-4-9-30-40-41-42-48-70)) - (libraries bsb bsb_helper core ounit2 analysis)) + (libraries core ounit2 analysis)) diff --git a/tests/ounit_tests/ounit_bsb_pkg_tests.ml b/tests/ounit_tests/ounit_bsb_pkg_tests.ml deleted file mode 100644 index b2c8fc6bb77..00000000000 --- a/tests/ounit_tests/ounit_bsb_pkg_tests.ml +++ /dev/null @@ -1,105 +0,0 @@ -let ( >:: ), ( >::: ) = OUnit.(( >:: ), ( >::: )) - -let printer_string x = x -let ( =~ ) = OUnit.assert_equal ~printer:printer_string - -let scope_test s (a, b, c) = - match Bsb_pkg_types.extract_pkg_name_and_file s with - | Scope (a0, b0), c0 -> - a =~ a0; - b =~ b0; - c =~ c0 - | Global _, _ -> 
OUnit.assert_failure __LOC__ - -let global_test s (a, b) = - match Bsb_pkg_types.extract_pkg_name_and_file s with - | Scope _, _ -> OUnit.assert_failure __LOC__ - | Global a0, b0 -> - a =~ a0; - b =~ b0 - -let s_test0 s (a, b) = - match Bsb_pkg_types.string_as_package s with - | Scope (name, scope) -> - a =~ name; - b =~ scope - | _ -> OUnit.assert_failure __LOC__ - -let s_test1 s a = - match Bsb_pkg_types.string_as_package s with - | Global x -> a =~ x - | _ -> OUnit.assert_failure __LOC__ - -let group0 = - Map_string.of_list - [ - ( "Liba", - { - Bsb_db.info = Impl_intf; - dir = "a"; - case = false; - name_sans_extension = "liba"; - } ); - ] -let group1 = - Map_string.of_list - [ - ( "Ciba", - { - Bsb_db.info = Impl_intf; - dir = "b"; - case = false; - name_sans_extension = "liba"; - } ); - ] - -let parse_db db : Bsb_db_decode.t = - let buf = Ext_buffer.create 10_000 in - Bsb_db_encode.encode db buf; - let s = Ext_buffer.contents buf in - Bsb_db_decode.decode s - -let suites = - __FILE__ - >::: [ - ( __LOC__ >:: fun _ -> - scope_test "@hello/hi" ("hi", "@hello", ""); - - scope_test "@hello/hi/x" ("hi", "@hello", "x"); - - scope_test "@hello/hi/x/y" ("hi", "@hello", "x/y") ); - ( __LOC__ >:: fun _ -> - global_test "hello" ("hello", ""); - global_test "hello/x" ("hello", "x"); - global_test "hello/x/y" ("hello", "x/y") ); - ( __LOC__ >:: fun _ -> - s_test0 "@x/y" ("y", "@x"); - s_test0 "@x/y/z" ("y/z", "@x"); - s_test1 "xx" "xx"; - s_test1 "xx/yy/zz" "xx/yy/zz" ); - ( __LOC__ >:: fun _ -> - match parse_db {lib = group0; dev = group1} with - | { - lib = Group {modules = [|"Liba"|]}; - dev = Group {modules = [|"Ciba"|]}; - } -> - OUnit.assert_bool __LOC__ true - | _ -> OUnit.assert_failure __LOC__ ); - ( __LOC__ >:: fun _ -> - match parse_db {lib = group0; dev = Map_string.empty} with - | {lib = Group {modules = [|"Liba"|]}; dev = Dummy} -> - OUnit.assert_bool __LOC__ true - | _ -> OUnit.assert_failure __LOC__ ); - ( __LOC__ >:: fun _ -> - match parse_db {lib = 
Map_string.empty; dev = group1} with - | {lib = Dummy; dev = Group {modules = [|"Ciba"|]}} -> - OUnit.assert_bool __LOC__ true - | _ -> OUnit.assert_failure __LOC__ ) - (* __LOC__ >:: begin fun _ -> - OUnit.assert_equal parse_data_one data_one - end ; - __LOC__ >:: begin fun _ -> - - OUnit.assert_equal parse_data_two data_two - end *); - ] diff --git a/tests/ounit_tests/ounit_bsb_regex_tests.ml b/tests/ounit_tests/ounit_bsb_regex_tests.ml deleted file mode 100644 index 9278126c7df..00000000000 --- a/tests/ounit_tests/ounit_bsb_regex_tests.ml +++ /dev/null @@ -1,180 +0,0 @@ -let ( >:: ), ( >::: ) = OUnit.(( >:: ), ( >::: )) - -let ( =~ ) = OUnit.assert_equal - -let test_eq x y = - Bsb_regex.global_substitute ~reg:"\\${rescript:\\([-a-zA-Z0-9]+\\)}" x - (fun _ groups -> - match groups with - | x :: _ -> x - | _ -> assert false) - =~ y - -let suites = - __FILE__ - >::: [ - ( __LOC__ >:: fun _ -> - test_eq - {| hi hi hi ${rescript:name} - ${rescript:x} - ${rescript:u} - |} - {| hi hi hi name - x - u - |} ); - ( __LOC__ >:: fun _ -> - test_eq "xx" "xx"; - test_eq "${rescript:x}" "x"; - test_eq "a${rescript:x}" "ax" ); - (__LOC__ >:: fun _ -> test_eq "${rescript:x}x" "xx"); - ( __LOC__ >:: fun _ -> - test_eq - {| -{ - "name": "${rescript:name}", - "version": "${rescript:proj-version}", - "sources": [ - "src" - ], - "dependencies": [ - ] -} -|} - {| -{ - "name": "name", - "version": "proj-version", - "sources": [ - "src" - ], - "dependencies": [ - ] -} -|} - ); - ( __LOC__ >:: fun _ -> - test_eq - {| -{ - "name": "${rescript:name}", - "version": "${rescript:proj-version}", - "scripts": { - "clean": "bsb -clean", - "clean:all": "bsb -clean-world", - "build": "bsb", - "build:all": "bsb -make-world", - "watch": "bsb -w", - }, - "keywords": [ - "ReScript" - ], - "license": "MIT", - "devDependencies": { - "bs-platform": "${rescript:bs-version}" - } -} -|} - {| -{ - "name": "name", - "version": "proj-version", - "scripts": { - "clean": "bsb -clean", - "clean:all": "bsb 
-clean-world", - "build": "bsb", - "build:all": "bsb -make-world", - "watch": "bsb -w", - }, - "keywords": [ - "ReScript" - ], - "license": "MIT", - "devDependencies": { - "bs-platform": "bs-version" - } -} -|} - ); - ( __LOC__ >:: fun _ -> - test_eq - {| -{ - "version": "0.1.0", - "command": "${rescript:bsb}", - "options": { - "cwd": "${workspaceRoot}" - }, - "isShellCommand": true, - "args": [ - "-w" - ], - "showOutput": "always", - "isWatching": true, - "problemMatcher": { - "fileLocation": "absolute", - "owner": "ocaml", - "watching": { - "activeOnStart": true, - "beginsPattern": ">>>> Start compiling", - "endsPattern": ">>>> Finish compiling" - }, - "pattern": [ - { - "regexp": "^File \"(.*)\", line (\\d+)(?:, characters (\\d+)-(\\d+))?:$", - "file": 1, - "line": 2, - "column": 3, - "endColumn": 4 - }, - { - "regexp": "^(?:(?:Parse\\s+)?(Warning|[Ee]rror)(?:\\s+\\d+)?:)?\\s+(.*)$", - "severity": 1, - "message": 2, - "loop": true - } - ] - } -} -|} - {| -{ - "version": "0.1.0", - "command": "bsb", - "options": { - "cwd": "${workspaceRoot}" - }, - "isShellCommand": true, - "args": [ - "-w" - ], - "showOutput": "always", - "isWatching": true, - "problemMatcher": { - "fileLocation": "absolute", - "owner": "ocaml", - "watching": { - "activeOnStart": true, - "beginsPattern": ">>>> Start compiling", - "endsPattern": ">>>> Finish compiling" - }, - "pattern": [ - { - "regexp": "^File \"(.*)\", line (\\d+)(?:, characters (\\d+)-(\\d+))?:$", - "file": 1, - "line": 2, - "column": 3, - "endColumn": 4 - }, - { - "regexp": "^(?:(?:Parse\\s+)?(Warning|[Ee]rror)(?:\\s+\\d+)?:)?\\s+(.*)$", - "severity": 1, - "message": 2, - "loop": true - } - ] - } -} -|} - ); - ] diff --git a/tests/ounit_tests/ounit_string_tests.ml b/tests/ounit_tests/ounit_string_tests.ml index ee0fe72565b..aba2e4114c4 100644 --- a/tests/ounit_tests/ounit_string_tests.ml +++ b/tests/ounit_tests/ounit_string_tests.ml @@ -423,9 +423,6 @@ let suites = cmp0 a b =~ cmp1 a b; cmp0 b a =~ cmp1 b a in - (* This is 
needed since deserialization/serialization - needs to be synced up for .bsbuild decoding - *) f "a" "A"; f "bcdef" "abcdef"; f "" "A"; diff --git a/tests/ounit_tests/ounit_tests_main.ml b/tests/ounit_tests/ounit_tests_main.ml index 37a1d7e597b..c390a9518be 100644 --- a/tests/ounit_tests/ounit_tests_main.ml +++ b/tests/ounit_tests/ounit_tests_main.ml @@ -18,8 +18,6 @@ let suites = Ounit_ident_mask_tests.suites; Ounit_utf8_test.suites; Ounit_unicode_tests.suites; - Ounit_bsb_regex_tests.suites; - Ounit_bsb_pkg_tests.suites; Ounit_util_tests.suites; ] diff --git a/tests/tools_tests/package.json b/tests/tools_tests/package.json index 3d25d4c6cc3..3f2cdc437b2 100644 --- a/tests/tools_tests/package.json +++ b/tests/tools_tests/package.json @@ -4,7 +4,7 @@ "scripts": { "build": "rescript build", "clean": "rescript clean", - "dev": "rescript -w" + "dev": "rescript --watch" }, "dependencies": { "@rescript/react": "link:../dependencies/rescript-react", diff --git a/yarn.lock b/yarn.lock index 67906db2678..c7a1fd61077 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2486,9 +2486,7 @@ __metadata: optional: true bin: bsc: cli/bsc.js - bstracing: cli/bstracing.js rescript: cli/rescript.js - rescript-legacy: cli/rescript-legacy.js rescript-tools: cli/rescript-tools.js languageName: unknown linkType: soft From ca3d411c251842ee90b0a76d102760a3dad97628 Mon Sep 17 00:00:00 2001 From: Christoph Knittel Date: Sat, 17 Jan 2026 12:08:30 +0100 Subject: [PATCH 2/3] Format build schema --- docs/docson/build-schema.json | 40 ++++++++--------------------------- 1 file changed, 9 insertions(+), 31 deletions(-) diff --git a/docs/docson/build-schema.json b/docs/docson/build-schema.json index 9a33b46b774..c79b88b5309 100644 --- a/docs/docson/build-schema.json +++ b/docs/docson/build-schema.json @@ -2,12 +2,7 @@ "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { "module-format": { - "enum": [ - "esmodule", - "commonjs", - "es6", - "es6-global" - ], + "enum": ["esmodule", "commonjs", 
"es6", "es6-global"], "description": "es6 and es6-global are deprecated. Default: commonjs." }, "suffix-spec": { @@ -28,9 +23,7 @@ "$ref": "#/definitions/suffix-spec" } }, - "required": [ - "module" - ] + "required": ["module"] }, "package-spec": { "oneOf": [ @@ -73,11 +66,7 @@ "analysis": { "type": "array", "items": { - "enum": [ - "dce", - "exception", - "termination" - ] + "enum": ["dce", "exception", "termination"] }, "description": "The types of analysis to activate. `dce` means dead code analysis, `exception` means exception analysis, and `termination` is to check for infinite loops." }, @@ -177,9 +166,7 @@ "description": "name of the directory" }, "type": { - "enum": [ - "dev" - ] + "enum": ["dev"] }, "files": { "oneOf": [ @@ -225,9 +212,7 @@ "description": "Selected modules, for example, [Module_a, Module_b] " }, { - "enum": [ - "all" - ] + "enum": ["all"] } ], "description": "Default: export all modules. It is recommended for library developers to hide some files/interfaces" @@ -280,9 +265,7 @@ } } }, - "required": [ - "dir" - ] + "required": ["dir"] }, { "title": "Single non-nested directory", @@ -320,9 +303,7 @@ "properties": { "version": { "type": "number", - "enum": [ - 4 - ], + "enum": [4], "description": "Whether to apply the specific version of JSX PPX transformation" }, "module": { @@ -509,8 +490,5 @@ } }, "additionalProperties": false, - "required": [ - "name", - "sources" - ] -} \ No newline at end of file + "required": ["name", "sources"] +} From b67a8e04dad07870a679a8d5a2965d1ccadd362a Mon Sep 17 00:00:00 2001 From: Christoph Knittel Date: Sat, 17 Jan 2026 16:36:15 +0100 Subject: [PATCH 3/3] CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f254f4d32e1..9269351f0b9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ #### :boom: Breaking Change +- Remove the legacy build system. 
Going forward, only the modern build system is supported, and the `rescript-legacy` command is not available anymore. https://github.com/rescript-lang/rescript/pull/8186 - `Int.fromString` and `Float.fromString` use stricter number parsing and no longer uses an explicit radix argument, but instead supports parsing hexadecimal, binary and exponential notation. - Remove `external-stdlib` configuration option from `rescript.json`. This option was rarely used and is no longer supported.