Remove tools/build.py (#2865)

Testing regression: ASAN build removed.
Ryan Dahl 2019-09-06 20:32:58 -04:00 committed by GitHub
parent 595b4daa77
commit acaff6d056
24 changed files with 179 additions and 4022 deletions


@@ -179,20 +179,14 @@ install:
- cargo --version
before_build:
# Download clang and gn, generate ninja files.
- python tools\setup.py
# Start sccache, then throw away the S3 access key.
- ps: |-
sccache --start-server
$env:AWS_SECRET_ACCESS_KEY = $null
build_script:
# Build with Cargo first. Both builds produce a deno.exe in the same dir. We
# want the final one (which gets tested and released) to be built by Ninja.
- cargo build -vv --release --locked
- cargo clippy --all-targets --release --locked -- -D clippy::all
- python tools\build.py
- cargo build -vv --release --all-targets --locked
test_script:
- python tools\lint.py
@@ -203,24 +197,6 @@ after_test:
# Stop sccache and show stats.
- ps: sccache --stop-server
# Verify that the build is fully up-to-date. Running ninja should be a no-op.
# This catches erroneous file cleanup, and incorrectly set up build deps.
- ps: |-
$out = ninja -C $env:DENO_BUILD_PATH -n -d explain
if ($out -notcontains "ninja: no work to do.") {
throw "Build should be up-to-date but isn't."
}
# Verify that the bundled javascript and typescript files are listed
# explicitly in cli_snapshots\BUILD.gn. This is not an air-tight check.
- ps: |-
$ignore = "test_util.ts", "unit_tests.ts", "unit_test_runner.ts", "*_test.ts"
Get-ChildItem "js" -File -Force -Name |
where { $name = $_; -not ($ignore | where { $name -like $_ }) } |
where { -not (Select-String -Pattern $_ -Path cli_snapshots\BUILD.gn `
-SimpleMatch -CaseSensitive) } |
foreach { throw "$_ should be listed in cli_snapshots\BUILD.gn but isn't." }
# If this build is going to be deployed, build a zip file.
- ps: |-
if ($env:APPVEYOR_REPO_TAG -eq "true") {


@@ -61,7 +61,6 @@ install:
rm -rf "$RUSTUP_HOME"toolchains/*/share
before_script:
- ./tools/setup.py
# Start sccache, then throw away the S3 access key.
- |-
sccache --start-server
@@ -72,13 +71,14 @@ before_script:
script:
- ./tools/lint.py
- ./tools/test_format.py
- ./tools/build.py -C target/release
- cargo clippy --all-targets --release --locked -- -D clippy::all
- cargo build --release --locked --all-targets
- DENO_BUILD_MODE=release ./tools/test.py
jobs:
fast_finish: true
include:
- name: "gn release mac x86_64"
- name: "release mac x86_64"
os: osx
after_success:
- &gzip_release
@@ -94,7 +94,7 @@ jobs:
repo: denoland/deno
skip-cleanup: true
- name: "gn release linux x86_64"
- name: "release linux x86_64"
os: linux
after_success:
- *gzip_release
@@ -110,36 +110,3 @@ jobs:
branch: master
repo: denoland/deno
skip-cleanup: true
- name: "cargo release linux x86_64"
os: linux
script:
- ./tools/lint.py
- ./tools/test_format.py
- cargo build --release --locked
- cargo clippy --all-targets --release --locked -- -D clippy::all
- DENO_BUILD_MODE=release CARGO_TEST=1 ./tools/test.py
# LSAN: We are in the process of getting a completely clean LSAN build,
# but it will take some work. So for now we just run a subset of the
# tests. We want to detect leaks during the build process as well as
# when executing the tests. So set the ASAN_OPTIONS env var before
# build.py is run.
- name: "asan/lsan linux"
os: linux
script:
- ./tools/lint.py
- ./tools/test_format.py
- echo is_asan=true >> target/debug/args.gn
- echo is_lsan=true >> target/debug/args.gn
# TODO(ry) sccache doesn't support "-Xclang -fdebug-compilation-dir"
# Which is enabled for ASAN builds if symbol_level != 0.
# https://cs.chromium.org/chromium/src/build/config/compiler/BUILD.gn?l=1087&rcl=573da77f569b41e23527f2952938f492678ab4bb
# Ideally we can remove this constraint in the future.
- echo symbol_level=0 >> target/debug/args.gn
# Call gn gen again to make sure new args are recognized.
- third_party/depot_tools/gn gen target/debug
- export ASAN_OPTIONS=detect_leaks=1
- ./tools/build.py libdeno_test
- ./target/debug/libdeno_test


@@ -1,20 +1,6 @@
import("//build_extra/rust/rust.gni")
group("default") {
testonly = true
deps = [
":hyper_hello",
"cli:cli_test",
"cli:deno",
"core:default",
"core/libdeno:libdeno_test",
]
}
rust_executable("hyper_hello") {
source_root = "tools/hyper_hello/hyper_hello.rs"
extern_rlib = [
"hyper",
"ring",
"core/libdeno:default",
]
}

File diff suppressed because it is too large.


@@ -1,2 +0,0 @@
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
fn main() {}


@@ -1,2 +0,0 @@
REM Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
@"%PYTHON_EXE%" "%~dpn0.py" %*


@@ -1,14 +0,0 @@
#!/usr/bin/env python
# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
import sys
import re
# Read the package version from Cargo.toml and output as json
cargo_toml_path = sys.argv[1]
for line in open(cargo_toml_path):
match = re.search('version = "(.*)"', line)
if match:
print('{"version": "' + match.group(1) + '"}')
break
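For context, this removed helper is what cli/BUILD.gn (further down in this diff) runs through exec_script to learn the crate version. A minimal sketch of a manual run, assuming the script lived at build_extra/rust/get_cargo_info.py next to rust.gni; the printed version is only an example:

```bash
# Prints the first `version = "..."` found in the given Cargo.toml as JSON.
python build_extra/rust/get_cargo_info.py cli/Cargo.toml
# => {"version": "0.17.0"}   (actual value depends on the checked-out Cargo.toml)
```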


@@ -1,2 +0,0 @@
REM Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
@"%PYTHON_EXE%" "%~dpn0.py" %*


@@ -1,203 +0,0 @@
#!/usr/bin/env python
# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
#
# The Rust compiler normally builds source code directly into an executable.
# Internally, object code is produced, and then the (system) linker is called,
# but this all happens under the covers.
#
# However, Deno's build system uses its own linker. For it to successfully
# produce an executable from rustc-generated object code, it needs to link
# with a dozen or so "built-in" Rust libraries (as in: not Cargo crates),
# and we need to tell the linker which and where those .rlibs are.
#
# Hard-coding these libraries into the GN configuration isn't possible: the
# required .rlib files have some sort of hash code in their file name, and their
# location depends on how Rust is set up, and which toolchain is active.
#
# So instead, we have this script: it writes a list of linker options (ldflags)
# to stdout, separated by newline characters. It is called from `rust.gni` when
# GN is generating ninja files (it doesn't run in the build phase).
#
# There is no official way through which rustc will give us the information
# we need, so a "back door" is used. We tell `rustc` to compile a (dummy)
# program, and to use a custom linker. This "linker" doesn't actually link
# anything; it just dumps its argv to a temporary file. When rustc is done,
# this script then reads the linker arguments from that temporary file, and
# then filters it to remove flags that are irrelevant or undesirable.
import json
import re
import sys
import os
from os import path
import subprocess
import tempfile
def capture_linker_args(argsfile_path):
with open(argsfile_path, "wb") as argsfile:
argsfile.write("\n".join(sys.argv[1:]))
def get_ldflags(rustc_args):
# Prepare the environment for rustc.
rustc_env = os.environ.copy()
# We'll capture the arguments rustc passes to the linker by telling it
# that this script *is* the linker.
# On Posix systems, this file is directly executable thanks to its shebang.
# On Windows, we use a .cmd wrapper file.
if os.name == "nt":
rustc_linker_base, _rustc_linker_ext = path.splitext(__file__)
rustc_linker = rustc_linker_base + ".cmd"
else:
rustc_linker = __file__
# Make sure that when rustc invokes this script, it uses the same version
# of the Python interpreter as we're currently using. On Posix systems this
# is done by making the Python directory the first element of PATH.
# On Windows, the wrapper script uses the PYTHON_EXE environment variable.
if os.name == "nt":
rustc_env["PYTHON_EXE"] = sys.executable
else:
python_dir = path.dirname(sys.executable)
rustc_env["PATH"] = python_dir + path.pathsep + os.environ["PATH"]
# Create a temporary file to write captured Rust linker arguments to.
# Unfortunately we can't use tempfile.NamedTemporaryFile here, because the
# file it creates can't be opened in two processes at the same time.
argsfile_fd, argsfile_path = tempfile.mkstemp()
rustc_env["ARGSFILE_PATH"] = argsfile_path
try:
# Build the rustc command line.
# * `-Clinker=` tells rustc to use our fake linker.
# * `-Csave-temps` prevents rustc from deleting object files after
# linking. We need to preserve the extra object file with allocator
# symbols (`_rust_alloc` etc.) in it that rustc produces.
rustc_cmd = [
"rustc",
"-Clinker=" + rustc_linker,
"-Csave-temps",
] + rustc_args
# Spawn the rust compiler.
rustc_proc = subprocess.Popen(
rustc_cmd,
env=rustc_env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
# Forward rustc's output to stderr.
for line in rustc_proc.stdout:
# Suppress the warning:
# `-C save-temps` might not produce all requested temporary
# products when incremental compilation is enabled.
# It's pointless, because incremental compilation is disabled.
if re.match(r"^warning:.*save-temps.*incremental compilation",
line):
continue
# Also, do not write completely blank lines to stderr.
if line.strip() == "":
continue
sys.stderr.write(line)
# The rustc process should return zero. If not, raise an exception.
rustc_retcode = rustc_proc.wait()
if rustc_retcode != 0:
raise subprocess.CalledProcessError(rustc_retcode, rustc_cmd)
# Read captured linker arguments from argsfile.
argsfile_size = os.fstat(argsfile_fd).st_size
argsfile_content = os.read(argsfile_fd, argsfile_size)
args = argsfile_content.split("\n")
except OSError as e: # Note: in python 3 this will be a FileNotFoundError.
print "Error executing rustc command (is rust installed?):"
print " ".join(rustc_cmd) + "\n"
raise e
finally:
# Close and delete the temporary file.
os.close(argsfile_fd)
os.unlink(argsfile_path)
# From the list of captured linker arguments, build the list of ldflags that
# we actually need.
ldflags = []
next_arg_is_flag_value = False
for arg in args:
# Note that within the following if/elif blocks, `pass` means that the
# captured argument gets included in `ldflags`. The final `else`
# clause filters out unrecognized/unwanted flags.
if next_arg_is_flag_value:
# We're looking at a value that follows certain parametric flags,
# e.g. the path in '-L <path>'.
next_arg_is_flag_value = False
elif arg.endswith(".rlib"):
# Built-in Rust library, e.g. `libstd-8524caae8408aac2.rlib`.
pass
elif re.match(r"^empty_crate\.[a-z0-9]+\.rcgu.o$", arg):
# This file is needed because it contains certain allocator
# related symbols (e.g. `__rust_alloc`, `__rust_oom`).
# The Rust compiler normally generates this file just before
# linking an executable. We pass `-Csave-temps` to rustc so it
# doesn't delete the file when it's done linking.
pass
elif arg.endswith(".crate.allocator.rcgu.o"):
# Same as above, but for rustc version 1.29.0 and older.
pass
elif arg.endswith(".lib") and not arg.startswith("msvcrt"):
# Include most Windows static/import libraries (e.g. `ws2_32.lib`).
# However, we ignore Rust's choice of C runtime (`msvcrt*.lib`).
# Rust insists on always using the release "flavor", even in debug
# mode, which causes conflicts with other libraries we link with.
pass
elif arg.upper().startswith("/LIBPATH:"):
# `/LIBPATH:<path>`: Linker search path (Microsoft style).
pass
elif arg == "-l" or arg == "-L":
# `-l <name>`: Link with library (GCC style).
# `-L <path>`: Linker search path (GCC style).
next_arg_is_flag_value = True # Ensure flag argument is captured.
elif arg == "-Wl,--start-group" or arg == "-Wl,--end-group":
# Start or end of an archive group (GCC style).
pass
else:
# Not a flag we're interested in -- don't add it to ldflags.
continue
ldflags += [arg]
return ldflags
def get_version():
version = subprocess.check_output(["rustc", "--version"])
version = version.strip() # Remove trailing newline.
return version
def main():
# If ARGSFILE_PATH is set this script is being invoked by rustc, which
# thinks we are a linker. All we do now is write our argv to the specified
# file and exit. Further processing is done by our grandparent process,
# also this script but invoked by gn.
argsfile_path = os.getenv("ARGSFILE_PATH")
if argsfile_path is not None:
return capture_linker_args(argsfile_path)
empty_crate_source = path.join(path.dirname(__file__), "empty_crate.rs")
info = {
"version": get_version(),
"ldflags_bin": get_ldflags([empty_crate_source]),
"ldflags_test": get_ldflags([empty_crate_source, "--test"])
}
# Write the information dict as a json object.
json.dump(info, sys.stdout)
if __name__ == '__main__':
sys.exit(main())
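The rust.gni template later in this diff consumes this script via exec_script with no arguments and parses its stdout as JSON. A sketch of a manual run, assuming the script and empty_crate.rs sit together in build_extra/rust/ and rustc is on PATH; the values shown are illustrative and depend on the installed toolchain:

```bash
python build_extra/rust/get_rustc_info.py
# => {"version": "rustc 1.37.0",
#     "ldflags_bin":  ["libstd-<hash>.rlib", ...],
#     "ldflags_test": ["libtest-<hash>.rlib", ...]}
```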


@@ -1,47 +0,0 @@
#!/usr/bin/env python
# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
# This file just executes its arguments, except that it allows overriding
# environment variables using command-line arguments.
import subprocess
import sys
import os
import re
args = []
env = os.environ.copy()
if sys.platform == 'win32':
# On Windows, when gn is setting up the build toolchain, it produces a set
# of environment variables that are required to invoke the right build
# toolchain. We need to load those environment variables here too in order
# for rustc to be able to successfully invoke the linker tool.
# The file is in 'windows environment block' format, which contains
# multiple 'key=value' pairs, separated by '\0' bytes, and terminated by
# two '\0' bytes at the end.
gn_env_pairs = open("environment.x64").read()[:-2].split('\0')
gn_env = dict([pair.split('=', 1) for pair in gn_env_pairs])
env.update(gn_env)
# This is for src/msg.rs to know where to find msg_generated.rs.
# When building with Cargo this variable is set by build.rs.
env["GN_OUT_DIR"] = os.path.abspath(".")
assert os.path.isdir(env["GN_OUT_DIR"])
# Environment variables can be specified on the command line using
# '--env=variable=value' flags. These flags are not passed through to rustc.
# This is useful to set env vars that are normally automatically set by Cargo,
# e.g. CARGO_PKG_NAME, CARGO_PKG_VERSION, OUT_DIR, etc.
for arg in sys.argv[1:]:
match = re.search('--env=([^=]+)=(.*)', arg)
if match:
key, value = match.groups()
if key == "OUT_DIR":
# OUT_DIR needs to contain an absolute path.
value = os.path.abspath(value)
env[key] = value
else:
args.append(arg)
sys.exit(subprocess.call(args, env=env))
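The rust.gni action shown later drives this wrapper: --env=KEY=VALUE flags are applied to the child environment (with OUT_DIR made absolute) and stripped, and whatever remains is executed as the command. A hypothetical invocation, run from the GN out directory, with paths, crate name, and version chosen purely for illustration:

```bash
# The --env flag is consumed by run.py itself; everything after it is executed.
python build_extra/rust/run.py --env=CARGO_PKG_VERSION=0.17.0 \
  rustc cli/main.rs --crate-name=deno --crate-type=bin --emit=obj,dep-info
```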


@@ -1,380 +0,0 @@
# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
declare_args() {
# Absolute path of rust build files.
rust_build = "//build_extra/rust/"
# Wrapper executable for rustc invocations. This can be used for a caching
# utility, e.g. sccache.
rustc_wrapper = ""
# Treat warnings in Rust files as errors.
rust_treat_warnings_as_errors = true
}
if (is_linux) {
executable_suffix = ""
shared_lib_prefix = "lib"
shared_lib_suffix = ".so"
} else if (is_mac) {
executable_suffix = ""
shared_lib_prefix = "lib"
shared_lib_suffix = ".dylib"
} else if (is_win) {
executable_suffix = ".exe"
shared_lib_prefix = ""
shared_lib_suffix = ".dll"
} else {
assert(false, "Unsupported platform")
}
# To simplify transitive dependency management with gn, we build all rust
# crates into the same directory. We need to be careful not to have crates
# with the same name.
out_dir = "$root_out_dir/rust_crates"
# The official way of building Rust executables is to let rustc do the
# linking. However, we'd prefer to leave it in the hands of gn/ninja:
# * It allows us to use source sets.
# * It allows us to use the bundled lld that Chromium and V8 use.
# * We have more control over build flags.
# * To sidestep rustc weirdness (e.g. on Windows, it always links with the
# release C runtime library, even for debug builds).
#
# The `get_rustc_info` tool outputs the linker flags that are needed to
# successfully link rustc object code into an executable.
# We generate two sets of ldflags:
# `ldflags_bin` : Used for rust_executable targets.
# `ldflags_test`: Used for rust_test targets; includes the test harness.
#
# The tool works by compiling and linking something with rustc, and analyzing
# the arguments it passes to the system linker. That's what dummy.rs is for.
_rustc_info = exec_script("get_rustc_info.py", [], "json")
template("_rust_crate") {
config_name = "${target_name}_config"
action_name = "${target_name}_rustc"
forward_variables_from(invoker,
[
"cap_lints",
"cfg",
"crate_name",
"crate_type",
"crate_version",
"deps",
"edition",
"env",
"features",
"generated_source_dir",
"inputs",
"is_test",
"libs",
"source_root",
"testonly",
])
if (!defined(crate_name)) {
crate_name = target_name
}
if (!defined(crate_type)) {
crate_type = "rlib"
}
if (!defined(deps)) {
deps = []
}
if (!defined(edition)) {
edition = "2018"
}
if (!defined(is_test)) {
is_test = false
}
if (!defined(libs)) {
libs = []
}
if (defined(crate_version)) {
# In our build setup, all crates are built in the same directory. To avoid
# file name conflicts when multiple versions of the same crate are
# built, add a unique suffix to output file names.
# Unfortunately the version number as such can't be used directly:
# everything after the first dot (.) is thrown away by rust, so in case of
# foo-0.2 vs foo-0.3 only the first '0' would be used, and conflicts would
# still occur. Therefore we use a hash of the version number instead.
crate_suffix = exec_script("//tools/sha256sum.py",
[
"--input=$crate_version",
"--format=-%.8s",
],
"trim string")
} else {
# For most crates we use only one version; no need for all this difficulty.
crate_suffix = ""
}
if (crate_type == "bin") {
out_file = "$crate_name$crate_suffix.o"
emit_type = "obj"
} else if (crate_type == "proc-macro") {
out_file = "$shared_lib_prefix$crate_name$crate_suffix$shared_lib_suffix"
emit_type = "link"
} else if (crate_type == "rlib") {
out_file = "lib$crate_name$crate_suffix.rlib"
emit_type = "link"
}
out_path = "$out_dir/$out_file"
# Merge `invoker.extern` and `invoker.extern_rlib` into a single list.
extern = []
if (defined(invoker.extern)) {
extern += invoker.extern
}
if (defined(invoker.extern_rlib)) {
foreach(extern_crate_name, invoker.extern_rlib) {
extern += [
{
label = "$rust_build:$extern_crate_name"
crate_name = extern_crate_name
crate_type = "rlib"
},
]
}
}
# Add output file info to every entry in the 'extern' list.
extern_outputs = []
foreach(info, extern) {
extern_outputs += [
{
label = info.label
crate_type = info.crate_type
crate_name = info.crate_name
if (defined(info.crate_version)) {
crate_version = info.crate_version
crate_suffix = exec_script("//tools/sha256sum.py",
[
"--input=$crate_version",
"--format=-%.8s",
],
"trim string")
} else {
crate_suffix = ""
}
if (defined(info.crate_alias)) {
crate_alias = info.crate_alias
} else {
crate_alias = info.crate_name
}
if (crate_type == "rlib") {
out_file = "lib$crate_name$crate_suffix.rlib"
} else if (info.crate_type == "proc_macro") {
out_file =
"$shared_lib_prefix$crate_name$crate_suffix$shared_lib_suffix"
}
out_path = "$out_dir/$out_file"
},
]
}
config(config_name) {
foreach(info, extern_outputs) {
if (info.crate_type == "rlib") {
libs += [ info.out_path ]
}
}
lib_dirs = [ out_dir ]
}
source_set(target_name) {
public_deps = [
":$action_name",
]
libs += [ out_path ]
all_dependent_configs = [ ":$config_name" ]
}
action(action_name) {
script = "//build_extra/rust/run.py"
sources = [
source_root,
]
outputs = [
out_path,
]
depfile = "$out_dir/$crate_name$crate_suffix.d"
if (rustc_wrapper != "") {
args = [ rustc_wrapper ]
} else {
args = []
}
args += [
"rustc",
rebase_path(source_root, root_build_dir),
"--crate-name=$crate_name",
"--crate-type=$crate_type",
"--emit=$emit_type,dep-info",
"--edition=$edition",
"--out-dir=" + rebase_path(out_dir, root_build_dir),
# This is to disambiguate multiple versions of the same crate.
"-Cextra-filename=$crate_suffix",
# Appending the rustc version to the crate metadata ensures that they are
# rebuilt when rustc is upgraded, by changing the command line.
"-Cmetadata=\"${crate_suffix}_${_rustc_info.version}\"",
# This is needed for transitive dependencies.
"-L",
"dependency=" + rebase_path(out_dir, root_build_dir),
# Use colorful output even if stdout is redirected and not a tty.
"--color=always",
]
if (is_win) {
# Proc-macro crates need to be linked by rustc itself, because rustc
# doesn't expose all the information necessary to produce the correct
# linker invocation ourselves. However gn's setup creates an environment
# where link.exe doesn't always work, so we direct rustc to use lld-link,
# and explicitly load the proper environment that makes it work in run.py.
args += [
"-Clinker-flavor=lld-link",
"-Clinker=" + rebase_path(
"//third_party/llvm-build/Release+Asserts/bin/lld-link.exe",
root_build_dir),
]
}
if (is_debug) {
args += [ "-g" ]
}
if (is_official_build) {
args += [ "-O" ]
}
if (is_test) {
args += [ "--test" ]
}
if (rust_treat_warnings_as_errors) {
args += [ "-Dwarnings" ]
}
if (defined(cap_lints)) {
args += [
"--cap-lints",
cap_lints,
]
}
if (defined(invoker.args)) {
args += invoker.args
}
if (defined(cfg)) {
foreach(c, cfg) {
args += [
"--cfg",
c,
]
}
}
if (defined(features)) {
foreach(f, features) {
args += [
"--cfg",
"feature=\"" + f + "\"",
]
}
}
# Build the list of '--extern' arguments from the 'extern_outputs' array.
foreach(info, extern_outputs) {
args += [
"--extern",
info.crate_alias + "=" + rebase_path(info.out_path, root_build_dir),
]
sources += [ info.out_path ]
deps += [ info.label ]
}
if (defined(generated_source_dir)) {
args += [
# Some crates (e.g. 'typenum') generate source files and place them in
# the directory indicated by the 'OUT_DIR' environment variable, which
# is normally set by Cargo. This flag tells run.py to set 'OUT_DIR' to
# the path where the current crate can find its generated sources.
"--env=OUT_DIR=" + rebase_path(generated_source_dir, root_build_dir),
]
}
if (defined(env)) {
foreach(e, env) {
args += [ "--env=$e" ]
}
}
}
}
template("rust_rlib") {
_rust_crate(target_name) {
forward_variables_from(invoker, "*")
crate_type = "rlib"
}
}
template("rust_proc_macro") {
_rust_crate(target_name) {
forward_variables_from(invoker, "*")
crate_type = "proc-macro"
}
}
template("rust_executable") {
bin_name = target_name + "_bin"
bin_label = ":" + bin_name
_rust_crate(bin_name) {
crate_type = "bin"
forward_variables_from(invoker, "*")
}
executable(target_name) {
forward_variables_from(invoker, "*")
if (defined(is_test) && is_test) {
ldflags = _rustc_info.ldflags_test
} else {
ldflags = _rustc_info.ldflags_bin
}
if (!defined(deps)) {
deps = []
}
deps += [ bin_label ]
if (defined(extern)) {
foreach(info, extern) {
if (info.crate_type == "rlib") {
deps += [ info.label ]
}
}
}
if (defined(extern_rlib)) {
foreach(extern_crate_name, extern_rlib) {
deps += [ "$rust_build:$extern_crate_name" ]
}
}
}
}
template("rust_test") {
rust_executable(target_name) {
forward_variables_from(invoker, "*")
is_test = true
testonly = true
}
}


@@ -1,103 +0,0 @@
# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
import("//build/toolchain/cc_wrapper.gni")
import("//build_extra/rust/rust.gni")
main_extern = [
{
label = "../core:deno"
crate_name = "deno"
crate_type = "rlib"
},
{
label = "../cli_snapshots:deno_cli_snapshots"
crate_name = "deno_cli_snapshots"
crate_type = "rlib"
},
{
label = "../deno_typescript:deno_typescript"
crate_name = "deno_typescript"
crate_type = "rlib"
},
{
label = "$rust_build:serde_derive"
crate_name = "serde_derive"
crate_type = "proc_macro"
},
]
main_extern_rlib = [
"ansi_term",
"atty",
"clap",
"dirs",
"futures",
"http",
"hyper",
"hyper_rustls",
"indexmap",
"lazy_static",
"libc",
"log",
"rand",
"regex",
"remove_dir_all",
"reqwest",
"ring",
"rustyline",
"serde",
"serde_json",
"source_map_mappings",
"tempfile",
"termcolor",
"tokio",
"tokio_executor",
"tokio_fs",
"tokio_io",
"tokio_process",
"tokio_rustls",
"tokio_threadpool",
"url",
"utime",
]
if (is_win) {
main_extern_rlib += [
"fwdansi",
"winapi",
]
}
if (is_posix) {
main_extern_rlib += [ "nix" ]
}
# Reads the cargo info from Cargo.toml
deno_cargo_info = exec_script("../build_extra/rust/get_cargo_info.py",
[ rebase_path("Cargo.toml", root_build_dir) ],
"json")
rust_executable("deno") {
source_root = "main.rs"
extern = main_extern
extern_rlib = main_extern_rlib
# Extract version from Cargo.toml
# TODO integrate this into rust.gni by allowing the rust_executable template
# to specify a cargo.toml from which it will extract a version.
inputs = [
"Cargo.toml",
]
env = [ "CARGO_PKG_VERSION=${deno_cargo_info.version}" ]
}
rust_test("cli_test") {
source_root = "main.rs"
extern = main_extern
extern_rlib = main_extern_rlib
# Extract version from Cargo.toml
inputs = [
"Cargo.toml",
]
env = [
"CARGO_PKG_VERSION=${deno_cargo_info.version}",
"CARGO_MANIFEST_DIR=" + rebase_path("."),
]
}


@@ -1,121 +0,0 @@
import("//build_extra/rust/rust.gni")
rust_rlib("deno_cli_snapshots") {
source_root = "lib.rs"
generated_source_dir = rebase_path(root_out_dir)
deps = [
":deno_cli_snapshots_build_run",
]
}
ts_sources = [
"../js/base64.ts",
"../js/blob.ts",
"../js/body.ts",
"../js/buffer.ts",
"../js/build.ts",
"../js/chmod.ts",
"../js/chown.ts",
"../js/colors.ts",
"../js/compiler.ts",
"../js/console.ts",
"../js/console_table.ts",
"../js/copy_file.ts",
"../js/core.ts",
"../js/custom_event.ts",
"../js/deno.ts",
"../js/diagnostics.ts",
"../js/dir.ts",
"../js/dispatch.ts",
"../js/dispatch_json.ts",
"../js/dispatch_minimal.ts",
"../js/dom_file.ts",
"../js/dom_types.ts",
"../js/dom_util.ts",
"../js/error_stack.ts",
"../js/errors.ts",
"../js/event.ts",
"../js/event_target.ts",
"../js/fetch.ts",
"../js/file_info.ts",
"../js/files.ts",
"../js/form_data.ts",
"../js/format_error.ts",
"../js/get_random_values.ts",
"../js/globals.ts",
"../js/headers.ts",
"../js/io.ts",
"../js/lib.deno_runtime.d.ts",
"../js/lib.web_assembly.d.ts",
"../js/link.ts",
"../js/location.ts",
"../js/main.ts",
"../js/make_temp_dir.ts",
"../js/metrics.ts",
"../js/mkdir.ts",
"../js/mock_builtin.js",
"../js/net.ts",
"../js/os.ts",
"../js/performance.ts",
"../js/permissions.ts",
"../js/process.ts",
"../js/read_dir.ts",
"../js/read_file.ts",
"../js/read_link.ts",
"../js/remove.ts",
"../js/rename.ts",
"../js/repl.ts",
"../js/request.ts",
"../js/resources.ts",
"../js/stat.ts",
"../js/symlink.ts",
"../js/text_encoding.ts",
"../js/timers.ts",
"../js/truncate.ts",
"../js/type_directives.ts",
"../js/types.ts",
"../js/url.ts",
"../js/url_search_params.ts",
"../js/util.ts",
"../js/utime.ts",
"../js/version.ts",
"../js/window.ts",
"../js/workers.ts",
"../js/write_file.ts",
"../js/xeval.ts",
]
action("deno_cli_snapshots_build_run") {
script = "run.py"
inputs = ts_sources
outputs = [
"$root_out_dir/CLI_SNAPSHOT.bin",
"$root_out_dir/CLI_SNAPSHOT.js",
"$root_out_dir/CLI_SNAPSHOT.js.map",
"$root_out_dir/CLI_SNAPSHOT.d.ts",
"$root_out_dir/COMPILER_SNAPSHOT.bin",
"$root_out_dir/COMPILER_SNAPSHOT.js",
"$root_out_dir/COMPILER_SNAPSHOT.js.map",
"$root_out_dir/COMPILER_SNAPSHOT.d.ts",
]
args = [ rebase_path("$root_out_dir/deno_cli_snapshots_build", ".") ]
deps = [
":deno_cli_snapshots_build",
]
}
rust_executable("deno_cli_snapshots_build") {
source_root = "build.rs"
extern = [
{
label = "../deno_typescript:deno_typescript"
crate_name = "deno_typescript"
crate_type = "rlib"
},
{
label = "../core:deno"
crate_name = "deno"
crate_type = "rlib"
},
]
}


@@ -1,79 +0,0 @@
import("//build_extra/rust/rust.gni")
group("default") {
testonly = true
deps = [
":deno",
":deno_core_http_bench",
":deno_core_http_bench_test",
":deno_core_test",
]
}
group("deno_core_deps") {
deps = [
"libdeno:libdeno_static_lib",
"libdeno:v8",
]
}
# deno does not depend on flatbuffers or tokio.
main_extern_rlib = [
"futures",
"libc",
"serde_json",
"log",
"url",
]
rust_rlib("deno") {
source_root = "lib.rs"
deps = [
":deno_core_deps",
]
extern_rlib = main_extern_rlib
}
rust_test("deno_core_test") {
source_root = "lib.rs"
deps = [
":deno_core_deps",
]
extern_rlib = main_extern_rlib
}
http_bench_extern = [
{
label = ":deno"
crate_name = "deno"
crate_type = "rlib"
},
]
http_bench_extern_rlib = [
"futures",
"lazy_static",
"libc",
"log",
"tokio",
]
if (is_win) {
http_bench_extern_rlib += [ "winapi" ]
}
rust_executable("deno_core_http_bench") {
source_root = "examples/http_bench.rs"
deps = [
":deno_core_deps",
]
extern = http_bench_extern
extern_rlib = http_bench_extern_rlib
}
rust_test("deno_core_http_bench_test") {
source_root = "examples/http_bench.rs"
deps = [
":deno_core_deps",
]
extern = http_bench_extern
extern_rlib = http_bench_extern_rlib
}


@@ -1,8 +1,5 @@
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
// Run "cargo build -vv" if you want to see gn output.
mod gn {
include!("../tools/gn.rs");
}
fn main() {
let build = gn::Build::setup();
@@ -12,5 +9,145 @@ fn main() {
build.gn_out_dir
);
build.run("core:deno_core_deps");
build.run("core/libdeno:default");
}
mod gn {
use std::env;
use std::path::{self, Path, PathBuf};
use std::process::Command;
pub struct Build {
gn_mode: String,
root: PathBuf,
pub gn_out_dir: String,
pub gn_out_path: PathBuf,
pub check_only: bool,
}
impl Build {
pub fn setup() -> Build {
let gn_mode = if cfg!(target_os = "windows") {
// On Windows, we need to link with a release build of libdeno, because
// rust always uses the release CRT.
// TODO(piscisaureus): make linking with debug libdeno possible.
String::from("release")
} else {
// Cargo sets PROFILE to either "debug" or "release", which conveniently
// matches the build modes we support.
env::var("PROFILE").unwrap()
};
// cd into workspace root.
assert!(env::set_current_dir("..").is_ok());
let root = env::current_dir().unwrap();
// If not using the host's default target, the output folder will change:
// target/release becomes target/$TARGET/release.
// GN should also use this output directory; most things will work with GN
// using the default output directory, but some tests depend on artifacts
// being in a specific directory relative to the main build output.
let gn_out_path = root.join(format!("target/{}", gn_mode.clone()));
let gn_out_dir = normalize_path(&gn_out_path);
// Tell Cargo when to re-run this file. We do this first, so these directives
// can take effect even if something goes wrong later in the build process.
println!("cargo:rerun-if-env-changed=DENO_BUILD_PATH");
// TODO: this is obviously not appropriate here.
println!("cargo:rerun-if-env-changed=APPVEYOR_REPO_COMMIT");
// This helps Rust source files locate the snapshot, source map etc.
println!("cargo:rustc-env=GN_OUT_DIR={}", gn_out_dir);
// Detect if we're being invoked by the rust language server (RLS).
// Unfortunately we can't detect whether we're being run by `cargo check`.
let check_only = env::var_os("CARGO")
.map(PathBuf::from)
.as_ref()
.and_then(|p| p.file_stem())
.and_then(|f| f.to_str())
.map(|s| s.starts_with("rls"))
.unwrap_or(false);
if check_only {
// Enable the 'check_only' feature, which enables some workarounds in the
// rust source code to compile successfully without a bundle and snapshot
println!("cargo:rustc-cfg=feature=\"check-only\"");
}
Build {
gn_out_dir,
gn_out_path,
check_only,
gn_mode,
root,
}
}
pub fn run(&self, gn_target: &str) {
if !self.gn_out_path.join("build.ninja").exists() {
let mut cmd = Command::new("python");
cmd.env("DENO_BUILD_PATH", &self.gn_out_dir);
cmd.env("DENO_BUILD_MODE", &self.gn_mode);
cmd.env("DEPOT_TOOLS_WIN_TOOLCHAIN", "0");
cmd.arg("./tools/setup.py");
if env::var_os("DENO_NO_BINARY_DOWNLOAD").is_some() {
cmd.arg("--no-binary-download");
}
let status = cmd.status().expect("setup.py failed");
assert!(status.success());
}
let mut ninja = Command::new("third_party/depot_tools/ninja");
let ninja = if !cfg!(target_os = "windows") {
&mut ninja
} else {
// Windows needs special configuration. This is similar to the function of
// python_env() in //tools/util.py.
let python_path: Vec<String> = vec![
"third_party/python_packages",
"third_party/python_packages/win32",
"third_party/python_packages/win32/lib",
"third_party/python_packages/Pythonwin",
]
.into_iter()
.map(|p| self.root.join(p).into_os_string().into_string().unwrap())
.collect();
let orig_path = String::from(";")
+ &env::var_os("PATH").unwrap().into_string().unwrap();
let path = self
.root
.join("third_party/python_packages/pywin32_system32")
.into_os_string()
.into_string()
.unwrap();
ninja
.env("PYTHONPATH", python_path.join(";"))
.env("PATH", path + &orig_path)
.env("DEPOT_TOOLS_WIN_TOOLCHAIN", "0")
};
let status = ninja
.arg(gn_target)
.arg("-C")
.arg(&self.gn_out_dir)
.status()
.expect("ninja failed");
assert!(status.success());
}
}
// Utility function to make a path absolute, normalizing it to use forward
// slashes only. Returns an owned String; panics if the path is not valid Unicode.
fn normalize_path<T: AsRef<Path>>(path: T) -> String {
path
.as_ref()
.to_str()
.unwrap()
.to_owned()
.chars()
.map(|c| if path::is_separator(c) { '/' } else { c })
.collect()
}
}


@@ -1,6 +1,15 @@
# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
import("//third_party/v8/gni/v8.gni")
group("default") {
testonly = true
deps = [
":libdeno_static_lib",
":libdeno_test",
":v8",
]
}
config("deno_config") {
include_dirs = [ "//third_party/v8" ] # This allows us to use v8/src/base/ libraries.
configs = [ "//third_party/v8:external_config" ]


@@ -1,22 +0,0 @@
import("//build_extra/rust/rust.gni")
rust_rlib("deno_typescript") {
source_root = "lib.rs"
generated_source_dir = "."
extern = [
{
label = "../core:deno"
crate_name = "deno"
crate_type = "rlib"
},
{
label = "$rust_build:serde_derive"
crate_name = "serde_derive"
crate_type = "proc_macro"
},
]
extern_rlib = [
"serde_json",
"serde",
]
}

@@ -1 +1 @@
Subproject commit 2f1a94bafe78962c39f51bb4249cfe19fe11879d
Subproject commit d75b8c9c2b758d9450859081c8560ea673a5d81c


@@ -1,38 +0,0 @@
#!/usr/bin/env python
# Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
from __future__ import print_function
import argparse
import os
import sys
import third_party
from util import build_path, enable_ansi_colors, run
parser = argparse.ArgumentParser()
parser.add_argument(
"--release", help="Use target/release", action="store_true")
def main(argv):
enable_ansi_colors()
args, rest_argv = parser.parse_known_args(argv)
if "DENO_BUILD_MODE" not in os.environ:
if args.release:
os.environ["DENO_BUILD_MODE"] = "release"
ninja_args = rest_argv[1:]
if not "-C" in ninja_args:
if not os.path.isdir(build_path()):
print("Build directory '%s' does not exist." % build_path(),
"Run tools/setup.py")
sys.exit(1)
ninja_args = ["-C", build_path()] + ninja_args
run([third_party.ninja_path] + ninja_args,
env=third_party.google_env(),
quiet=True)
if __name__ == '__main__':
sys.exit(main(sys.argv))
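This is the wrapper the commit title removes. All it did was honor --release by setting DENO_BUILD_MODE, default the -C directory to the configured build path, and forward the remaining arguments to the bundled ninja; the Cargo invocations added elsewhere in this commit take over that role. A rough before/after sketch (target names are illustrative):

```bash
# Before this commit: thin ninja wrapper.
./tools/build.py --release deno        # roughly: ninja -C target/release deno
# After this commit: plain Cargo, as used by CI above.
cargo build -vv --release --locked
```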


@@ -1,139 +0,0 @@
// Copyright 2018-2019 the Deno authors. All rights reserved. MIT license.
// This is used in cli/build.rs and core/build.rs to interface with the GN build
// system (which defines the deno build).
use std::env;
use std::path::{self, Path, PathBuf};
use std::process::Command;
pub struct Build {
gn_mode: String,
root: PathBuf,
pub gn_out_dir: String,
pub gn_out_path: PathBuf,
pub check_only: bool,
}
impl Build {
pub fn setup() -> Build {
let gn_mode = if cfg!(target_os = "windows") {
// On Windows, we need to link with a release build of libdeno, because
// rust always uses the release CRT.
// TODO(piscisaureus): make linking with debug libdeno possible.
String::from("release")
} else {
// Cargo sets PROFILE to either "debug" or "release", which conveniently
// matches the build modes we support.
env::var("PROFILE").unwrap()
};
// cd into workspace root.
assert!(env::set_current_dir("..").is_ok());
let root = env::current_dir().unwrap();
// If not using the host's default target, the output folder will change:
// target/release becomes target/$TARGET/release.
// GN should also use this output directory; most things will work with GN
// using the default output directory, but some tests depend on artifacts
// being in a specific directory relative to the main build output.
let gn_out_path = root.join(format!("target/{}", gn_mode.clone()));
let gn_out_dir = normalize_path(&gn_out_path);
// Tell Cargo when to re-run this file. We do this first, so these directives
// can take effect even if something goes wrong later in the build process.
println!("cargo:rerun-if-env-changed=DENO_BUILD_PATH");
// TODO: this is obviously not appropriate here.
println!("cargo:rerun-if-env-changed=APPVEYOR_REPO_COMMIT");
// This helps Rust source files locate the snapshot, source map etc.
println!("cargo:rustc-env=GN_OUT_DIR={}", gn_out_dir);
// Detect if we're being invoked by the rust language server (RLS).
// Unfortunately we can't detect whether we're being run by `cargo check`.
let check_only = env::var_os("CARGO")
.map(PathBuf::from)
.as_ref()
.and_then(|p| p.file_stem())
.and_then(|f| f.to_str())
.map(|s| s.starts_with("rls"))
.unwrap_or(false);
if check_only {
// Enable the 'check_only' feature, which enables some workarounds in the
// rust source code to compile successfully without a bundle and snapshot
println!("cargo:rustc-cfg=feature=\"check-only\"");
}
Build {
gn_out_dir,
gn_out_path,
check_only,
gn_mode,
root,
}
}
pub fn run(&self, gn_target: &str) {
if !self.gn_out_path.join("build.ninja").exists() {
let status = Command::new("python")
.env("DENO_BUILD_PATH", &self.gn_out_dir)
.env("DENO_BUILD_MODE", &self.gn_mode)
.env("DEPOT_TOOLS_WIN_TOOLCHAIN", "0")
.arg("./tools/setup.py")
.status()
.expect("setup.py failed");
assert!(status.success());
}
let mut ninja = Command::new("third_party/depot_tools/ninja");
let ninja = if !cfg!(target_os = "windows") {
&mut ninja
} else {
// Windows needs special configuration. This is similar to the function of
// python_env() in //tools/util.py.
let python_path: Vec<String> = vec![
"third_party/python_packages",
"third_party/python_packages/win32",
"third_party/python_packages/win32/lib",
"third_party/python_packages/Pythonwin",
]
.into_iter()
.map(|p| self.root.join(p).into_os_string().into_string().unwrap())
.collect();
let orig_path = String::from(";")
+ &env::var_os("PATH").unwrap().into_string().unwrap();
let path = self
.root
.join("third_party/python_packages/pywin32_system32")
.into_os_string()
.into_string()
.unwrap();
ninja
.env("PYTHONPATH", python_path.join(";"))
.env("PATH", path + &orig_path)
.env("DEPOT_TOOLS_WIN_TOOLCHAIN", "0")
};
let status = ninja
.arg(gn_target)
.arg("-C")
.arg(&self.gn_out_dir)
.status()
.expect("ninja failed");
assert!(status.success());
}
}
// Utility function to make a path absolute, normalizing it to use forward
// slashes only. Returns an owned String; panics if the path is not valid Unicode.
fn normalize_path<T: AsRef<Path>>(path: T) -> String {
path
.as_ref()
.to_str()
.unwrap()
.to_owned()
.chars()
.map(|c| if path::is_separator(c) { '/' } else { c })
.collect()
}


@@ -151,7 +151,8 @@ def hyper_http(hyper_hello_exe):
def http_benchmark(build_dir):
hyper_hello_exe = os.path.join(build_dir, "hyper_hello")
core_http_bench_exe = os.path.join(build_dir, "deno_core_http_bench")
core_http_bench_exe = os.path.join(build_dir,
"examples/deno_core_http_bench")
deno_exe = os.path.join(build_dir, "deno")
return {
# "deno_tcp" was once called "deno"


@@ -140,11 +140,6 @@ def generate_gn_args(mode):
# https://github.com/mozilla/sccache/issues/264
out += ["treat_warnings_as_errors=false"]
# Look for sccache; if found, set rustc_wrapper.
rustc_wrapper = cacher
if rustc_wrapper:
out += ['rustc_wrapper=%s' % gn_string(rustc_wrapper)]
return out


@@ -5,15 +5,6 @@ from test_util import DenoTestCase, run_tests
from util import executable_suffix, tests_path, run, run_output
# In the ninja/gn build we build and test libdeno_test, cli_test,
# deno_core_test and deno_core_http_bench_test individually. When building
# with cargo, however, we just run "cargo test".
# This is hacky, but it is only here temporarily until the ninja/gn build is
# removed.
def is_cargo_test():
return "CARGO_TEST" in os.environ
class TestTarget(DenoTestCase):
@staticmethod
def check_exists(filename):
@@ -32,28 +23,15 @@ class TestTarget(DenoTestCase):
run([bin_file], quiet=True)
def test_cargo_test(self):
if is_cargo_test():
cargo_test = ["cargo", "test", "--all", "--locked"]
if os.environ["DENO_BUILD_MODE"] == "release":
run(cargo_test + ["--release"])
else:
run(cargo_test)
cargo_test = ["cargo", "test", "--all", "--locked"]
if "DENO_BUILD_MODE" in os.environ and \
os.environ["DENO_BUILD_MODE"] == "release":
run(cargo_test + ["--release"])
else:
run(cargo_test)
def test_libdeno(self):
if not is_cargo_test():
self._test("libdeno_test")
def test_cli(self):
if not is_cargo_test():
self._test("cli_test")
def test_core(self):
if not is_cargo_test():
self._test("deno_core_test")
def test_core_http_benchmark(self):
if not is_cargo_test():
self._test("deno_core_http_bench_test")
self._test("libdeno_test")
def test_no_color(self):
t = os.path.join(tests_path, "no_color.js")


@@ -153,21 +153,14 @@ git clone --recurse-submodules https://github.com/denoland/deno.git
Now we can start the build:
```bash
cd deno
./tools/setup.py
# You may need to ensure that sccache is running.
# (TODO it's unclear if this is necessary or not.)
# prebuilt/mac/sccache --start-server
# Build.
./tools/build.py
cargo build -vv
# Run.
./target/debug/deno tests/002_hello.ts
# Test.
./tools/test.py
CARGO_TEST=1 ./tools/test.py
# Format code.
./tools/format.py
@@ -205,36 +198,36 @@ Extra steps for Windows users:
```bash
# Call ninja manually.
./third_party/depot_tools/ninja -C target/debug
ninja -C target/debug
# Build a release binary.
./tools/build.py --release deno
cargo build --release
# List executable targets.
./third_party/depot_tools/gn ls target/debug //:* --as=output --type=executable
gn ls target/debug //:* --as=output --type=executable
# List build configuration.
./third_party/depot_tools/gn args target/debug/ --list
gn args target/debug/ --list
# Edit build configuration.
./third_party/depot_tools/gn args target/debug/
gn args target/debug/
# Describe a target.
./third_party/depot_tools/gn desc target/debug/ :deno
./third_party/depot_tools/gn help
gn desc target/debug/ :deno
gn help
# Update third_party modules
git submodule update
# Skip downloading binary build tools and point the build
# to the system provided ones (for packagers of deno ...).
./tools/setup.py --no-binary-download
export DENO_BUILD_ARGS="clang_base_path=/usr clang_use_chrome_plugins=false"
DENO_GN_PATH=/usr/bin/gn DENO_NINJA_PATH=/usr/bin/ninja ./tools/build.py
export DENO_NO_BINARY_DOWNLOAD=1
DENO_GN_PATH=/usr/bin/gn DENO_NINJA_PATH=/usr/bin/ninja cargo build
```
Environment variables: `DENO_BUILD_MODE`, `DENO_BUILD_PATH`, `DENO_BUILD_ARGS`,
`DENO_DIR`, `DENO_GN_PATH`, `DENO_NINJA_PATH`.
`DENO_DIR`, `DENO_GN_PATH`, `DENO_NINJA_PATH`, `DENO_NO_BINARY_DOWNLOAD`.
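As a rough illustration of how these variables combine under the Cargo-based build described above (paths and values are examples, not requirements):

```bash
# Point the build at system-provided tools instead of downloaded binaries.
export DENO_NO_BINARY_DOWNLOAD=1
export DENO_GN_PATH=/usr/bin/gn DENO_NINJA_PATH=/usr/bin/ninja
export DENO_BUILD_ARGS="clang_base_path=/usr clang_use_chrome_plugins=false"
cargo build --release --locked
DENO_BUILD_MODE=release CARGO_TEST=1 ./tools/test.py
```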
## API reference
@@ -980,7 +973,7 @@ To start profiling,
```sh
# Make sure we're only building release.
# Build deno and V8's d8.
./tools/build.py --release d8 deno
ninja -C target/release d8
# Start the program we want to benchmark with --prof
./target/release/deno tests/http_bench.ts --allow-net --v8-flags=--prof &