# Copyright 2017 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe for building Clang toolchain."""
import contextlib
from recipe_engine.recipe_api import Property
from recipe_engine.config import Enum
from PB.go.chromium.org.luci.common.proto.srcman.manifest import Manifest
import re
PYTHON_VERSION_COMPATIBILITY = "PY3"
DEPS = [
"fuchsia/cas_util",
"fuchsia/cipd_util",
"fuchsia/git",
"fuchsia/goma",
"fuchsia/macos_sdk",
"fuchsia/status_check",
"fuchsia/toolchain",
"fuchsia/windows_sdk",
"recipe_engine/buildbucket",
"recipe_engine/cipd",
"recipe_engine/context",
"recipe_engine/file",
"recipe_engine/path",
"recipe_engine/platform",
"recipe_engine/properties",
"recipe_engine/raw_io",
"recipe_engine/resultdb",
"recipe_engine/step",
]
TARGET_TO_ARCH = {
"x64": "x86_64",
"arm64": "aarch64",
}
TARGETS = TARGET_TO_ARCH.keys()
LIBXML2_GIT = "https://fuchsia.googlesource.com/third_party/libxml2"
ZLIB_GIT = "https://fuchsia.googlesource.com/third_party/zlib"
CIPD_SERVER_HOST = "chrome-infra-packages.appspot.com"
# TODO(fxbug.dev/91157): Restore the file name once the
# path length issue is properly fixed.
RESULTDB_JSON = "r.j"
PROPERTIES = {
"repository": Property(
kind=str,
help="Git repository URL",
default="https://llvm.googlesource.com/llvm-project",
),
"revision": Property(kind=str, help="Git revision", default="refs/heads/main"),
"platform": Property(kind=str, help="CIPD platform for the target", default=None),
"do_2stage": Property(kind=bool, help="Do a 2-stage build", default=None),
"enable_lto": Property(
kind=Enum("full", "thin"), help="Enable link-time optimization", default=None
),
"enable_lld": Property(kind=bool, help="Enable LLD linker", default=None),
"enable_assertions": Property(kind=bool, help="Enable assertions", default=False),
"enable_backtraces": Property(kind=bool, help="Enable backtraces", default=False),
"use_inliner_model": Property(kind=bool, help="Use inliner model", default=True),
"builders": Property(
kind=dict,
help="Mapping from platform name to list of builders to trigger",
default=None,
),
}
def slashes(api, path):
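    """Return `path` with backslashes replaced by forward slashes on Windows."""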
return path.replace("\\", "/") if api.platform.is_win else path
def build_zlib(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
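    """Check out zlib v1.2.11, build it with CMake/Ninja, and install it into `destdir`.

    The checkout is also recorded in the source `manifest`.
    """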
zlib_dir = api.path["start_dir"].join("zlib")
src_dir = zlib_dir.join("src")
_, revision = api.git.checkout(ZLIB_GIT, src_dir, ref="refs/tags/v1.2.11")
git_checkout = manifest.directories[str(src_dir)].git_checkout
git_checkout.repo_url = ZLIB_GIT
git_checkout.revision = revision
build_dir = zlib_dir.join("build")
api.file.ensure_directory("make build dir", build_dir)
with api.context(cwd=build_dir):
api.step(
"configure",
[cipd_dir.join("bin", "cmake")]
+ ["-DCMAKE_BUILD_TYPE=Release"]
+ [slashes(api, option.format(**arguments)) for option in options]
+ (["-DAMD64=ON"] if not api.platform.is_win else [])
+ [src_dir],
)
api.step(
"build",
[cipd_dir.join("ninja"), "-j%d" % ninja_jobs],
)
with api.context(env={"DESTDIR": destdir}):
api.step(
"install",
[cipd_dir.join("ninja"), "install"],
)
@contextlib.contextmanager
def resultdb_context(api, build_dir):
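    """Context manager that uploads results from `build_dir` to ResultDB even if the wrapped steps fail."""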
try:
yield
finally:
upload_to_resultdb(api, build_dir)
def upload_to_resultdb(api, build_dir):
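    """Collect the ResultDB JSON files produced by LLVM lit under `build_dir` and upload them."""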
if not api.resultdb.enabled:
return # pragma: no cover
test_data = "%s\n%s\n" % (
build_dir.join(RESULTDB_JSON),
build_dir.join("test", RESULTDB_JSON),
)
results = api.step(
name="collect results.json",
cmd=[
"python3.exe" if api.platform.is_win else "python3",
api.resource("find_resultdb.py"),
"--dir",
build_dir,
"--pattern",
"**/%s" % RESULTDB_JSON,
],
step_test_data=lambda: api.raw_io.test_api.stream_output_text(test_data),
stdout=api.raw_io.output_text(add_output_log=True),
)
results_paths = results.stdout.splitlines()
if not results_paths:
return # pragma: no cover
resultdb_base_variant = {
"bucket": api.buildbucket.build.builder.bucket,
"builder": api.buildbucket.build.builder.builder,
}
cmd = ["vpython", api.resource("resultdb.py")]
cmd.extend("--json=%s" % p for p in results_paths)
api.step(
"resultdb",
api.resultdb.wrap(cmd, base_variant=resultdb_base_variant.copy(), include=True),
)
def build_libxml2(api, options, arguments, destdir, ninja_jobs, cipd_dir, manifest):
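    """Check out libxml2 v2.9.12, build it statically with CMake/Ninja, and install it into `destdir`.

    Returns the path to the installed libxml2 CMake package directory, which is
    later passed to the LLVM build via LibXml2_DIR.
    """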
    # libxml2 requires CMAKE_INSTALL_PREFIX to be set to a valid path, so replace
    # an empty prefix with the destination directory.
install_prefix = "-DCMAKE_INSTALL_PREFIX="
options = [
install_prefix + str(destdir) if option == install_prefix else option
for option in options
]
libxml2_dir = api.path["start_dir"].join("libxml2")
src_dir = libxml2_dir.join("src")
_, revision = api.git.checkout(LIBXML2_GIT, path=src_dir, ref="refs/tags/v2.9.12")
git_checkout = manifest.directories[str(src_dir)].git_checkout
git_checkout.repo_url = LIBXML2_GIT
git_checkout.revision = revision
build_dir = libxml2_dir.join("build")
api.file.ensure_directory("make build dir", build_dir)
with api.context(cwd=build_dir):
api.step(
"configure",
[cipd_dir.join("bin", "cmake")]
+ ["-DCMAKE_BUILD_TYPE=Release"]
+ [slashes(api, option.format(**arguments)) for option in options]
+ [
"-DBUILD_SHARED_LIBS=OFF",
"-DLIBXML2_WITH_ICONV=OFF",
"-DLIBXML2_WITH_ICU=OFF",
"-DLIBXML2_WITH_LZMA=OFF",
"-DLIBXML2_WITH_PYTHON=OFF",
"-DLIBXML2_WITH_TESTS=OFF",
# TODO(phosek): lld only links libxml2 and not zlib, so when
# zlib support in libxml2 is enabled, this fails in the case of
# static linking.
"-DLIBXML2_WITH_ZLIB=OFF",
]
+ [src_dir],
)
api.step(
"build",
[cipd_dir.join("ninja"), "-j%d" % ninja_jobs],
)
api.step(
"install",
[cipd_dir.join("ninja"), "install"],
)
return destdir.join("lib", "cmake", "libxml2-2.9.10")
# //zircon/public/gn/toolchain/clang.gni:clang_runtime sets the JSON schema.
#
# This function makes the prototype spec; debug and breakpad info is added by
# runtimes.py.
def make_runtimes_spec(clang_version):
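    """Build the prototype runtimes spec consumed by runtimes.py.

    Each entry has roughly this shape (illustrative, abbreviated):

        {
            "target": ["x86_64-unknown-fuchsia", "x86_64-fuchsia"],
            "cflags": ["-fsanitize=address"],
            "ldflags": [],
            "runtime": [
                {"dist": "clang/<version>/lib/x86_64-unknown-fuchsia/libclang_rt.asan.so"},
                ...
            ],
        }
    """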
# TODO(fxbug.dev/27110): Ideally this would be done by the cmake build itself.
runtimes = []
for mode_ldflags in [[], ["-static-libstdc++"]]:
for arch, mode_sanitizer_cflags, mode_sanitizer_multilibs, mode_runtimes in [
("x86_64", [], [], []),
("x86_64", ["-fsanitize=address"], ["asan"], ["libclang_rt.asan.so"]),
(
"x86_64",
["-fsanitize=undefined"],
[],
["libclang_rt.ubsan_standalone.so"],
),
("aarch64", [], [], []),
("aarch64", ["-fsanitize=address"], ["asan"], ["libclang_rt.asan.so"]),
(
"aarch64",
["-fsanitize=undefined"],
[],
["libclang_rt.ubsan_standalone.so"],
),
(
"aarch64",
["-fsanitize=hwaddress"],
["hwasan"],
["libclang_rt.hwasan.so"],
),
]:
target_triple = "{arch}-unknown-fuchsia".format(arch=arch)
target = [target_triple, "{arch}-fuchsia".format(arch=arch)]
runtime_dir = "clang/{version}/lib/{target}".format(
version=clang_version, target=target_triple
)
stdlib_dir = "{target}".format(target=target_triple)
mode_cflags = mode_sanitizer_cflags
mode_multilib = "+".join(mode_sanitizer_multilibs)
if mode_multilib:
mode_multilib += "/"
runtime = [{"dist": runtime_dir + "/" + soname} for soname in mode_runtimes]
if not mode_ldflags:
stdlib = [
{
"dist": stdlib_dir + "/" + mode_multilib + soname,
"name": soname.split(".")[0],
}
for soname in [
"libc++.so.2",
"libc++abi.so.1",
"libunwind.so.1",
]
]
runtime.extend(stdlib)
runtimes.append(
{
"target": target,
"cflags": mode_cflags,
"ldflags": mode_ldflags,
"runtime": runtime,
}
)
return runtimes
def RunSteps(
api,
repository,
revision,
platform,
do_2stage,
enable_lto,
enable_lld,
enable_assertions,
enable_backtraces,
use_inliner_model,
builders,
):
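    """Build and test the Clang toolchain, then upload it to CAS and (on prod) to CIPD."""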
use_goma = (
not api.platform.arch == "arm" and api.platform.bits == 64
) and not api.platform.is_win
if use_goma:
api.goma.ensure()
ninja_jobs = api.goma.jobs
goma_context = api.goma.build_with_goma()
else:
ninja_jobs = api.platform.cpu_count
goma_context = contextlib.nullcontext()
# TODO: builders would ideally set this explicitly
if do_2stage is None:
do_2stage = api.buildbucket.builder_id.bucket == "prod"
if enable_lto is None:
if not do_2stage:
enable_lto = False
host_platform = api.cipd_util.platform_name
target_platform = platform or host_platform
use_breakpad = host_platform == "linux-amd64"
manifest = Manifest()
with api.step.nest("ensure_packages"):
with api.context(infra_steps=True):
cipd_dir = api.path["start_dir"].join("cipd")
pkgs = api.cipd.EnsureFile()
            # We don't have an SDK for linux-arm64 or win, but we only need the sysroot.
if (
api.platform.arch == "arm" and api.platform.bits == 64
) or api.platform.is_win:
pkgs.add_package("fuchsia/sdk/core/linux-amd64", "latest", "sdk")
else:
pkgs.add_package("fuchsia/sdk/core/${platform}", "latest", "sdk")
pkgs.add_package("fuchsia/third_party/clang/${platform}", "integration")
# TODO(fxbug.dev/87518) Unify cmake and ninja revisions after test failures are fixed.
if api.platform.is_win:
pkgs.add_package("infra/cmake/${platform}", "version:3.16.1")
elif api.platform.is_mac:
pkgs.add_package(
"fuchsia/third_party/cmake/${platform}",
"git_revision:fab301bb9d6d7d1c92db077fcd4789c0eb03203f",
)
else:
pkgs.add_package(
"fuchsia/third_party/cmake/${platform}",
"integration",
)
pkgs.add_package(
"fuchsia/third_party/ninja/${platform}",
"integration",
)
pkgs.add_package(
"fuchsia/third_party/sysroot/linux",
"tp7-Zyo4pv2SVEoK_eaU6yuKmyxJWcR54vtJKTWpTIYC",
"linux",
)
if use_inliner_model:
pkgs.add_package(
"fuchsia/model/inlining/${platform}", "latest", "model"
)
if use_breakpad:
pkgs.add_package(
"fuchsia/tools/breakpad/${platform}", "latest", "breakpad"
)
ensured = api.cipd.ensure(cipd_dir, pkgs)
for subdir, pins in ensured.items():
directory = manifest.directories[str(cipd_dir.join(subdir))]
directory.cipd_server_host = CIPD_SERVER_HOST
for pin in pins:
directory.cipd_package[pin.package].instance_id = pin.instance_id
staging_dir = api.path["start_dir"].join("staging")
pkg_dir = staging_dir.join("llvm_install")
api.file.ensure_directory("create pkg dir", pkg_dir)
llvm_dir, revision = api.git.checkout_from_build_input(
repository, fallback_ref=revision
)
git_checkout = manifest.directories[str(llvm_dir)].git_checkout
git_checkout.repo_url = repository
git_checkout.revision = revision
target_triple = api.toolchain.PLATFORM_TO_TRIPLE[target_platform]
host_triple = api.toolchain.PLATFORM_TO_TRIPLE[host_platform]
with api.macos_sdk(kind="ios"), api.windows_sdk(), goma_context:
if api.platform.name == "linux":
target_sysroot = host_sysroot = cipd_dir.join("linux")
elif api.platform.name == "mac":
            # TODO(fxbug.dev/3043): Eventually use our own hermetic sysroot, as we do for Linux.
step_result = api.step(
"xcrun",
["xcrun", "--sdk", "macosx", "--show-sdk-path"],
stdout=api.raw_io.output_text(name="sdk-path", add_output_log=True),
step_test_data=lambda: api.raw_io.test_api.stream_output_text(
"/some/xcode/path"
),
)
target_sysroot = host_sysroot = step_result.stdout.strip()
elif api.platform.name == "win":
target_sysroot = host_sysroot = api.windows_sdk.sdk_dir
else: # pragma: no cover
assert False, "unsupported platform"
arguments = {
"target_triple": target_triple,
"host_triple": host_triple,
"target_sysroot": target_sysroot,
"host_sysroot": host_sysroot,
"linux_sysroot": cipd_dir.join("linux"),
"fuchsia_sdk": cipd_dir.join("sdk"),
}
arguments.update(
{
"win": {
"cc": cipd_dir.join("bin", "clang-cl.exe"),
"cxx": cipd_dir.join("bin", "clang-cl.exe"),
"ar": cipd_dir.join("bin", "llvm-lib.exe"),
"ld": cipd_dir.join("bin", "lld-link.exe"),
"mt": cipd_dir.join("bin", "llvm-mt.exe"),
"nm": cipd_dir.join("bin", "llvm-nm.exe"),
"objcopy": cipd_dir.join("bin", "llvm-objcopy.exe"),
"objdump": cipd_dir.join("bin", "llvm-objdump.exe"),
"ranlib": cipd_dir.join("bin", "llvm-ranlib.exe"),
"rc": cipd_dir.join("bin", "llvm-rc.exe"),
"readelf": cipd_dir.join("bin", "llvm-readelf.exe"),
"strip": cipd_dir.join("bin", "llvm-strip.exe"),
"ninja": cipd_dir.join("ninja.exe"),
},
"mac": {
"cc": cipd_dir.join("bin", "clang"),
"cxx": cipd_dir.join("bin", "clang++"),
"ar": cipd_dir.join("bin", "llvm-ar"),
"ld": "/usr/bin/ld",
"libtool": cipd_dir.join("bin", "llvm-libtool-darwin"),
"lipo": cipd_dir.join("bin", "llvm-lipo"),
"nm": cipd_dir.join("bin", "llvm-nm"),
"objcopy": cipd_dir.join("bin", "llvm-objcopy"),
"objdump": cipd_dir.join("bin", "llvm-objdump"),
"ranlib": cipd_dir.join("bin", "llvm-ranlib"),
"readelf": cipd_dir.join("bin", "llvm-readelf"),
"strip": cipd_dir.join("bin", "llvm-strip"),
"ninja": cipd_dir.join("ninja"),
},
"linux": {
"cc": cipd_dir.join("bin", "clang"),
"cxx": cipd_dir.join("bin", "clang++"),
"ar": cipd_dir.join("bin", "llvm-ar"),
"ld": cipd_dir.join("bin", "ld.lld"),
"nm": cipd_dir.join("bin", "llvm-nm"),
"objcopy": cipd_dir.join("bin", "llvm-objcopy"),
"objdump": cipd_dir.join("bin", "llvm-objdump"),
"ranlib": cipd_dir.join("bin", "llvm-ranlib"),
"readelf": cipd_dir.join("bin", "llvm-readelf"),
"strip": cipd_dir.join("bin", "llvm-strip"),
"ninja": cipd_dir.join("ninja"),
},
}[api.platform.name]
)
if use_goma:
arguments.update({"gomacc": api.goma.goma_dir.join("gomacc")})
options = [
"-GNinja",
"-DCMAKE_MAKE_PROGRAM={ninja}",
"-DCMAKE_INSTALL_PREFIX=",
"-DCMAKE_C_COMPILER={cc}",
"-DCMAKE_CXX_COMPILER={cxx}",
"-DCMAKE_ASM_COMPILER={cc}",
]
if use_goma:
options.extend(
[
"-DCMAKE_C_COMPILER_LAUNCHER={gomacc}",
"-DCMAKE_CXX_COMPILER_LAUNCHER={gomacc}",
"-DCMAKE_ASM_COMPILER_LAUNCHER={gomacc}",
]
)
options.extend(
{
"linux": [
"-DCMAKE_AR={ar}",
"-DCMAKE_LINKER={ld}",
"-DCMAKE_NM={nm}",
"-DCMAKE_OBJCOPY={objcopy}",
"-DCMAKE_OBJDUMP={objdump}",
"-DCMAKE_RANLIB={ranlib}",
"-DCMAKE_READELF={readelf}",
"-DCMAKE_STRIP={strip}",
],
"mac": ["-DCMAKE_LIPO={lipo}"],
"win": [
"-DCMAKE_AR={ar}",
"-DCMAKE_LINKER={ld}",
"-DCMAKE_NM={nm}",
"-DCMAKE_OBJCOPY={objcopy}",
"-DCMAKE_OBJDUMP={objdump}",
"-DCMAKE_RANLIB={ranlib}",
"-DCMAKE_READELF={readelf}",
"-DCMAKE_STRIP={strip}",
# TODO(phosek): reenable once we update the host toolchain
# "-DCMAKE_RC_COMPILER={rc}",
# "-DCMAKE_MT={mt}",
],
}[api.platform.name]
)
options.extend(["-DCMAKE_SYSROOT={target_sysroot}"])
# TODO(phosek): consider moving these to a cache file
platform_options = []
if api.platform.is_mac:
platform_options.extend(["-DCMAKE_OSX_DEPLOYMENT_TARGET=10.13"])
if api.platform.is_win:
platform_options.extend(["-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded"])
with api.step.nest("zlib"):
zlib_install_dir = staging_dir.join("zlib_install")
api.file.ensure_directory("create zlib_install_dir", zlib_install_dir)
build_zlib(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX}_COMPILER_TARGET
"-DCMAKE_%s_FLAGS=--target=%s" % (lang, target_triple)
for lang in ["C", "CXX"]
]
+ [
# TODO: once we no longer build libLTO, we can drop this
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
],
arguments,
zlib_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
with api.step.nest("libxml2"):
libxml2_install_dir = staging_dir.join("libxml2_install")
api.file.ensure_directory("create libxml2_install_dir", libxml2_install_dir)
libxml2_cmake_dir = build_libxml2(
api,
options
+ platform_options
+ [
# TODO: once we upgrade to CMake 3.20, we can use
# CMAKE_{C,CXX}_COMPILER_TARGET
"-DCMAKE_%s_FLAGS=--target=%s" % (lang, target_triple)
for lang in ["C", "CXX"]
]
+ [
"-DZLIB_INCLUDE_DIR=%s" % zlib_install_dir.join("include"),
"-DZLIB_LIBRARY=%s"
% zlib_install_dir.join(
"lib", "zlibstatic.lib" if api.platform.is_win else "libz.a"
),
# TODO: once we no longer build libLTO, we can drop this
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON",
],
arguments,
libxml2_install_dir,
ninja_jobs,
cipd_dir,
manifest,
)
api.file.write_proto(
"source manifest", pkg_dir.join("source_manifest.json"), manifest, "JSONPB"
)
# build clang+llvm
build_dir = staging_dir.join("llvm_build")
api.file.ensure_directory("create llvm build dir", build_dir)
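        # The {BOOTSTRAP_}/{STAGE2_}/{stage2_}/{_stage2} placeholders below expand
        # according to do_2stage: for example "-D{BOOTSTRAP_}LLVM_ENABLE_ZLIB=FORCE_ON"
        # becomes "-DBOOTSTRAP_LLVM_ENABLE_ZLIB=FORCE_ON" for a 2-stage build and
        # "-DLLVM_ENABLE_ZLIB=FORCE_ON" otherwise, and the CMake cache file is
        # "Fuchsia.cmake" for a 2-stage build and "Fuchsia-stage2.cmake" otherwise.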
arguments.update(
{
"BOOTSTRAP_": "BOOTSTRAP_",
"STAGE2_": "STAGE2_",
"stage2_": "stage2-",
"_stage2": "",
}
if do_2stage
else {"BOOTSTRAP_": "", "STAGE2_": "", "stage2_": "", "_stage2": "-stage2"}
)
llvm_projects = ["clang", "clang-tools-extra", "lld", "llvm", "polly"]
llvm_runtimes = ["compiler-rt", "libcxx", "libcxxabi", "libunwind"]
options.extend(
[
"-D{BOOTSTRAP_}LLVM_ENABLE_ZLIB=FORCE_ON",
"-D{BOOTSTRAP_}ZLIB_INCLUDE_DIR=%s" % zlib_install_dir.join("include"),
"-D{BOOTSTRAP_}ZLIB_LIBRARY=%s"
% zlib_install_dir.join(
"lib", "zlibstatic.lib" if api.platform.is_win else "libz.a"
),
"-D{BOOTSTRAP_}LLVM_ENABLE_LIBXML2=FORCE_ON",
# Note that the LibXml2 spelling has to match the spelling used in
# https://github.com/llvm/llvm-project/blob/cf54424a/llvm/cmake/config-ix.cmake#L144
"-D{BOOTSTRAP_}LibXml2_DIR=%s" % libxml2_cmake_dir,
"-D{BOOTSTRAP_}CMAKE_FIND_PACKAGE_PREFER_CONFIG=ON",
]
)
if do_2stage:
options.extend(
[
"-DLLVM_ENABLE_ZLIB=FORCE_ON",
"-DZLIB_INCLUDE_DIR=%s" % zlib_install_dir.join("include"),
"-DZLIB_LIBRARY=%s"
% zlib_install_dir.join(
"lib", "zlibstatic.lib" if api.platform.is_win else "libz.a"
),
"-DLLVM_ENABLE_LIBXML2=FORCE_ON",
"-DLibXml2_DIR=%s" % libxml2_cmake_dir,
"-DCMAKE_FIND_PACKAGE_PREFER_CONFIG=ON",
]
)
if api.platform.name == "linux":
if do_2stage:
options.extend(
[
"-D{BOOTSTRAP_}CMAKE_SYSROOT={target_sysroot}",
]
+ [
# BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
"-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-static-libstdc++" % mode
for mode in ["SHARED", "MODULE", "EXE"]
]
+ [
# Unprefixed flags are used by the first stage compiler.
"-DCMAKE_%s_LINKER_FLAGS=-static-libstdc++" % mode
for mode in ["SHARED", "MODULE", "EXE"]
]
)
                # TODO(fxbug.dev/81937)
                # This is a temporary workaround for the out-of-memory issue that
                # linux-arm64 builders are running into; it should be removed once we
                # implement support for fat LTO in Clang.
if api.platform.arch == "arm":
options.extend(
[
"-D{STAGE2_}LLVM_PARALLEL_LINK_JOBS=8",
]
)
else:
options.extend(
[
# BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
"-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-static-libstdc++" % mode
for mode in ["SHARED", "MODULE", "EXE"]
]
)
if host_triple != target_triple: # pragma: no cover
options.extend(
[
"-D{BOOTSTRAP_}CMAKE_SYSTEM_NAME=Linux",
"-D{BOOTSTRAP_}CMAKE_C_COMPILER_TARGET={target_triple}",
"-D{BOOTSTRAP_}CMAKE_CXX_COMPILER_TARGET={target_triple}",
"-D{BOOTSTRAP_}LLVM_DEFAULT_TARGET_TRIPLE={target_triple}",
]
)
elif api.platform.name == "mac":
if do_2stage:
options.extend(
["-D{BOOTSTRAP_}CMAKE_SYSROOT={target_sysroot}"]
+ [
# BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
"-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
% (mode, build_dir.join("lib", "libc++.a"))
for mode in ["SHARED", "MODULE", "EXE"]
]
+ [
# Unprefixed flags are used by the first stage compiler.
"-DCMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
% (mode, cipd_dir.join("lib", "libc++.a"))
for mode in ["SHARED", "MODULE", "EXE"]
]
)
else:
options.extend(
[
# BOOTSTRAP_ prefixed flags are passed to the second stage compiler.
"-D{BOOTSTRAP_}CMAKE_%s_LINKER_FLAGS=-nostdlib++ %s"
% (mode, cipd_dir.join("lib", "libc++.a"))
for mode in ["SHARED", "MODULE", "EXE"]
]
)
# STAGE2_ prefixed flags are passed to the second stage by the first stage build.
options.extend(
[
"-D{STAGE2_}LINUX_aarch64-unknown-linux-gnu_SYSROOT={linux_sysroot}",
"-D{STAGE2_}LINUX_armv7-unknown-linux-gnueabihf_SYSROOT={linux_sysroot}",
"-D{STAGE2_}LINUX_i386-unknown-linux-gnu_SYSROOT={linux_sysroot}",
"-D{STAGE2_}LINUX_x86_64-unknown-linux-gnu_SYSROOT={linux_sysroot}",
"-D{STAGE2_}FUCHSIA_SDK={fuchsia_sdk}",
"-D{STAGE2_}LLVM_LIT_ARGS=--resultdb-output=%s" % RESULTDB_JSON,
]
)
if enable_lto is not None:
options.extend(["-D{STAGE2_}LLVM_ENABLE_LTO=%s" % str(enable_lto).title()])
if enable_lld is not None:
options.extend(["-D{STAGE2_}LLVM_ENABLE_LLD=%s" % str(enable_lld).title()])
if enable_assertions:
options.extend(
[
"-D{STAGE2_}LLVM_ENABLE_ASSERTIONS=%s"
% str(enable_assertions).title()
]
)
if enable_backtraces:
options.extend(
[
"-D{STAGE2_}LLVM_ENABLE_BACKTRACES=%s"
% str(enable_backtraces).title()
]
)
if use_inliner_model and not (
api.platform.arch == "arm" and api.platform.bits == 64
):
venv_dir = api.path["start_dir"].join("tensorflow-venv")
tensorflow_path = api.step(
"get tensorflow",
cmd=[
"python",
"-u",
api.resource("get_tensorflow.py"),
"-vpython-root",
venv_dir,
],
stdout=api.raw_io.output_text(
name="tensorflow-path", add_output_log=True
),
step_test_data=lambda: api.raw_io.test_api.stream_output_text(
"%s"
% venv_dir.join("lib", "python3.8", "site-packages", "tensorflow")
),
).stdout.strip()
options.extend(
[
"-D{STAGE2_}TENSORFLOW_AOT_PATH=%s" % tensorflow_path,
"-D{STAGE2_}LLVM_OVERRIDE_MODEL_HEADER_INLINERSIZEMODEL=%s"
% cipd_dir.join("model", "InlinerSizeModel.h"),
"-D{STAGE2_}LLVM_OVERRIDE_MODEL_OBJECT_INLINERSIZEMODEL=%s"
% cipd_dir.join("model", "InlinerSizeModel.o"),
"-D{STAGE2_}LLVM_RAEVICT_MODEL_PATH=none",
]
)
if api.platform.is_win:
            # TODO(fxbug.dev/75901): Remove this once we roll a new Windows image.
env_prefixes = {
"PATH": ["C:\\Program Files\\Git\\usr\\bin", cipd_dir.join("bin")]
}
else:
env_prefixes = {"PATH": [cipd_dir.join("bin")]}
with api.step.nest("clang"), api.context(
cwd=build_dir, env_prefixes=env_prefixes
):
api.step(
"configure",
[cipd_dir.join("bin", "cmake")]
+ [slashes(api, option.format(**arguments)) for option in options]
+ [
"-C",
llvm_dir.join(
"clang",
"cmake",
"caches",
"Fuchsia{_stage2}.cmake".format(**arguments),
),
llvm_dir.join("llvm"),
],
)
api.file.read_text(
"read CMakeError.log", build_dir.join("CMakeFiles", "CMakeError.log")
)
# Build the full (two-stage) distribution.
api.step(
"build",
[
cipd_dir.join("ninja"),
                    # This only applies to the first stage; the second stage is invoked
                    # by CMake as a subprocess and will use Ninja's default.
"-j%d" % ninja_jobs,
"{stage2_}distribution".format(**arguments),
],
)
# Run the tests.
projects = ["clang", "lld", "llvm", "polly"]
# TODO(leonardchan): run host runtime tests for mac.
if (
host_triple in ["x86_64-unknown-linux-gnu", "aarch64-unknown-linux-gnu"]
and not do_2stage
):
projects += ["runtimes-" + host_triple]
with resultdb_context(api, build_dir):
api.step(
"test",
[cipd_dir.join("ninja"), "-j%d" % api.goma.jobs, "-k0"]
+ [
("{stage2_}check-" + project).format(**arguments)
for project in projects
],
)
with api.context(env={"DESTDIR": pkg_dir}):
api.step(
"install",
[
cipd_dir.join("ninja"),
"{stage2_}install-distribution".format(**arguments),
],
)
if api.platform.is_mac:
# build libunwind, libcxxabi and libcxx for Apple Silicon
runtimes_build_dir = staging_dir.join("runtimes_build")
with api.step.nest("runtimes"):
runtimes_options = [
"-GNinja",
"-DCMAKE_MAKE_PROGRAM={ninja}",
"-DCMAKE_INSTALL_PREFIX=",
"-DCMAKE_ASM_COMPILER=%s" % pkg_dir.join("bin", "clang"),
"-DCMAKE_ASM_COMPILER_TARGET=arm64-apple-darwin",
"-DCMAKE_C_COMPILER=%s" % pkg_dir.join("bin", "clang"),
"-DCMAKE_C_COMPILER_TARGET=arm64-apple-darwin",
"-DCMAKE_CXX_COMPILER=%s" % pkg_dir.join("bin", "clang++"),
"-DCMAKE_CXX_COMPILER_TARGET=arm64-apple-darwin",
"-DCMAKE_APPLE_SILICON_PROCESSOR=arm64",
"-DCMAKE_SYSROOT={target_sysroot}",
"-DLLVM_ENABLE_RUNTIMES=libunwind;libcxxabi;libcxx",
"-DLIBUNWIND_ENABLE_SHARED=OFF",
"-DLIBUNWIND_INSTALL_LIBRARY=OFF",
"-DLIBCXXABI_ENABLE_SHARED=OFF",
"-DLIBCXXABI_INSTALL_LIBRARY=OFF",
"-DLIBCXXABI_ENABLE_STATIC_UNWINDER=ON",
"-DLIBCXXABI_USE_LLVM_UNWINDER=ON",
"-DLIBCXX_ENABLE_SHARED=OFF",
"-DLIBCXX_ENABLE_STATIC_ABI_LIBRARY=ON",
"-DLIBCXX_ABI_VERSION=2",
]
api.step(
"configure",
[cipd_dir.join("bin", "cmake")]
+ ["-B", runtimes_build_dir, "-S", llvm_dir.join("runtimes")]
+ [option.format(**arguments) for option in runtimes_options],
)
api.step(
"build",
[cipd_dir.join("ninja"), "-C", runtimes_build_dir],
)
for lib in ["libc++.a", "libc++experimental.a"]:
api.step(
"lipo %s" % lib,
[
pkg_dir.join("bin", "llvm-lipo"),
"-create",
pkg_dir.join("lib", lib),
runtimes_build_dir.join("lib", lib),
"-output",
pkg_dir.join("lib", lib),
],
)
step_result = api.file.read_text(
"Version.inc",
build_dir.join(
*(
(["tools", "clang", "stage2-bins"] if do_2stage else [])
+ ["tools", "clang", "include", "clang", "Basic", "Version.inc"]
)
),
test_data='#define CLANG_VERSION_STRING "8.0.0"',
)
m = re.search(r'CLANG_VERSION_STRING "([a-zA-Z0-9.-]+)"', step_result)
assert m, "Cannot determine Clang version"
clang_version = m.group(1)
api.toolchain.strip_runtimes(
"generate runtime.json",
spec=make_runtimes_spec(clang_version),
path=pkg_dir.join("lib"),
build_id_subpath="debug/.build-id",
readelf=cipd_dir.join("bin", "llvm-readelf"),
objcopy=cipd_dir.join("bin", "llvm-objcopy"),
dump_syms=(
cipd_dir.join("breakpad", "dump_syms", "dump_syms")
if use_breakpad
else None
),
)
api.step(
"generate license",
cmd=["python", "-u", api.resource("generate_license.py"), "--include"]
+ [
llvm_dir.join(project, "LICENSE.TXT")
for project in llvm_projects + llvm_runtimes
]
+ [
"--extract",
llvm_dir.join("polly", "lib", "External", "isl", "LICENSE"),
"-",
"--extract",
api.path["start_dir"].join("libxml2", "src", "Copyright"),
"-",
"--extract",
api.path["start_dir"].join("zlib", "src", "zlib.h"),
"4-22",
],
stdout=api.raw_io.output_text(leak_to=pkg_dir.join("LICENSE")),
)
digest = api.cas_util.upload(pkg_dir, output_property="isolated")
if api.buildbucket.builder_id.bucket == "prod":
# The published package has the same name for every platform.
api.cipd_util.upload_package(
"fuchsia/third_party/clang/%s" % target_platform,
pkg_dir,
[pkg_dir],
{"git_revision": revision},
repository=repository,
metadata=[("version", clang_version)],
)
# TODO(phosek): move this logic to clang_trigger.py recipe.
if not target_platform in ["linux-arm64", "windows-amd64"]:
# Do a full integration build. This will use the just-built toolchain
# to build all of Fuchsia to check whether there are any regressions.
api.toolchain.trigger_build(
"clang_toolchain",
repository,
revision,
digest,
builders=builders[target_platform],
)
def GenTests(api):
builders = {
"linux-amd64": ["linux-x64-builder"],
"mac-amd64": ["mac-x64-builder"],
}
for os in ("linux", "mac"):
yield (
api.status_check.test("ci_%s_x64" % os)
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name(os)
+ api.properties(
platform=os + "-amd64",
enable_assertions=True,
enable_backtraces=True,
builders=builders,
)
)
yield (
api.status_check.test("prod_%s_x64" % os)
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="prod",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name(os)
+ api.properties(platform=os + "-amd64", builders=builders)
+ api.git.get_remote_branch_head("git ls-remote", "b" * 40)
)
yield (
api.status_check.test("windows_amd64")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("win")
+ api.properties(platform="windows-amd64", builders=builders)
)
yield (
api.status_check.test("linux_arm64")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="prod",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("linux")
+ api.platform.arch("arm")
+ api.platform.bits(64)
+ api.properties(
platform="linux-arm64", use_inliner_model=True, builders=builders
)
)
yield (
api.status_check.test("mac_lld")
+ api.buildbucket.ci_build(
project="fuchsia",
bucket="ci",
git_repo="https://fuchsia.googlesource.com/third_party/llvm-project",
revision="a" * 40,
)
+ api.platform.name("mac")
+ api.properties(platform="mac-amd64", enable_lld=True, builders=builders)
)