refactor the compilation and linking pipeline to use subsystems
cosmicexplorer committed Jun 7, 2018
1 parent f84eed2 commit 3843d5f
Showing 6 changed files with 242 additions and 201 deletions.
55 changes: 55 additions & 0 deletions src/python/pants/backend/native/subsystems/native_compile_settings.py
@@ -0,0 +1,55 @@
# coding=utf-8
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)

from pants.subsystem.subsystem import Subsystem


class NativeCompileSettings(Subsystem):
"""Settings for compiling the sources of a particular native language, with defaults for fields that targets may override."""

# FIXME: add an NB: noting that subclasses must override these defaults.
default_header_file_extensions = None
default_source_file_extensions = None

@classmethod
def register_options(cls, register):
super(NativeCompileSettings, cls).register_options(register)

register('--strict-deps', type=bool, default=True, fingerprint=True, advanced=True,
help='The default for the "strict_deps" argument for targets of this language.')

register('--fatal-warnings', type=bool, default=True, fingerprint=True, advanced=True,
help='The default for the "fatal_warnings" argument for targets of this language.')

# TODO: make a list of file extension option type?
register('--header-file-extensions', type=list, default=cls.default_header_file_extensions,
fingerprint=True, advanced=True,
help='The allowed file extensions for header files, as a list of strings.')
register('--source-file-extensions', type=list, default=cls.default_source_file_extensions,
fingerprint=True, advanced=True,
help='The allowed file extensions for source files, as a list of strings.')

def get_subsystem_target_mirrored_field_value(self, field_name, target):
"""Get the value of a field mirrored between this subsystem's options and the given target, preferring the target's own setting."""
tgt_setting = getattr(target, field_name)
if tgt_setting is None:
return getattr(self.get_options(), field_name)
return tgt_setting


class CCompileSettings(NativeCompileSettings):
options_scope = 'c-compile-settings'

default_header_file_extensions = ['.h']
default_source_file_extensions = ['.c']


class CppCompileSettings(NativeCompileSettings):
options_scope = 'cpp-compile-settings'

default_header_file_extensions = ['.h', '.hpp', '.tpp']
default_source_file_extensions = ['.cpp', '.cxx', '.cc']
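
The mirrored-field lookup above is the core of this new subsystem: a value set directly on a target wins, and the subsystem option is only the fallback. A minimal, self-contained sketch of that resolution rule, using hypothetical stand-ins (FakeOptions, FakeTarget, mirrored_field_value) rather than real Pants Subsystem or Target objects:

# Hypothetical stand-ins for illustration only; the real lookup is
# NativeCompileSettings.get_subsystem_target_mirrored_field_value above.
class FakeOptions(object):
  # Plays the role of self.get_options() on the subsystem (e.g. --fatal-warnings).
  fatal_warnings = True

class FakeTarget(object):
  # A target which does not set the mirrored field.
  fatal_warnings = None

def mirrored_field_value(options, target, field_name):
  # Prefer the target's own setting; fall back to the subsystem option when it is None.
  tgt_setting = getattr(target, field_name)
  if tgt_setting is None:
    return getattr(options, field_name)
  return tgt_setting

# The target did not override the field, so the subsystem default applies.
assert mirrored_field_value(FakeOptions(), FakeTarget(), 'fatal_warnings') is True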
66 changes: 21 additions & 45 deletions src/python/pants/backend/native/tasks/c_compile.py
@@ -8,29 +8,24 @@
import os

from pants.backend.native.config.environment import CCompiler
from pants.backend.native.subsystems.native_compile_settings import CCompileSettings
from pants.backend.native.subsystems.native_toolchain import NativeToolchain
from pants.backend.native.targets.c_library import CLibrary
from pants.backend.native.tasks.native_compile import NativeCompile, ObjectFiles
from pants.backend.native.tasks.native_compile import NativeCompile
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.util.contextutil import get_joined_path
from pants.util.memo import memoized_property
from pants.util.objects import SubclassesOf, datatype
from pants.util.objects import SubclassesOf
from pants.util.process_handler import subprocess


class CCompileRequest(datatype([
'c_compiler',
'include_dirs',
'sources',
'fatal_warnings',
'output_dir',
])): pass


class CCompile(NativeCompile):

default_header_file_extensions = ['.h']
default_source_file_extensions = ['.c']
# Compile only C library targets.
source_target_constraint = SubclassesOf(CLibrary)

compile_settings_class = CCompileSettings

@classmethod
def implementation_version(cls):
@@ -39,48 +34,34 @@ def implementation_version(cls):
class CCompileError(TaskError):
"""Raised when an error occurs compiling C sources."""

# Compile only C library targets.
source_target_constraint = SubclassesOf(CLibrary)
@classmethod
def subsystem_dependencies(cls):
return super(CCompile, cls).subsystem_dependencies() + (NativeToolchain.scoped(cls),)

@memoized_property
def _toolchain(self):
return NativeToolchain.scoped_instance(self)

@memoized_property
def c_compiler(self):
return self._request_single(CCompiler, self._toolchain)

# FIXME: note somewhere that this means source file names within a target must be unique (even if
# the files are in different subdirectories) -- check this at the target level!!!
def collect_cached_objects(self, versioned_target):
return ObjectFiles(versioned_target.results_dir, os.listdir(versioned_target.results_dir))

def compile(self, versioned_target):
compile_request = self._make_compile_request(versioned_target)
return self._execute_compile_request(compile_request)

def _make_compile_request(self, vt):
include_dirs = self.include_dirs_for_target(vt.target)
self.context.log.debug("include_dirs: {}".format(include_dirs))
sources_by_type = self.get_sources_headers_for_target(vt.target)
fatal_warnings = self.get_task_target_field_value('fatal_warnings', vt.target)
return CCompileRequest(
c_compiler=self.c_compiler,
include_dirs=include_dirs,
sources=sources_by_type.sources,
fatal_warnings=fatal_warnings,
output_dir=vt.results_dir)

def _execute_compile_request(self, compile_request):
def get_compiler(self):
return self.c_compiler

def compile(self, compile_request):
sources = compile_request.sources
output_dir = compile_request.output_dir

if len(sources) == 0:
# FIXME: do we need this log message? Should we still have it for intentionally header-only
# libraries (that might be a confusing message to see)?
self.context.log.debug("no sources in request {}, skipping".format(compile_request))
return ObjectFiles(output_dir, [])

return

# TODO: add -fPIC, but only to object files used for shared libs (how do we determine that?) --
# alternatively, only allow using native code to build shared libs.
c_compiler = compile_request.c_compiler
c_compiler = compile_request.compiler
err_flags = ['-Werror'] if compile_request.fatal_warnings else []
# We are executing in the results_dir, so get absolute paths for everything.
# TODO: -fPIC all the time???
@@ -109,8 +90,3 @@ def _execute_compile_request(self, compile_request):
raise self.CCompileError(
"Error compiling C sources with command {} for request {}. Exit code was: {}."
.format(cmd, compile_request, rc))

# NB: We take everything produced in the output directory without verifying its correctness.
ret = ObjectFiles(output_dir, os.listdir(output_dir))
self.context.log.debug("ret: {}".format(ret))
return ret
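
With the request construction moved into the shared base class, the language-specific piece that remains visible here is the -Werror handling driven by the mirrored fatal_warnings field. A rough sketch of the kind of compiler argv this implies follows; sketch_compile_argv is a hypothetical helper, and the exact flag layout lives in the elided portion of the diff, so the -I/-c arrangement is an assumption rather than a quote of the real command:

def sketch_compile_argv(compiler_exe, include_dirs, sources, fatal_warnings):
  # -Werror is added only when the (possibly target-overridden) fatal_warnings field is set,
  # mirroring the err_flags logic above.
  err_flags = ['-Werror'] if fatal_warnings else []
  # Assumed layout: compile-only (-c) with one -I per include dir; object files land in the
  # working directory, which the task sets to the results dir.
  return [compiler_exe] + err_flags + ['-c'] + ['-I' + d for d in include_dirs] + list(sources)

# e.g. sketch_compile_argv('gcc', ['include'], ['foo.c'], True)
#   -> ['gcc', '-Werror', '-c', '-Iinclude', 'foo.c']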
61 changes: 21 additions & 40 deletions src/python/pants/backend/native/tasks/cpp_compile.py
@@ -8,29 +8,24 @@
import os

from pants.backend.native.config.environment import CppCompiler
from pants.backend.native.subsystems.native_compile_settings import CppCompileSettings
from pants.backend.native.subsystems.native_toolchain import NativeToolchain
from pants.backend.native.targets.cpp_library import CppLibrary
from pants.backend.native.tasks.native_compile import NativeCompile, ObjectFiles
from pants.backend.native.tasks.native_compile import NativeCompile
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.util.contextutil import get_joined_path
from pants.util.memo import memoized_property
from pants.util.objects import SubclassesOf, datatype
from pants.util.objects import SubclassesOf
from pants.util.process_handler import subprocess


class CppCompileRequest(datatype([
'cpp_compiler',
'include_dirs',
'sources',
'fatal_warnings',
'output_dir',
])): pass


class CppCompile(NativeCompile):

default_header_file_extensions = ['.h', '.hpp', '.tpp']
default_source_file_extensions = ['.cpp', '.cxx', '.cc']
# Compile only C++ library targets.
source_target_constraint = SubclassesOf(CppLibrary)

compile_settings_class = CppCompileSettings

@classmethod
def implementation_version(cls):
@@ -39,41 +34,30 @@ def implementation_version(cls):
class CppCompileError(TaskError):
"""Raised when an error occurs compiling C++ sources."""

source_target_constraint = SubclassesOf(CppLibrary)
@classmethod
def subsystem_dependencies(cls):
return super(CppCompile, cls).subsystem_dependencies() + (NativeToolchain.scoped(cls),)

@memoized_property
def _toolchain(self):
return NativeToolchain.scoped_instance(self)

@memoized_property
def cpp_compiler(self):
return self._request_single(CppCompiler, self._toolchain)

# FIXME: note somewhere that this means source file names within a target must be unique -- check
# this at the target level!!!
def collect_cached_objects(self, versioned_target):
return ObjectFiles(versioned_target.results_dir, os.listdir(versioned_target.results_dir))

def compile(self, versioned_target):
compile_request = self._make_compile_request(versioned_target)
return self._execute_compile_request(compile_request)

def _make_compile_request(self, vt):
include_dirs = self.include_dirs_for_target(vt.target)
sources_by_type = self.get_sources_headers_for_target(vt.target)
fatal_warnings = self.get_task_target_field_value('fatal_warnings', vt.target)
return CppCompileRequest(
cpp_compiler=self.cpp_compiler,
include_dirs=include_dirs,
sources=sources_by_type.sources,
fatal_warnings=fatal_warnings,
output_dir=vt.results_dir)

def _execute_compile_request(self, compile_request):
def get_compiler(self):
return self.cpp_compiler

def compile(self, compile_request):
sources = compile_request.sources
output_dir = compile_request.output_dir

if len(sources) == 0:
self.context.log.debug("no sources for request {}, skipping".format(compile_request))
return ObjectFiles(output_dir, [])
return

cpp_compiler = compile_request.cpp_compiler
cpp_compiler = compile_request.compiler
err_flags = ['-Werror'] if compile_request.fatal_warnings else []
# We are executing in the results_dir, so get absolute paths for everything.
# TODO: -fPIC all the time???
@@ -102,6 +86,3 @@ def _execute_compile_request(self, compile_request):
raise self.CppCompileError(
"Error compiling C++ sources with command {} for request {}. Exit code was: {}."
.format(cmd, compile_request, rc))

# NB: We take everything produced in the output directory without verifying its correctness.
return ObjectFiles(output_dir, os.listdir(output_dir))
52 changes: 33 additions & 19 deletions src/python/pants/backend/native/tasks/link_shared_libraries.py
@@ -8,11 +8,13 @@
import os

from pants.backend.native.config.environment import Linker
from pants.backend.native.subsystems.native_toolchain import NativeToolchain
from pants.backend.native.targets.native_library import NativeLibrary
from pants.backend.native.tasks.native_compile import ObjectFiles
from pants.backend.native.tasks.native_compile import NativeTargetDependencies, ObjectFiles
from pants.backend.native.tasks.native_task import NativeTask
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.util.collections import assert_single_element
from pants.util.contextutil import get_joined_path
from pants.util.memo import memoized_property
from pants.util.objects import datatype
@@ -38,6 +40,7 @@ def product_types(cls):

@classmethod
def prepare(cls, options, round_manager):
round_manager.require(NativeTargetDependencies)
round_manager.require(ObjectFiles)

@property
@@ -51,12 +54,29 @@ def implementation_version(cls):
class LinkSharedLibrariesError(TaskError):
"""Raised when an error occurs linking shared libraries."""

@classmethod
def subsystem_dependencies(cls):
return super(LinkSharedLibraries, cls).subsystem_dependencies() + (NativeToolchain.scoped(cls),)

@memoized_property
def _toolchain(self):
return NativeToolchain.scoped_instance(self)

@memoized_property
def linker(self):
return self._request_single(Linker, self._toolchain)

def _retrieve_single_product_at_target_base(self, product_mapping, target):
self.context.log.debug("product_mapping: {}".format(product_mapping))
self.context.log.debug("target: {}".format(target))
product = product_mapping.get(target)
single_base_dir = assert_single_element(product.keys())
single_product = assert_single_element(product[single_base_dir])
return single_product
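
The helper above assumes each target's entry in a product mapping has exactly one base dir, holding exactly one product. A toy illustration of that shape follows; assert_single_element_sketch is a minimal stand-in for the real assert_single_element imported from pants.util.collections, and the example product entry is hypothetical:

def assert_single_element_sketch(iterable):
  # Minimal stand-in: return the only element, raising if there is not exactly one.
  (element,) = tuple(iterable)
  return element

# Hypothetical product entry for one target: a single base dir mapping to a single product.
product_for_target = {'src/native/example': ['object-files-for-example-target']}
base_dir = assert_single_element_sketch(product_for_target.keys())
single_product = assert_single_element_sketch(product_for_target[base_dir])
assert single_product == 'object-files-for-example-target'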

def execute(self):
targets_providing_artifacts = self.context.targets(NativeLibrary.provides_native_artifact)
native_target_deps_product = self.context.products.get(NativeTargetDependencies)
compiled_objects_product = self.context.products.get(ObjectFiles)
shared_libs_product = self.context.products.get(SharedLibrary)

@@ -66,41 +86,35 @@ def execute(self):
if vt.valid:
shared_library = self._retrieve_shared_lib_from_cache(vt)
else:
link_request = self._make_link_request(vt, compiled_objects_product)
link_request = self._make_link_request(
vt, compiled_objects_product, native_target_deps_product)
shared_library = self._execute_link_request(link_request)

# FIXME: de-dup libs by name? just disallow it i think
shared_libs_product.add(vt.target, vt.target.target_base).append(shared_library)

def _retrieve_shared_lib_from_cache(self, vt):
native_artifact = vt.target.provides
path_to_cached_lib = os.path.join(vt.results_dir, native_artifact.as_filename(self.linker.platform))
path_to_cached_lib = os.path.join(
vt.results_dir, native_artifact.as_filename(self.linker.platform))
# TODO: check if path exists!!
return SharedLibrary(name=native_artifact.lib_name, path=path_to_cached_lib)
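
A small sketch of the existence check the TODO above asks for; check_cached_lib_exists is a hypothetical helper, and treating a missing cached library as a hard error (rather than, say, falling back to recompiling) is an assumption about the intended behavior:

import os

def check_cached_lib_exists(path_to_cached_lib):
  # The real code would presumably raise LinkSharedLibrariesError rather than a bare Exception.
  if not os.path.isfile(path_to_cached_lib):
    raise Exception("Expected a cached shared library at {}, but none was found."
                    .format(path_to_cached_lib))
  return path_to_cached_lib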

def _make_link_request(self, vt, compiled_objects_product):
# FIXME: should coordinate to ensure we get the same deps for link and compile (could put that
# in the ObjectFiles type tbh)
deps = self.native_deps(vt.target)

def _make_link_request(self, vt, compiled_objects_product, native_target_deps_product):
self.context.log.debug("link target: {}".format(vt.target))

deps = self._retrieve_single_product_at_target_base(native_target_deps_product, vt.target)

all_compiled_object_files = []

for dep_tgt in deps:
self.context.log.debug("dep_tgt: {}".format(dep_tgt))
product_mapping = compiled_objects_product.get(dep_tgt)
base_dirs = product_mapping.keys()
assert(len(base_dirs) == 1)
single_base_dir = base_dirs[0]
object_files_list = product_mapping[single_base_dir]
assert(len(object_files_list) == 1)
single_product = object_files_list[0]
self.context.log.debug("single_product: {}".format(single_product))
object_files_for_target = single_product.file_paths()
self.context.log.debug("object_files_for_target: {}".format(object_files_for_target))
object_files = self._retrieve_single_product_at_target_base(compiled_objects_product, dep_tgt)
self.context.log.debug("object_files: {}".format(object_files))
object_file_paths = object_files.file_paths()
self.context.log.debug("object_file_paths: {}".format(object_file_paths))
# TODO: dedup object file paths? can we assume they are already deduped?
all_compiled_object_files.extend(object_files_for_target)
all_compiled_object_files.extend(object_file_paths)

return LinkSharedLibraryRequest(
linker=self.linker,
(remainder of this diff, and the diffs for the two other changed files, not shown)