Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
  • Loading branch information
Danielle Sikich committed Jun 14, 2021
2 parents cd770fc + 900a1c9 commit d6f9b68
Show file tree
Hide file tree
Showing 84 changed files with 1,605 additions and 395 deletions.
14 changes: 1 addition & 13 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jobs:
strategy:
fail-fast: false
matrix:
distro: [centos.7, centos.8, leap.15, ubuntu.20.04]
distro: [centos.7, leap.15]
compiler: [gcc, clang]
steps:
- name: Checkout code
Expand All @@ -36,15 +36,3 @@ jobs:
run: docker build . -f utils/docker/Dockerfile.${{ matrix.distro }}
--build-arg COMPILER=${{ matrix.compiler }}
--build-arg DEPS_JOBS=8

Spelling:
runs-on: ubuntu-20.04
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Run check
uses: codespell-project/actions-codespell@master
with:
skip: ./src/control/vendor,./.git
ignore_words_file: ci/codespell.ignores
builtin: clear,rare,informal,names,en-GB_to_en-US
23 changes: 17 additions & 6 deletions .github/workflows/ci2.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ on:
push:
branches:
- master
- 'release/*'
pull_request:

jobs:
Expand All @@ -27,8 +28,8 @@ jobs:
- uses: satackey/[email protected]
continue-on-error: true
with:
key: docker-${{ matrix.distro }}-${{ hashFiles('site_scons', 'utils/docker', 'utils/build.config', 'SConstruct', '.github') }}-{hash}
restore-keys: docker-${{ matrix.distro }}-${{ hashFiles('site_scons', 'utils/docker', 'utils/build.config', 'SConstruct', '.github') }}-
key: docker-${{ matrix.distro }}-${{ hashFiles('site_scons', 'utils/docker', 'utils/build.config', 'SConstruct', '.github', '.dockerignore') }}-{hash}
restore-keys: docker-${{ matrix.distro }}-${{ hashFiles('site_scons', 'utils/docker', 'utils/build.config', 'SConstruct', '.github', '.dockerignore') }}-
- name: Prepare base image in Docker
run: docker build . --file utils/docker/Dockerfile.${{ matrix.distro }}
--build-arg DAOS_DEPS_BUILD=no --build-arg UID
Expand Down Expand Up @@ -64,7 +65,7 @@ jobs:
- uses: satackey/[email protected]
continue-on-error: true
with:
key: docker-${{ matrix.distro }}-${{ hashFiles('site_scons', 'utils/docker', 'utils/build.config', 'SConstruct', '.github') }}-{hash}
key: docker-${{ matrix.distro }}-${{ hashFiles('site_scons', 'utils/docker', 'utils/build.config', 'SConstruct', '.github', '.dockerignore') }}-{hash}
restore-keys: docker-${{ matrix.distro }}-
skip-save: true
- name: Update dependencies in image.
Expand All @@ -74,6 +75,12 @@ jobs:
--tag build-image
- name: Run
run: ./utils/run_in_ga_wrapper.sh
- name: Publish NLT test results
if: always()
uses: EnricoMi/[email protected]
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
files: nlt-junit.xml

Build:
name: Build DAOS
Expand All @@ -82,8 +89,8 @@ jobs:
strategy:
fail-fast: false
matrix:
# Run this on distros where we don't test.
distro: [centos.8]
# Run this on distros where we don't run the ci1 Workflow.
distro: [ubuntu.20.04, centos.8]
compiler: [clang, gcc]
steps:
- name: Checkout code
Expand All @@ -94,7 +101,7 @@ jobs:
- uses: satackey/[email protected]
continue-on-error: true
with:
key: docker-${{ matrix.distro }}-${{ hashFiles('site_scons', 'utils/docker', 'utils/build.config', 'SConstruct', '.github') }}-{hash}
key: docker-${{ matrix.distro }}-${{ hashFiles('site_scons', 'utils/docker', 'utils/build.config', 'SConstruct', '.github', '.dockerignore') }}-{hash}
restore-keys: docker-${{ matrix.distro }}-
skip-save: true
- name: Update dependencies in image.
Expand All @@ -106,3 +113,7 @@ jobs:
--build-arg DAOS_JAVA_BUILD=no
--build-arg DEPS_JOBS=10
--build-arg COMPILER=${{ matrix.compiler }}
- name: Build Java in docker.
run: docker build . --file utils/docker/Dockerfile.${{ matrix.distro }}
--build-arg DEPS_JOBS=10
--build-arg COMPILER=${{ matrix.compiler }}
22 changes: 22 additions & 0 deletions .github/workflows/spelling.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# GitHub Actions workflow: run codespell on pushes to master / release
# branches and on every pull request.
# NOTE(review): the indentation below is reconstructed — the scraped page
# had lost all leading whitespace, which is structural in YAML.
name: Codespell

on:
  push:
    branches:
      - master
      - 'release/*'
  pull_request:

jobs:

  Spelling:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Run check
        # NOTE(review): '@master' is a mutable reference — consider pinning
        # this action to a tag or commit SHA for supply-chain safety.
        uses: codespell-project/actions-codespell@master
        with:
          skip: ./src/control/vendor,./.git
          ignore_words_file: ci/codespell.ignores
          builtin: clear,rare,informal,names,en-GB_to_en-US
63 changes: 31 additions & 32 deletions SConstruct
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,9 @@ try:
except NameError:
pass

from prereq_tools import PreReqComponent
import daos_build
from prereq_tools import PreReqComponent


DESIRED_FLAGS = ['-Wno-gnu-designator',
'-Wno-missing-braces',
'-Wno-ignored-attributes',
Expand Down Expand Up @@ -167,15 +165,19 @@ def set_defaults(env, daos_version):
if env.get('BUILD_TYPE') != 'release':
env.Append(CCFLAGS=['-DFAULT_INJECTION=1'])

def preload_prereqs(prereqs):
"""Preload prereqs specific to platform"""
def build_misc():
"""Build miscellaneous items"""
# install the configuration files
SConscript('utils/config/SConscript')

# install certificate generation files
SConscript('utils/certs/SConscript')

prereqs.define('cmocka', libs=['cmocka'], package='libcmocka-devel')
reqs = ['argobots', 'pmdk', 'cmocka', 'ofi', 'hwloc', 'mercury', 'boost',
'uuid', 'crypto', 'fuse', 'protobufc', 'json-c', 'lz4']
if not is_platform_arm():
reqs.extend(['spdk', 'isal', 'isal_crypto'])
prereqs.load_definitions(prebuild=reqs)
# install man pages
try:
SConscript('doc/man/SConscript', must_exist=0)
except SCons.Warnings.MissingSConscriptWarning as _warn:
print("Missing doc/man/SConscript...")

def scons(): # pylint: disable=too-many-locals
"""Execute build"""
Expand Down Expand Up @@ -379,10 +381,14 @@ def scons(): # pylint: disable=too-many-locals
if not os.path.exists(commits_file):
commits_file = None

platform_arm = is_platform_arm()

prereqs = PreReqComponent(env, opts, commits_file)
if not GetOption('help') and not GetOption('clean'):
daos_build.load_mpi_path(env)
preload_prereqs(prereqs)
build_prefix = prereqs.get_src_build_dir()
prereqs.init_build_targets(build_prefix)
prereqs.load_defaults(platform_arm)
if prereqs.check_component('valgrind_devel'):
env.AppendUnique(CPPDEFINES=["D_HAS_VALGRIND"])

Expand Down Expand Up @@ -412,7 +418,6 @@ def scons(): # pylint: disable=too-many-locals
conf_dir = ARGUMENTS.get('CONF_DIR', '$PREFIX/etc')

env.Alias('install', '$PREFIX')
platform_arm = is_platform_arm()
daos_version = get_version()
# Export() is handled specially by pylint so do not merge these two lines.
Export('daos_version', 'API_VERSION', 'env', 'il_env', 'prereqs')
Expand All @@ -424,8 +429,6 @@ def scons(): # pylint: disable=too-many-locals

set_defaults(env, daos_version)

build_prefix = prereqs.get_src_build_dir()

# generate targets in specific build dir to avoid polluting the source code
VariantDir(build_prefix, '.', duplicate=0)
SConscript('{}/src/SConscript'.format(build_prefix))
Expand All @@ -434,30 +437,26 @@ def scons(): # pylint: disable=too-many-locals
buildinfo.gen_script('.build_vars.sh')
buildinfo.save('.build_vars.json')
# also install to $PREFIX/lib to work with existing avocado test code
daos_build.install(env, "lib/daos/", ['.build_vars.sh', '.build_vars.json'])
if prereqs.test_requested():
daos_build.install(env, "lib/daos/",
['.build_vars.sh', '.build_vars.json'])
env.Install('$PREFIX/lib/daos/TESTING/ftest/util',
['site_scons/env_modules.py'])
env.Install('$PREFIX/lib/daos/TESTING/ftest/',
['ftest.sh'])

env.Install("$PREFIX/lib64/daos", "VERSION")

if prereqs.client_requested():
api_version = env.Command("%s/API_VERSION" % build_prefix,
"%s/SConstruct" % build_prefix,
"echo %s > $TARGET" % (API_VERSION))
env.Install("$PREFIX/lib64/daos", api_version)
env.Install(conf_dir + '/bash_completion.d', ['utils/completion/daos.bash'])
env.Install('$PREFIX/lib/daos/TESTING/ftest/util',
['site_scons/env_modules.py'])
env.Install('$PREFIX/lib/daos/TESTING/ftest/',
['ftest.sh'])
api_version = env.Command("%s/API_VERSION" % build_prefix,
"%s/SConstruct" % build_prefix,
"echo %s > $TARGET" % (API_VERSION))
env.Install("$PREFIX/lib64/daos", api_version)

# install the configuration files
SConscript('utils/config/SConscript')

# install certificate generation files
SConscript('utils/certs/SConscript')

# install man pages
SConscript('doc/man/SConscript')
build_misc()

Default(build_prefix)
Depends('install', build_prefix)

# an "rpms" target
env.Command('rpms', '', 'make -C utils/rpms rpms')
Expand Down
2 changes: 1 addition & 1 deletion TAG
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.3.101-tb
1.3.102-tb
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.3.101
1.3.102
6 changes: 6 additions & 0 deletions debian/changelog
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
daos (1.3.102-1) unstable; urgency=medium
[ Johann Lombardi]
* Version bump to 1.3.102 for 2.0 test build 2

-- Johann Lombardi <[email protected]> Fri, 11 Jun 2021 010:00:00 -0100

daos (1.3.101-3) unstable; urgency=medium
[ Johann Lombardi]
* Bump version to match the RPM's one
Expand Down
2 changes: 1 addition & 1 deletion doc/overview/use_cases.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ individual tasks, but not committed, are automatically rolled back.
In the previous <a href="6a">figure</a>, we have two examples of
producer/consumer. The down-sample job consumes raw timesteps generated
by the simulation job and produces sampled timesteps analyzed by the
post-process job. The DAOS stack provides specific mechanims for
post-process job. The DAOS stack provides specific mechanisms for
producer/consumer workflow which even allows the consumer to dumps the
result of its analysis into the same container as the producer.

Expand Down
8 changes: 7 additions & 1 deletion site_scons/components/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@ def define_mercury(reqs):
commands=['./autogen.sh',
'./configure --prefix=$OFI_PREFIX ' +
'--disable-efa ' +
'--disable-psm3 ' +
'--without-gdrcopy ' +
OFI_DEBUG +
exclude(reqs, 'psm2',
Expand Down Expand Up @@ -196,6 +197,11 @@ def define_mercury(reqs):

def define_common(reqs):
"""common system component definitions"""
reqs.define('cmocka', libs=['cmocka'], package='libcmocka-devel')

reqs.define('libunwind', libs=['unwind'], headers=['libunwind.h'],
package='libunwind-devel')

reqs.define('lz4', headers=['lz4.h'], package='lz4-devel')

reqs.define('valgrind_devel', headers=['valgrind/valgrind.h'],
Expand Down Expand Up @@ -286,7 +292,7 @@ def define_components(reqs):
' --enable-stack-unwind',
'make $JOBS_OPT',
'make $JOBS_OPT install'],
requires=['valgrind_devel'],
requires=['valgrind_devel', 'libunwind'],
libs=['abt'],
headers=['abt.h'])

Expand Down
63 changes: 59 additions & 4 deletions site_scons/prereq_tools/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@
from SCons.Script import Configure
from SCons.Script import AddOption
from SCons.Script import SConscript
from SCons.Script import BUILD_TARGETS
# pylint: disable=no-name-in-module
# pylint: disable=import-error
from SCons.Errors import UserError
Expand Down Expand Up @@ -464,9 +465,9 @@ def get(self, subdir, **kw): #pylint: disable=unused-argument
tfile.extractall()
if not RUNNER.run_commands(['mv %s %s' % (prefix, subdir)]):
raise ExtractionError(subdir)
except (IOError, tarfile.TarError):
except (IOError, tarfile.TarError) as io_error:
print(traceback.format_exc())
raise ExtractionError(subdir)
raise ExtractionError(subdir) from io_error
else:
raise UnsupportedCompression(subdir)

Expand Down Expand Up @@ -705,6 +706,22 @@ def __init__(self, env, variables, config_file=None, arch=None):

self.installed = env.subst("$USE_INSTALLED").split(",")
self.exclude = env.subst("$EXCLUDE").split(",")
self._build_targets = []

def init_build_targets(self, build_dir):
    """Set up the default high-level build targets.

    Registers 'client', 'server' and 'test' as SCons aliases of the
    build directory and records which of them the user named on the
    command line, so later ``*_requested()`` queries can consult the
    list.  Indentation reconstructed: the scraped source had lost all
    leading whitespace.

    :param build_dir: variant/build directory the aliases point at
    """
    # Make each pseudo-target an alias for the build tree so that
    # e.g. `scons client` triggers a build of build_dir.
    self.__env.Alias('client', build_dir)
    self.__env.Alias('server', build_dir)
    self.__env.Alias('test', build_dir)
    self._build_targets = []
    # Always build the tree itself, whatever targets were named.
    BUILD_TARGETS.append(build_dir)
    # NOTE(review): because of the elif, asking for both 'client' and
    # 'server' records only 'client' — confirm that is intended.
    if 'client' in BUILD_TARGETS:
        self._build_targets.extend(['client'])
    elif 'server' in BUILD_TARGETS:
        self._build_targets.extend(['server'])
    else:
        # either test or default
        self._build_targets.extend(['client', 'server', 'test'])

def has_source(self, env, *comps, **kw):
"""Check if source exists for a component"""
Expand Down Expand Up @@ -989,10 +1006,12 @@ def load_definitions(self, **kw):
"""

try:
#pylint: disable=import-outside-toplevel
from components import define_components
#pylint: enable=import-outside-toplevel
define_components(self)
except Exception:
raise BadScript("components", traceback.format_exc())
except Exception as old:
raise BadScript("components", traceback.format_exc()) from old

# Go ahead and prebuild some components

Expand All @@ -1001,6 +1020,42 @@ def load_definitions(self, **kw):
env = self.__env.Clone()
self.require(env, comp)

def load_defaults(self, is_arm):
    """Load the default set of prerequisite component definitions.

    Chooses which prerequisite components to prebuild based on the
    build targets recorded by init_build_targets() and the platform,
    then hands the list to load_definitions().  Indentation
    reconstructed: the scraped source had lost all leading whitespace.

    :param is_arm: True when building on an ARM platform, where the
        x86-only components (spdk, isal, isal_crypto) are skipped
    :raises ValueError: if init_build_targets() has not been called
    """
    # Fail fast if the target list was never initialised.
    if not self._build_targets:
        raise ValueError("Call init_build_targets before load_defaults")

    # argobots is not really needed by client but it's difficult to
    # separate.
    common_reqs = ['argobots', 'ofi', 'hwloc', 'mercury', 'boost', 'uuid',
                   'crypto', 'protobufc', 'lz4']
    client_reqs = ['fuse', 'json-c']
    server_reqs = ['pmdk']
    test_reqs = ['cmocka']

    if not is_arm:
        server_reqs.extend(['spdk'])
        common_reqs.extend(['isal', 'isal_crypto'])

    # Copy so that extending reqs below cannot alias/mutate common_reqs.
    # (Also drops the original's dead `reqs = []` assignment.)
    reqs = list(common_reqs)
    if self.test_requested():
        reqs.extend(test_reqs)
    if self.server_requested():
        reqs.extend(server_reqs)
    if self.client_requested():
        reqs.extend(client_reqs)
    self.load_definitions(prebuild=reqs)

def server_requested(self):
    """Return True if a server build was requested.

    Consults the target list recorded by init_build_targets().
    Indentation reconstructed from the whitespace-mangled scrape.
    """
    return "server" in self._build_targets

def client_requested(self):
    """Return True if a client build was requested.

    Consults the target list recorded by init_build_targets().
    Indentation reconstructed from the whitespace-mangled scrape.
    """
    return "client" in self._build_targets

def test_requested(self):
"""return True if test build is requested"""
return "test" in self._build_targets

def modify_prefix(self, comp_def, env): #pylint: disable=unused-argument
"""Overwrite the prefix in cases where we may be using the default"""
if comp_def.package:
Expand Down
Loading

0 comments on commit d6f9b68

Please sign in to comment.