Merge remote-tracking branch 'origin/master' into DAOS-4350
PetFet committed Apr 6, 2020
2 parents c5517d2 + 6a6114d commit 707647e
Showing 125 changed files with 2,188 additions and 2,511 deletions.
66 changes: 45 additions & 21 deletions Jenkinsfile
@@ -49,7 +49,7 @@ def el7_component_repos = ""
def component_repos = ""
def daos_repo = "daos@${env.BRANCH_NAME}:${env.BUILD_NUMBER}"
def el7_daos_repos = el7_component_repos + ' ' + component_repos + ' ' + daos_repo
def functional_rpms = "--exclude openmpi openmpi3 hwloc ndctl spdk-tools " +
def functional_rpms = "--exclude openmpi openmpi3 hwloc ndctl " +
"ior-hpc-cart-4-daos-0 mpich-autoload-cart-4-daos-0 " +
"romio-tests-cart-4-daos-0 hdf5-tests-cart-4-daos-0 " +
"mpi4py-tests-cart-4-daos-0 testmpio-cart-4-daos-0"
@@ -262,7 +262,41 @@ pipeline {
}
*/
}
}
} // stage('checkpatch')
stage('Python Bandit check') {
when {
beforeAgent true
expression {
! commitPragma(pragma: 'Skip-python-bandit').contains('true')
}
}
agent {
dockerfile {
filename 'Dockerfile.code_scanning'
dir 'utils/docker'
label 'docker_runner'
additionalBuildArgs '--build-arg UID=$(id -u) --build-arg JENKINS_URL=' +
env.JENKINS_URL
}
}
steps {
githubNotify credentialsId: 'daos-jenkins-commit-status',
description: env.STAGE_NAME,
context: "build" + "/" + env.STAGE_NAME,
status: "PENDING"
checkoutScm withSubmodules: true
catchError(stageResult: 'UNSTABLE', buildResult: 'SUCCESS') {
runTest script: 'bandit -r . --format xml -o bandit.xml',
junit_files: "bandit.xml",
ignore_failure: true
}
}
post {
always {
junit 'bandit.xml'
}
}
} // stage('Python Bandit check')
}
}
stage('Build') {
@@ -1031,24 +1065,12 @@ }
}
parallel {
stage('Coverity on CentOS 7') {
// Eventually this will only run on Master builds.
// Unfortunately for now, a PR build could break
// the quickbuild, which would not be detected until
// the master build fails.
// when {
// beforeAgent true
// anyOf {
// branch 'master'
// not {
// // expression returns false on grep match
// expression {
// sh script: 'git show -s --format=%B |' +
// ' grep "^Coverity-test: true"',
// returnStatus: true
// }
// }
// }
// }
when {
beforeAgent true
expression {
! commitPragma(pragma: 'Skip-coverity-test').contains('true')
}
}
agent {
dockerfile {
filename 'Dockerfile.centos.7'
@@ -1528,7 +1550,9 @@ pipeline {
allOf {
not { branch 'weekly-testing' }
not { environment name: 'CHANGE_TARGET', value: 'weekly-testing' }
// expression { ! skip_stage('scan-centos-rpms') }
expression {
! commitPragma(pragma: 'Skip-scan-centos-rpms').contains('true')
}
}
}
agent {
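The `when` blocks added above gate the Bandit, Coverity, and CentOS RPM scan stages on `commitPragma(...)` checks instead of the old commented-out `git show | grep` expression. As a rough sketch (the exact trailer parsing is handled by the shared `commitPragma` step and the `Key: true` format is an assumption, not confirmed by this diff), a developer would opt out of a stage from the commit message, and could reproduce the Bandit stage locally with the same command the pipeline runs:

```bash
# Hypothetical commit message trailers matching the pragmas checked above;
# the "Key: true" trailer format is an assumption, not shown in this diff.
git commit -m "DAOS-4350 pipeline cleanup" \
           -m "Skip-python-bandit: true" \
           -m "Skip-coverity-test: true"

# Reproduce the 'Python Bandit check' stage locally (same command as runTest above).
bandit -r . --format xml -o bandit.xml
```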
1 change: 0 additions & 1 deletion SConstruct
@@ -343,7 +343,6 @@ def scons(): # pylint: disable=too-many-locals
preload_prereqs(prereqs)
if prereqs.check_component('valgrind_devel'):
env.AppendUnique(CPPDEFINES=["DAOS_HAS_VALGRIND"])
prereqs.has_source(env, 'fio')
prereqs.add_opts(('GO_BIN', 'Full path to go binary', None))
opts.Save(opts_file, env)

2 changes: 1 addition & 1 deletion doc/admin/installation.md
@@ -101,7 +101,7 @@ $ scons --config=force install
```

If you are a developer of DAOS, we recommend following the instructions in the
[DAOS for Development](https://daos-stack.github.io/admin/installation/#daos-for-development) section.
[DAOS for Development](https://daos-stack.github.io/dev/development/#building-daos-for-development) section.

Otherwise, the missing dependencies can be built automatically by invoking scons
with the following parameters:
4 changes: 4 additions & 0 deletions doc/man/man8/daos.8
@@ -40,9 +40,13 @@ The \fBRESOURCE\fRs, respective \fBCOMMAND\fRs and \fBOPTION\fRs supported by \f
\fBcontainer \fR(\fBcont\fR) container in a pool
.br
\fBobject \fR(\fBobj\fR) object in a container
.br
\fBversion\fR print command version
.br
\fBhelp\fR print this message and exit
.TP
.I help \fR[\fBRESOURCE \fR[\fBCOMMAND\fR]] \h'4' per-resource/command help
.TP
.I pool \fBCOMMAND\fRs:
\fBlist-containers\fR list all containers in pool
.br
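A quick illustration of the new `version` and `help` entries added to the man page above; the `help [RESOURCE [COMMAND]]` forms follow the syntax documented there (output omitted):

```bash
# Print the daos utility version, then drill into per-resource/command help.
daos version
daos help
daos help pool
daos help pool list-containers
```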
2 changes: 1 addition & 1 deletion doc/overview/architecture.md
@@ -49,7 +49,7 @@ Persistent Memory Development Kit (PMDK)[^1] allows managing
transactional access to SCM and the Storage Performance Development Kit
(SPDK)[^2] enables user-space I/O to NVMe devices.

![](./media/image1.png)
![](../admin/media/image1.png)
Figure 2-1. DAOS Storage

DAOS aims at delivering:
25 changes: 15 additions & 10 deletions doc/user/spark.md
@@ -1,15 +1,18 @@
# Getting Started with the DAOS Hadoop Filesystem

Here, we describe the steps required to build and deploy the DAOS Hadoop
filesystem, and the configurations to access DAOS in Spark. We assume DAOS
servers and agents have already been deployed in the environment; otherwise,
they can be deployed by following the
[DAOS installation guide](https://daos-stack.github.io/admin/installation/).

## Build DAOS Hadoop Filesystem
The DAOS DFS Java API and Hadoop filesystem implementation have been merged into
the DAOS repository. Below are the steps to build the java jar files for the DFS
Java API and DAOS Hadoop filesystem. These jar files are required when running
Spark. You can ignore this section if you already have the pre-built jars.

The DAOS Java and Hadoop filesystem implementation have been merged into
the DAOS repository. Below are the steps to build the Java jar files for the
DAOS Java and DAOS Hadoop filesystem. These jar files are required when
running Spark. You can ignore this section if you already have the pre-built
jars.

```bash
$ git clone https://github.com/daos-stack/daos.git
@@ -24,10 +27,11 @@ After build, the package daos-java-<version>-assemble.tgz will be available
under distribution/target.

## Deploy DAOS Hadoop Filesystem
After unzipping `daos-java-<version>-assemble.tgz`, you will get the following
files.

* `daos-java-api-<version>.jar` and `hadoop-daos-<version>.jar`
After unzipping `daos-java-<version>-assemble.tgz`, you will get the
following files.

* `daos-java-<version>.jar` and `hadoop-daos-<version>.jar`
These files need to be deployed on every compute node that runs Spark.
Place them in a directory, e.g., $SPARK_HOME/jars, that is accessible to all
the nodes, or copy them to every node.
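A minimal sketch of that deployment step, assuming the two jars sit at the top level of the unpacked archive and that `$SPARK_HOME/jars` is the shared directory of choice:

```bash
# Unpack the build artifact and copy the two jars onto a Spark node.
# <version> is a placeholder for the actual release string in the tgz name.
tar xzf daos-java-<version>-assemble.tgz
cp daos-java-<version>.jar hadoop-daos-<version>.jar "$SPARK_HOME/jars/"
```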
@@ -44,8 +48,8 @@ of the Spark executor and driver. This can be configured in Spark's
configuration file spark-defaults.conf.

```
spark.executor.extraClassPath /path/to/daos-java-api-<version>.jar:/path/to/hadoop-daos-<version>.jar
spark.driver.extraClassPath /path/to/daos-java-api-<version>.jar:/path/to/hadoop-daos-<version>.jar
spark.executor.extraClassPath /path/to/daos-java-<version>.jar:/path/to/hadoop-daos-<version>.jar
spark.driver.extraClassPath /path/to/daos-java-<version>.jar:/path/to/hadoop-daos-<version>.jar
```

* Next, export all DAOS related env variables and the following env variable in
@@ -136,12 +140,13 @@ container UUID. Then set `fs.daos.preload.size` to a value greater than 0 and
`c2.fs.daos.preload.size` to 0.

## Access DAOS in Spark

All Spark APIs that work with the Hadoop filesystem will work with DAOS. We use
the `daos://` URI to access files stored in DAOS. For example, to read the
people.json file from the root directory of the DAOS filesystem, we can use the
following pySpark code:


```python
df = spark.read.json("daos://default:1/people.json")
```

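One possible way to launch such a script is sketched below; the classpath values simply mirror the spark-defaults.conf entries shown earlier, and the paths, version, and script name are placeholders:

```bash
# Submit the PySpark snippet above as a job; classpath entries mirror the
# spark-defaults.conf settings shown earlier (paths/version are placeholders).
spark-submit \
  --driver-class-path "/path/to/daos-java-<version>.jar:/path/to/hadoop-daos-<version>.jar" \
  --conf spark.executor.extraClassPath="/path/to/daos-java-<version>.jar:/path/to/hadoop-daos-<version>.jar" \
  read_people_json.py
```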
13 changes: 4 additions & 9 deletions src/bio/SConscript
@@ -14,22 +14,17 @@ def scons():
SConscript('smd/SConscript')
denv.AppendUnique(LIBPATH=['smd'])

# Link to DPDK static libs
denv.AppendUnique(LINKFLAGS=['-Wl,--whole-archive', \
'-lrte_mempool', '-lrte_mempool_ring', '-lrte_bus_pci', \
'-lrte_pci', '-lrte_ring', '-lrte_mbuf', '-lrte_eal', \
'-lrte_kvargs', \
'-Wl,--no-whole-archive'])

# SPDK shared libs
# SPDK related libs
libs = ['spdk_env_dpdk', 'spdk_thread', 'spdk_bdev', 'spdk_copy']
libs += ['rte_mempool', 'rte_mempool_ring', 'rte_bus_pci']
libs += ['rte_pci', 'rte_ring', 'rte_mbuf', 'rte_eal', 'rte_kvargs']
libs += ['spdk_bdev_aio', 'spdk_bdev_nvme', 'spdk_bdev_malloc']
libs += ['spdk_conf', 'spdk_blob', 'spdk_nvme', 'spdk_util']
libs += ['spdk_json', 'spdk_jsonrpc', 'spdk_rpc', 'spdk_trace']
libs += ['spdk_sock', 'spdk_log', 'spdk_notify', 'spdk_blob_bdev']

# Other libs
libs += ['numa', 'smd']
libs += ['numa', 'dl', 'smd']

bio = daos_build.library(denv, "bio", Glob('*.c'), LIBS=libs)
denv.Install('$PREFIX/lib64/daos_srv', bio)
9 changes: 6 additions & 3 deletions src/bio/bio_monitor.c
@@ -153,11 +153,12 @@ get_spdk_err_log_page_completion(struct spdk_bdev_io *bdev_io, bool success,
{
struct bio_dev_health *dev_health = cb_arg;
int sc, sct;
uint32_t cdw0;

D_ASSERT(dev_health->bdh_inflights == 1);

/* Additional NVMe status information */
spdk_bdev_io_get_nvme_status(bdev_io, &sct, &sc);
spdk_bdev_io_get_nvme_status(bdev_io, &cdw0, &sct, &sc);
if (sc)
D_ERROR("NVMe status code/type: %d/%d\n", sc, sct);

@@ -180,11 +181,12 @@ get_spdk_identify_ctrlr_completion(struct spdk_bdev_io *bdev_io, bool success,
uint32_t numd, numdl, numdu;
int rc;
int sc, sct;
uint32_t cdw0;

D_ASSERT(dev_health->bdh_inflights == 1);

/* Additional NVMe status information */
spdk_bdev_io_get_nvme_status(bdev_io, &sct, &sc);
spdk_bdev_io_get_nvme_status(bdev_io, &cdw0, &sct, &sc);
if (sc) {
D_ERROR("NVMe status code/type: %d/%d\n", sc, sct);
dev_health->bdh_inflights--;
@@ -248,11 +250,12 @@ get_spdk_log_page_completion(struct spdk_bdev_io *bdev_io, bool success,
uint8_t crit_warn;
int rc;
int sc, sct;
uint32_t cdw0;

D_ASSERT(dev_health->bdh_inflights == 1);

/* Additional NVMe status information */
spdk_bdev_io_get_nvme_status(bdev_io, &sct, &sc);
spdk_bdev_io_get_nvme_status(bdev_io, &cdw0, &sct, &sc);
if (sc) {
D_ERROR("NVMe status code/type: %d/%d\n", sc, sct);
dev_health->bdh_inflights--;
3 changes: 0 additions & 3 deletions src/bio/bio_xstream.c
@@ -39,9 +39,6 @@
#include "bio_internal.h"
#include <daos_srv/smd.h>

/* FIXME: remove it once SPDK being upgraded */
void spdk_set_thread(struct spdk_thread *thread);

/* These Macros should be turned into DAOS configuration in the future */
#define DAOS_MSG_RING_SZ 4096
/* SPDK blob parameters */
3 changes: 0 additions & 3 deletions src/client/api/init.c
@@ -43,7 +43,6 @@

static pthread_mutex_t module_lock = PTHREAD_MUTEX_INITIALIZER;
static bool module_initialized;
bool dfs_no_cond_op;

const struct daos_task_api dc_funcs[] = {
/** Managment */
@@ -194,8 +193,6 @@ daos_init(void)
if (rc != 0)
D_GOTO(out_obj, rc);

d_getenv_bool("DFS_NO_COND_OP", &dfs_no_cond_op);

module_initialized = true;
D_GOTO(unlock, rc = 0);
