From a002e148f371bf8890cd0ea6bb52720cc7ddf33a Mon Sep 17 00:00:00 2001
From: Anqi
Date: Wed, 15 Dec 2021 16:10:06 +0800
Subject: [PATCH] Update ex-ug-import-from-sst.md

---
 .../nebula-exchange/use-exchange/ex-ug-import-from-sst.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs-2.0/nebula-exchange/use-exchange/ex-ug-import-from-sst.md b/docs-2.0/nebula-exchange/use-exchange/ex-ug-import-from-sst.md
index b7aa1e0cb96..8160c666749 100644
--- a/docs-2.0/nebula-exchange/use-exchange/ex-ug-import-from-sst.md
+++ b/docs-2.0/nebula-exchange/use-exchange/ex-ug-import-from-sst.md
@@ -443,12 +443,12 @@ An SST file is a file that contains a set of ordered key-value pairs of arbitrary length.
 Run the following command to generate SST files from the CSV source files. For a description of the parameters, see [Command parameters](../parameter-reference/ex-ug-para-import-command.md).

 ```bash
-${SPARK_HOME}/bin/spark-submit --master "local" --conf spark.sql.shuffle.partition= --class com.vesoft.nebula.exchange.Exchange -c
+${SPARK_HOME}/bin/spark-submit --master "local" --conf spark.sql.shuffle.partitions= --class com.vesoft.nebula.exchange.Exchange -c
 ```

 !!! note

-    Generating SST files involves a Spark shuffle operation. Be sure to add the `spark.sql.shuffle.partition` configuration to the submit command.
+    Generating SST files involves a Spark shuffle operation. Be sure to add the `spark.sql.shuffle.partitions` configuration to the submit command.

 !!! note

@@ -457,7 +457,7 @@ ${SPARK_HOME}/bin/spark-submit --master "local" --conf spark.sql.shuffle.partiti
 Example:

 ```bash
-${SPARK_HOME}/bin/spark-submit --master "local" --conf spark.sql.shuffle.partition=200 --class com.vesoft.nebula.exchange.Exchange /root/nebula-exchange/nebula-exchange/target/nebula-exchange-{{exchange.release}}.jar -c /root/nebula-exchange/nebula-exchange/target/classes/sst_application.conf
+${SPARK_HOME}/bin/spark-submit --master "local" --conf spark.sql.shuffle.partitions=200 --class com.vesoft.nebula.exchange.Exchange /root/nebula-exchange/nebula-exchange/target/nebula-exchange-{{exchange.release}}.jar -c /root/nebula-exchange/nebula-exchange/target/classes/sst_application.conf
 ```

 After the task is complete, you can view the generated SST files in the `/sst` directory on HDFS (the directory specified by the `nebula.path.remote` parameter).
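
The last changed paragraph above says the generated SST files can be viewed in the `/sst` directory on HDFS. A minimal sketch of that check, not part of the patch itself, assuming the client can reach the HDFS cluster and that `nebula.path.remote` is left at the `/sst` value used in the example:

```bash
# Sketch only: list the SST files written by the Exchange job.
# Assumes nebula.path.remote=/sst as in the patched example; adjust the path otherwise.
hdfs dfs -ls -R /sst
```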