SDK-381 support for spark streaming cluster (#289)
support for spark streaming cluster
saiyam1712 authored and chattarajoy committed Nov 4, 2019
1 parent baca769 commit 3dc5bc4
Showing 2 changed files with 19 additions and 2 deletions.
2 changes: 1 addition & 1 deletion qds_sdk/engine.py
@@ -147,7 +147,7 @@ def engine_parser(argparser):
     engine_group = argparser.add_argument_group("engine settings")
     engine_group.add_argument("--flavour",
                               dest="flavour",
-                              choices=["hadoop", "hadoop2", "presto", "spark", "hbase", "airflow", "deeplearning"],
+                              choices=["hadoop", "hadoop2", "presto", "spark", "sparkstreaming", "hbase", "airflow", "deeplearning"],
                               default=None,
                               help="Set engine flavour")

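With this change, sparkstreaming becomes an accepted value for the --flavour option of the v2 cluster commands. A minimal sketch of a create invocation, reusing the placeholder label and Spark override from the new test below:

    qds.py --version v2 cluster create --label test_label \
        --flavour sparkstreaming --custom-spark-config 'spark-overrides'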
19 changes: 18 additions & 1 deletion tests/test_clusterv2.py
@@ -460,6 +460,23 @@ def test_spark_engine_config(self):
                                                               'custom_spark_config': 'spark-overrides'}},
                                                      'cluster_info': {'label': ['test_label'],}})

+    def test_sparkstreaming_engine_config(self):
+        with tempfile.NamedTemporaryFile() as temp:
+            temp.write("config.properties:\na=1\nb=2".encode("utf8"))
+            temp.flush()
+            sys.argv = ['qds.py', '--version', 'v2', 'cluster', 'create', '--label', 'test_label',
+                        '--flavour', 'sparkstreaming', '--custom-spark-config', 'spark-overrides']
+            Qubole.cloud = None
+            print_command()
+            Connection._api_call = Mock(return_value={})
+            qds.main()
+            Connection._api_call.assert_called_with('POST', 'clusters',
+                                                    {'engine_config':
+                                                         {'flavour': 'sparkstreaming',
+                                                          'spark_settings': {
+                                                              'custom_spark_config': 'spark-overrides'}},
+                                                     'cluster_info': {'label': ['test_label'],}})
+
     def test_airflow_engine_config(self):
         with tempfile.NamedTemporaryFile() as temp:
             temp.write("config.properties:\na=1\nb=2".encode("utf8"))
@@ -787,7 +804,7 @@ def test_engine_config(self):
             temp.write("a=1\nb=2".encode("utf8"))
             temp.flush()
             sys.argv = ['qds.py', '--version', 'v2', 'cluster', 'update', '123',
-                        '--use-qubole-placement-policy', '--enable-rubix',
+                        '--use-qubole-placement-policy', '--enable-rubix',
                         '--custom-hadoop-config',temp.name]
             Qubole.cloud = None
             print_command()
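Assuming a pytest-compatible runner is available (the project's own test runner may differ), the new case could be exercised on its own with something like:

    python -m pytest tests/test_clusterv2.py -k test_sparkstreaming_engine_config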
