Group infer instruction parameters
kthui committed May 31, 2024
1 parent 5141d8f commit 03ae1f1
Showing 1 changed file with 106 additions and 163 deletions.
269 changes: 106 additions & 163 deletions qa/L0_backend_python/response_sender/response_sender_test.py
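
This commit replaces the repeated per-test keyword arguments (and the two _get_parallel_inputs_and_response_types_* helpers) with class-level parameter dictionaries that each test unpacks into the shared assertion helpers. The following is a minimal sketch of that pattern, not the actual test file: the class name ParameterGroupingExample and the _assert_infer helper are hypothetical stand-ins for ResponseSenderTest and its _assert_*_infer_success methods.

import unittest


class ParameterGroupingExample(unittest.TestCase):
    # A parameter combination defined once at class level, mirroring the
    # _inputs_parameters_* dictionaries this commit introduces.
    _params_one_response_pre_return = {
        "number_of_response_before_return": 1,
        "send_complete_final_flag_before_return": True,
        "return_a_response": False,
        "number_of_response_after_return": 0,
        "send_complete_final_flag_after_return": False,
    }

    def _assert_infer(self, **kwargs):
        # Hypothetical stand-in for _assert_decoupled_infer_success /
        # _assert_non_decoupled_infer_success; it only checks that the
        # expected parameters arrived.
        self.assertEqual(kwargs["number_of_response_before_return"], 1)
        self.assertFalse(kwargs["return_a_response"])

    def test_one_response_pre_return(self):
        # Unpacking the shared dict keeps each test to a single call
        # instead of repeating the same five keyword arguments.
        self._assert_infer(**self._params_one_response_pre_return)


if __name__ == "__main__":
    unittest.main()

The grouped dictionaries and their use in the real tests appear in the diff below.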
@@ -32,6 +32,63 @@


class ResponseSenderTest(unittest.TestCase):
_inputs_parameters_zero_response_pre_return = {
"number_of_response_before_return": 0,
"send_complete_final_flag_before_return": True,
"return_a_response": False,
"number_of_response_after_return": 0,
"send_complete_final_flag_after_return": False,
}
_inputs_parameters_zero_response_post_return = {
"number_of_response_before_return": 0,
"send_complete_final_flag_before_return": False,
"return_a_response": False,
"number_of_response_after_return": 0,
"send_complete_final_flag_after_return": True,
}
_inputs_parameters_one_response_pre_return = {
"number_of_response_before_return": 1,
"send_complete_final_flag_before_return": True,
"return_a_response": False,
"number_of_response_after_return": 0,
"send_complete_final_flag_after_return": False,
}
_inputs_parameters_one_response_post_return = {
"number_of_response_before_return": 0,
"send_complete_final_flag_before_return": False,
"return_a_response": False,
"number_of_response_after_return": 1,
"send_complete_final_flag_after_return": True,
}
_inputs_parameters_two_response_pre_return = {
"number_of_response_before_return": 2,
"send_complete_final_flag_before_return": True,
"return_a_response": False,
"number_of_response_after_return": 0,
"send_complete_final_flag_after_return": False,
}
_inputs_parameters_two_response_post_return = {
"number_of_response_before_return": 0,
"send_complete_final_flag_before_return": False,
"return_a_response": False,
"number_of_response_after_return": 2,
"send_complete_final_flag_after_return": True,
}
_inputs_parameters_response_pre_and_post_return = {
"number_of_response_before_return": 1,
"send_complete_final_flag_before_return": False,
"return_a_response": False,
"number_of_response_after_return": 3,
"send_complete_final_flag_after_return": True,
}
_inputs_parameters_one_response_on_return = {
"number_of_response_before_return": 0,
"send_complete_final_flag_before_return": False,
"return_a_response": True,
"number_of_response_after_return": 0,
"send_complete_final_flag_after_return": False,
}

def _get_inputs(
self,
number_of_response_before_return,
@@ -67,94 +124,6 @@ def _get_inputs(
)
return inputs

def _get_parallel_inputs_and_response_types_decoupled(self):
return {
"parallel_inputs": [
self._get_inputs(
number_of_response_before_return=0,
send_complete_final_flag_before_return=True,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
),
self._get_inputs(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=True,
),
self._get_inputs(
number_of_response_before_return=1,
send_complete_final_flag_before_return=True,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
),
self._get_inputs(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=1,
send_complete_final_flag_after_return=True,
),
self._get_inputs(
number_of_response_before_return=2,
send_complete_final_flag_before_return=True,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
),
self._get_inputs(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=2,
send_complete_final_flag_after_return=True,
),
self._get_inputs(
number_of_response_before_return=1,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=3,
send_complete_final_flag_after_return=True,
),
],
"number_of_response_before_return": 4,
"return_a_response": 0,
"number_of_response_after_return": 6,
}

def _get_parallel_inputs_and_response_types_non_decoupled(self):
return {
"parallel_inputs": [
self._get_inputs(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=True,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
),
self._get_inputs(
number_of_response_before_return=1,
send_complete_final_flag_before_return=True,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
),
self._get_inputs(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=1,
send_complete_final_flag_after_return=True,
),
],
"number_of_response_before_return": 1,
"return_a_response": 1,
"number_of_response_after_return": 1,
}

def _generate_streaming_callback_and_responses_pair(self):
responses = [] # [{"result": result, "error": error}, ...]

@@ -310,156 +279,130 @@ def _assert_non_decoupled_infer_success(
# Decoupled model send response final flag before request return.
def test_decoupled_zero_response_pre_return(self):
self._assert_decoupled_infer_success(
number_of_response_before_return=0,
send_complete_final_flag_before_return=True,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
**self._inputs_parameters_zero_response_pre_return
)

# Decoupled model send response final flag after request return.
def test_decoupled_zero_response_post_return(self):
self._assert_decoupled_infer_success(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=True,
**self._inputs_parameters_zero_response_post_return
)

# Decoupled model send 1 response before request return.
def test_decoupled_one_response_pre_return(self):
self._assert_decoupled_infer_success(
number_of_response_before_return=1,
send_complete_final_flag_before_return=True,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
**self._inputs_parameters_one_response_pre_return
)

# Decoupled model send 1 response after request return.
def test_decoupled_one_response_post_return(self):
self._assert_decoupled_infer_success(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=1,
send_complete_final_flag_after_return=True,
**self._inputs_parameters_one_response_post_return
)

# Decoupled model send 2 response before request return.
def test_decoupled_two_response_pre_return(self):
self._assert_decoupled_infer_success(
number_of_response_before_return=2,
send_complete_final_flag_before_return=True,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
**self._inputs_parameters_two_response_pre_return
)

# Decoupled model send 2 response after request return.
def test_decoupled_two_response_post_return(self):
self._assert_decoupled_infer_success(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=2,
send_complete_final_flag_after_return=True,
**self._inputs_parameters_two_response_post_return
)

# Decoupled model send 1 and 3 responses before and after return.
def test_decoupled_response_pre_and_post_return(self):
self._assert_decoupled_infer_success(
number_of_response_before_return=1,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=3,
send_complete_final_flag_after_return=True,
**self._inputs_parameters_response_pre_and_post_return
)

# Non-decoupled model send 1 response on return.
def test_non_decoupled_one_response_on_return(self):
self._assert_non_decoupled_infer_success(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=True,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
**self._inputs_parameters_one_response_on_return
)

# Non-decoupled model send 1 response before return.
def test_non_decoupled_one_response_pre_return(self):
self._assert_non_decoupled_infer_success(
number_of_response_before_return=1,
send_complete_final_flag_before_return=True,
return_a_response=False,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
**self._inputs_parameters_one_response_pre_return
)

# Non-decoupled model send 1 response after return.
def test_non_decoupled_one_response_post_return(self):
self._assert_non_decoupled_infer_success(
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=False,
number_of_response_after_return=1,
send_complete_final_flag_after_return=True,
**self._inputs_parameters_one_response_post_return
)

# Decoupled model requests each responding differently.
def test_decoupled_multiple_requests(self):
parallel_inputs = [
self._get_inputs(**self._inputs_parameters_zero_response_pre_return),
self._get_inputs(**self._inputs_parameters_zero_response_post_return),
self._get_inputs(**self._inputs_parameters_one_response_pre_return),
self._get_inputs(**self._inputs_parameters_one_response_post_return),
self._get_inputs(**self._inputs_parameters_two_response_pre_return),
self._get_inputs(**self._inputs_parameters_two_response_post_return),
self._get_inputs(**self._inputs_parameters_response_pre_and_post_return),
]
expected_number_of_response_before_return = 4
expected_return_a_response = False
expected_number_of_response_after_return = 6

model_name = "response_sender_decoupled_batching"
ios = self._get_parallel_inputs_and_response_types_decoupled()
responses = self._infer_parallel(model_name, ios["parallel_inputs"])
responses = self._infer_parallel(model_name, parallel_inputs)
self._assert_responses_valid(
responses,
ios["number_of_response_before_return"],
ios["return_a_response"],
ios["number_of_response_after_return"],
expected_number_of_response_before_return,
expected_return_a_response,
expected_number_of_response_after_return,
)
# Do NOT group into a for-loop as it hides which model failed.
model_name = "response_sender_decoupled_async_batching"
ios = self._get_parallel_inputs_and_response_types_decoupled()
responses = self._infer_parallel(model_name, ios["parallel_inputs"])
responses = self._infer_parallel(model_name, parallel_inputs)
self._assert_responses_valid(
responses,
ios["number_of_response_before_return"],
ios["return_a_response"],
ios["number_of_response_after_return"],
expected_number_of_response_before_return,
expected_return_a_response,
expected_number_of_response_after_return,
)

# Non-decoupled model requests each responding differently.
def test_non_decoupled_multiple_requests(self):
parallel_inputs = [
self._get_inputs(**self._inputs_parameters_one_response_on_return),
self._get_inputs(**self._inputs_parameters_one_response_pre_return),
self._get_inputs(**self._inputs_parameters_one_response_post_return),
]
expected_number_of_response_before_return = 1
expected_return_a_response = True
expected_number_of_response_after_return = 1

model_name = "response_sender_batching"
ios = self._get_parallel_inputs_and_response_types_non_decoupled()
responses = self._infer_parallel(model_name, ios["parallel_inputs"])
responses = self._infer_parallel(model_name, parallel_inputs)
self._assert_responses_valid(
responses,
ios["number_of_response_before_return"],
ios["return_a_response"],
ios["number_of_response_after_return"],
expected_number_of_response_before_return,
expected_return_a_response,
expected_number_of_response_after_return,
)
# Do NOT group into a for-loop as it hides which model failed.
model_name = "response_sender_async_batching"
ios = self._get_parallel_inputs_and_response_types_non_decoupled()
responses = self._infer_parallel(model_name, ios["parallel_inputs"])
responses = self._infer_parallel(model_name, parallel_inputs)
self._assert_responses_valid(
responses,
ios["number_of_response_before_return"],
ios["return_a_response"],
ios["number_of_response_after_return"],
expected_number_of_response_before_return,
expected_return_a_response,
expected_number_of_response_after_return,
)

# Decoupled model send 1 response on return.
def test_decoupled_one_response_on_return(self):
responses = self._infer(
model_name="response_sender_decoupled",
number_of_response_before_return=0,
send_complete_final_flag_before_return=False,
return_a_response=True,
number_of_response_after_return=0,
send_complete_final_flag_after_return=False,
**self._inputs_parameters_one_response_on_return,
)
self._assert_responses_exception(
responses,
