diff --git a/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc-grpc.yaml b/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc-grpc.yaml index dfc66aac..909e827d 100644 --- a/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc-grpc.yaml +++ b/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc-grpc.yaml @@ -5,18 +5,16 @@ metadata: serving.knative.openshift.io/enablePassthrough: "true" sidecar.istio.io/inject: "true" sidecar.istio.io/rewriteAppHTTPProbers: "true" - name: caikit-tgis-example-isvc + name: caikit-tgis-isvc-grpc spec: predictor: + serviceAccountName: sa model: modelFormat: name: caikit - runtime: caikit-tgis-runtime - ports: - - containerPort: 8085 - name: h2c - protocol: TCP - storageUri: proto://path/to/model # single model here + runtime: caikit-tgis-runtime-grpc + storageUri: s3://modelmesh-example-models/llm/models/flan-t5-small-caikit # single model here + # storageUri: proto://path/to/model # single model here # Example, using a pvc: # storageUri: pvc://caikit-pvc/flan-t5-small-caikit/ # Target directory must contain a config.yml diff --git a/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc-template.yaml b/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc-template.yaml new file mode 100644 index 00000000..6da3600f --- /dev/null +++ b/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc-template.yaml @@ -0,0 +1,24 @@ +apiVersion: serving.kserve.io/v1beta1 +kind: InferenceService +metadata: + annotations: + serving.knative.openshift.io/enablePassthrough: "true" + sidecar.istio.io/inject: "true" + sidecar.istio.io/rewriteAppHTTPProbers: "true" + # The following should be set to the + # actual name of the inference service. (e.g., caikit-tgis-isvc + # for HTTP and caikit-tgis-isvc-grpc for gRPC) + name: +spec: + predictor: + # replace in following with the name + # of a ServiceAccount that has the secret for accessing the model + serviceAccountName: + model: + modelFormat: + name: caikit + runtime: caikit-tgis-runtime + storageUri: proto://path/to/model # single model here + # Example, using a pvc: + # storageUri: pvc://caikit-pvc/flan-t5-small-caikit/ + # Target directory must contain a config.yml diff --git a/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc.yaml b/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc.yaml index 9f46cd09..847a2201 100644 --- a/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc.yaml +++ b/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc.yaml @@ -5,14 +5,16 @@ metadata: serving.knative.openshift.io/enablePassthrough: "true" sidecar.istio.io/inject: "true" sidecar.istio.io/rewriteAppHTTPProbers: "true" - name: caikit-tgis-example-isvc + name: caikit-tgis-isvc spec: predictor: + serviceAccountName: sa model: modelFormat: name: caikit runtime: caikit-tgis-runtime - storageUri: proto://path/to/model # single model here + storageUri: s3://modelmesh-example-models/llm/models/flan-t5-small-caikit # single model here + # storageUri: proto://path/to/model # single model here # Example, using a pvc: # storageUri: pvc://caikit-pvc/flan-t5-small-caikit/ # Target directory must contain a config.yml diff --git a/demo/kserve/custom-manifests/caikit/caikit-tgis-servingruntime-grpc.yaml b/demo/kserve/custom-manifests/caikit/caikit-tgis-servingruntime-grpc.yaml new file mode 100644 index 00000000..0da088b5 --- /dev/null +++ b/demo/kserve/custom-manifests/caikit/caikit-tgis-servingruntime-grpc.yaml @@ -0,0 +1,36 @@ +apiVersion: serving.kserve.io/v1alpha1 +kind: ServingRuntime +metadata: + name: caikit-tgis-runtime-grpc +spec: + multiModel: 
false + supportedModelFormats: + # Note: this currently *only* supports caikit format models + - autoSelect: true + name: caikit + containers: + - name: kserve-container + image: quay.io/opendatahub/text-generation-inference:stable + command: ["text-generation-launcher"] + args: ["--model-name=/mnt/models/artifacts/"] + env: + - name: TRANSFORMERS_CACHE + value: /tmp/transformers_cache + # resources: # configure as required + # requests: + # cpu: 8 + # memory: 16Gi + - name: transformer-container + image: quay.io/opendatahub/caikit-tgis-serving:stable + command: ["python", "-m", "caikit.runtime.grpc_server"] + env: + - name: RUNTIME_LOCAL_MODELS_DIR + value: /mnt/models + ports: + - containerPort: 8085 + name: h2c + protocol: TCP + # resources: # configure as required + # requests: + # cpu: 8 + # memory: 16Gi \ No newline at end of file diff --git a/demo/kserve/custom-manifests/caikit/caikit-tgis-servingruntime.yaml b/demo/kserve/custom-manifests/caikit/caikit-tgis-servingruntime.yaml index 8528909c..38a139e6 100644 --- a/demo/kserve/custom-manifests/caikit/caikit-tgis-servingruntime.yaml +++ b/demo/kserve/custom-manifests/caikit/caikit-tgis-servingruntime.yaml @@ -22,6 +22,7 @@ spec: # memory: 16Gi - name: transformer-container image: quay.io/opendatahub/caikit-tgis-serving:stable + command: ["python", "-m", "caikit.runtime.http_server"] env: - name: RUNTIME_LOCAL_MODELS_DIR value: /mnt/models diff --git a/demo/kserve/deploy-remove-scripts.md b/demo/kserve/deploy-remove-scripts.md index 498d7216..f95c1561 100644 --- a/demo/kserve/deploy-remove-scripts.md +++ b/demo/kserve/deploy-remove-scripts.md @@ -14,26 +14,31 @@ Note: If you prefer to deploy and remove an LLM model by using step-by-step comm **Procedure** -1. Deploy a sample LLM model. +1. Deploy a sample LLM model: + For HTTP: ~~~ ./scripts/test/deploy-model.sh ~~~ -2. Perform inference with a HTTP or gRPC call. + For gRPC: + ~~~ + ./scripts/test/deploy-model.sh grpc + ~~~ - 2-http. If using HTTP: +2. Perform inference: + + For HTTP: ~~~ ./scripts/test/http-call.sh ~~~ - - 2-grpc. If using gRPC: + For gRPC: ~~~ ./scripts/test/grpc-call.sh ~~~ -3. Delete the sample model and the MinIO namespace. +3. Delete the sample model: ~~~ ./scripts/test/delete-model.sh
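For reference, a minimal end-to-end run of the helper scripts described above might look like the sketch below (assuming the commands are run from the `demo/kserve` directory, which matches the relative paths used in the scripts):

```bash
# Sketch: HTTP round trip with the helper scripts
./scripts/test/deploy-model.sh        # deploy the sample model with the default HTTP runtime
./scripts/test/http-call.sh           # single-shot and streaming HTTP inference
./scripts/test/delete-model.sh        # remove the sample model

# Sketch: the same round trip over gRPC
./scripts/test/deploy-model.sh grpc   # deploy the sample model with the gRPC runtime
./scripts/test/grpc-call.sh           # single-shot and streaming gRPC inference
./scripts/test/delete-model.sh
```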
diff --git a/demo/kserve/deploy-remove.md b/demo/kserve/deploy-remove.md index aff9b196..2c551e69 100644 --- a/demo/kserve/deploy-remove.md +++ b/demo/kserve/deploy-remove.md @@ -30,7 +30,9 @@ Note: The **flan-t5-small** LLM model has been containerized into an S3 MinIO bu ACCESS_KEY_ID=admin SECRET_ACCESS_KEY=password MINIO_NS=minio + ``` + ``` oc new-project ${MINIO_NS} oc apply -f ./custom-manifests/minio/minio.yaml -n ${MINIO_NS} sed "s//$MINIO_NS/g" ./custom-manifests/minio/minio-secret.yaml | tee ./minio-secret-current.yaml | oc -n ${MINIO_NS} apply -f - @@ -39,46 +41,100 @@ Note: The **flan-t5-small** LLM model has been containerized into an S3 MinIO bu 2. Deploy the LLM model with Caikit+TGIS Serving runtime - a. Create a new namespace. + a. Choose the protocol to be used to invoke inferences: + The default protocol is HTTP (e.g., curl commands). + If you want to use gRPC, set INF_PROTO to the "-grpc" value as shown below; otherwise, skip the following command. + + ``` + INF_PROTO="-grpc" + ``` + + b. Create a new namespace. ```bash - export TEST_NS=kserve-demo + export TEST_NS="kserve-demo" oc new-project ${TEST_NS} ``` - b. Create a caikit `ServingRuntime`. By default, it requests 4CPU and 8Gi of memory. You can adjust these values as needed. + c. Create a caikit `ServingRuntime`. + + By default, it requests 4 CPUs and 8Gi of memory. You can adjust these values as needed. ```bash - oc apply -f ./custom-manifests/caikit/caikit-tgis-servingruntime.yaml -n ${TEST_NS} + oc apply -f ./custom-manifests/caikit/caikit-tgis-servingruntime"$INF_PROTO".yaml -n ${TEST_NS} ``` - c. Deploy the MinIO data connection and service account. + d. Deploy the MinIO data connection and service account. ```bash oc apply -f ./minio-secret-current.yaml -n ${TEST_NS} oc create -f ./serviceaccount-minio-current.yaml -n ${TEST_NS} ``` - d. Deploy the inference service. It will point to the model located in the `modelmesh-example-models/llm/models` directory. + e. Deploy the inference service. + + The [ISVC template file](/demo/kserve/custom-manifests/caikit/caikit-tgis-isvc-template.yaml) shown below contains everything needed to set up the Inference Service: ```bash - oc apply -f ./custom-manifests/caikit/caikit-tgis-isvc.yaml -n ${TEST_NS} + apiVersion: serving.kserve.io/v1beta1 + kind: InferenceService + metadata: + annotations: + serving.knative.openshift.io/enablePassthrough: "true" + sidecar.istio.io/inject: "true" + sidecar.istio.io/rewriteAppHTTPProbers: "true" + # The following should be set to the + # actual name of the inference service. (e.g., caikit-tgis-isvc + # for HTTP and caikit-tgis-isvc-grpc for gRPC) + name: + spec: + predictor: + # replace in following with the name + # of a ServiceAccount that has the secret for accessing the model + serviceAccountName: + model: + modelFormat: + name: caikit + runtime: caikit-tgis-runtime + storageUri: proto://path/to/model # single model here + # Example, using a pvc: + # storageUri: pvc://caikit-pvc/flan-t5-small-caikit/ + # Target directory must contain a config.yml ``` - e. Verify that the inference service's `READY` state is `True`. + Before using it, the following details have to be filled in: + + - the empty `name` field should be set to the actual name of the inference service + - the empty `serviceAccountName` field should be set to the name of a Service Account that holds the secret for accessing the model + - `proto://path/to/model` should be replaced by the actual path to the model that will serve the inferences + + Note: If you followed all the steps to this point, the following commands will + create the needed Inference Service using the MinIO storage with the flan-t5-small + model and the service account that were created in the previous steps. + + ```bash + ISVC_NAME=caikit-tgis-isvc$INF_PROTO + oc apply -f ./custom-manifests/caikit/"$ISVC_NAME".yaml -n ${TEST_NS} + ``` + + f. Verify that the inference service's `READY` state is `True`. ```bash - oc get isvc/caikit-example-isvc -n ${TEST_NS} + oc get isvc/$ISVC_NAME -n ${TEST_NS} ```
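If you prefer not to re-run `oc get isvc` manually, a blocking wait such as the sketch below can be used instead; it assumes the `ISVC_NAME` and `TEST_NS` variables set above and that the InferenceService exposes a standard `Ready` condition.

```bash
# Sketch: block until the InferenceService reports Ready (assumes ISVC_NAME and TEST_NS are set)
oc wait --for=condition=Ready isvc/"$ISVC_NAME" -n "${TEST_NS}" --timeout=300s

# Alternatively, wait for the predictor pods, mirroring what the test scripts in this repo do
oc wait --for=condition=ready pod \
  -l serving.kserve.io/inferenceservice="$ISVC_NAME" \
  -n "${TEST_NS}" --timeout=300s
```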
-3. Perform inference using HTTP (default) or gRPC +3. Perform inference using HTTP or gRPC + + Compute KSVC_HOSTNAME: + ```bash + export KSVC_HOSTNAME=$(oc get ksvc "$ISVC_NAME"-predictor -n ${TEST_NS} -o jsonpath='{.status.url}' | cut -d'/' -f3) + ``` 3-http. Perform inference with HTTP. This example uses cURL. a. Run the following `curl` command for all tokens in a single call: ```bash - export KSVC_HOSTNAME=$(oc get ksvc caikit-example-isvc-predictor -n ${TEST_NS} -o jsonpath='{.status.url}' | cut -d'/' -f3) curl -kL -H 'Content-Type: application/json' -d '{"model_id": "flan-t5-small-caikit", "inputs": "At what temperature does Nitrogen boil?"}' https://${KSVC_HOSTNAME}/api/v1/task/text-generation ``` @@ -156,7 +212,6 @@ Note: The **flan-t5-small** LLM model has been containerized into an S3 MinIO bu c. Run the following `grpcurl` command for all tokens in a single call: ```bash - export KSVC_HOSTNAME=$(oc get ksvc caikit-example-isvc-predictor -n ${TEST_NS} -o jsonpath='{.status.url}' | cut -d'/' -f3) grpcurl -insecure -d '{"text": "At what temperature does liquid Nitrogen boil?"}' -H "mm-model-id: flan-t5-small-caikit" ${KSVC_HOSTNAME}:443 caikit.runtime.Nlp.NlpService/TextGenerationTaskPredict ``` @@ -214,16 +269,17 @@ Note: The **flan-t5-small** LLM model has been containerized into an S3 MinIO bu .... ``` -1. Remove the LLM model +4. Remove the LLM model - a. To remove (undeploy) the LLM model, delete the Inference Service. + a. To remove (undeploy) the LLM model, delete the Inference Service and its containing namespace: ```bash oc delete isvc --all -n ${TEST_NS} --force --grace-period=0 + oc delete ns ${TEST_NS} ``` b. Delete the MinIO resources by deleting the MinIO namespace. ```bash - oc delete ns ${TEST_NS} ${MINIO_NS} + oc delete ns ${MINIO_NS} ```
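After removal, a quick check along the lines of the following sketch can confirm that nothing was left behind (it reuses the `TEST_NS` and `MINIO_NS` variables from the steps above):

```bash
# Sketch: verify that the InferenceService and both namespaces are gone
oc get isvc -n "${TEST_NS}" 2>/dev/null || echo "no InferenceServices left in ${TEST_NS}"
oc get ns "${TEST_NS}" "${MINIO_NS}" 2>/dev/null || echo "namespaces already deleted"
```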
diff --git a/demo/kserve/scripts/test/deploy-model.sh b/demo/kserve/scripts/test/deploy-model.sh index 8f64b1c1..74d9dbc7 100755 --- a/demo/kserve/scripts/test/deploy-model.sh +++ b/demo/kserve/scripts/test/deploy-model.sh @@ -4,6 +4,29 @@ set -o nounset set -o errtrace # set -x #Uncomment this to debug script. +# Deploys the model for HTTP (default) or for gRPC if "grpc" is passed as an argument + +# Check that at most one argument is passed +if [ "$#" -gt 1 ]; then + echo "Error: at most one argument ('http' or 'grpc') is accepted; the default protocol is 'http'" + exit 1 +fi + +# Default value corresponds to the default 'http' protocol: +INF_PROTO="" + +# If we have an argument, check that it is either "http" or "grpc" +if [ "$#" -eq 1 ]; then + if [ "$1" = "http" ]; then + : ### nothing to be done + elif [ "$1" = "grpc" ]; then + INF_PROTO="-grpc" + else + echo "Error: Argument must be either 'http' or 'grpc'." + exit 1 + fi +fi + source "$(dirname "$(realpath "$0")")/../env.sh" # Deploy Minio @@ -24,24 +47,28 @@ else fi sed "s//$MINIO_NS/g" ./custom-manifests/minio/serviceaccount-minio.yaml | tee ${BASE_DIR}/serviceaccount-minio-current.yaml -# Deploy a sample model +# Test if ${TEST_NS} namespace already exists: oc get ns ${TEST_NS} if [[ $? == 1 ]] then - oc new-project ${TEST_NS} - - oc apply -f ./custom-manifests/caikit/caikit-tgis-servingruntime.yaml -n ${TEST_NS} + oc new-project ${TEST_NS} + + oc apply -f ./custom-manifests/caikit/caikit-tgis-servingruntime"${INF_PROTO}".yaml -n ${TEST_NS} + + oc apply -f ${BASE_DIR}/minio-secret-current.yaml -n ${TEST_NS} + oc apply -f ${BASE_DIR}/serviceaccount-minio-current.yaml -n ${TEST_NS} - oc apply -f ${BASE_DIR}/minio-secret-current.yaml -n ${TEST_NS} - oc apply -f ${BASE_DIR}/serviceaccount-minio-current.yaml -n ${TEST_NS} + ### Create the ISVC from the protocol-specific manifest + ISVC_NAME=caikit-tgis-isvc"${INF_PROTO}" + oc apply -f ./custom-manifests/caikit/"$ISVC_NAME".yaml -n ${TEST_NS} - oc apply -f ./custom-manifests/caikit/caikit-tgis-isvc.yaml -n ${TEST_NS} + # Resources needed to enable metrics for the model + # The metrics service needs the correct label in the `matchLabel` field. The expected value of this label is `-predictor-default` + # The metrics service in this repo is configured to work with the example model. If you are deploying a different model or using a different model name, change the label accordingly. - # Resources needed to enable metrics for the model - # The metrics service needs the correct label in the `matchLabel` field. The expected value of this label is `-predictor-default` - # The metrics service in this repo is configured to work with the example model. If you are deploying a different model or using a different model name, change the label accordingly. - oc apply -f custom-manifests/metrics/caikit-metrics-service.yaml -n ${TEST_NS} - oc apply -f custom-manifests/metrics/caikit-metrics-servicemonitor.yaml -n ${TEST_NS} + ### TBD: The following 2 lines should take into account the changed names + # oc apply -f custom-manifests/metrics/caikit-metrics-service.yaml -n ${TEST_NS} + # oc apply -f custom-manifests/metrics/caikit-metrics-servicemonitor.yaml -n ${TEST_NS} else echo echo "* ${TEST_NS} exist. Please remove the namespace or use another namespace name" diff --git a/demo/kserve/scripts/test/grpc-call.sh b/demo/kserve/scripts/test/grpc-call.sh index 1feb0ef5..83323f89 100755 --- a/demo/kserve/scripts/test/grpc-call.sh +++ b/demo/kserve/scripts/test/grpc-call.sh @@ -2,26 +2,33 @@ set -o pipefail set -o nounset set -o errtrace +set -u ### treat any reference to an unset variable as an error and stop execution immediately # set -x #Uncomment this to debug script. +# Performs inference using gRPC + source "$(dirname "$(realpath "$0")")/../env.sh" source "$(dirname "$(realpath "$0")")/../utils.sh" echo -echo "Wait until runtime is READY" +echo "Wait until gRPC runtime is READY" -wait_for_pods_ready "serving.kserve.io/inferenceservice=caikit-example-isvc" "${TEST_NS}" -oc wait --for=condition=ready pod -l serving.kserve.io/inferenceservice=caikit-example-isvc -n ${TEST_NS} --timeout=300s +ISVC_NAME=caikit-tgis-isvc-grpc +wait_for_pods_ready "serving.kserve.io/inferenceservice=${ISVC_NAME}" "${TEST_NS}" +oc wait --for=condition=ready pod -l serving.kserve.io/inferenceservice=${ISVC_NAME} -n ${TEST_NS} --timeout=300s echo echo "Testing all token in a single call" echo -export KSVC_HOSTNAME=$(oc get ksvc caikit-example-isvc-predictor -n ${TEST_NS} -o jsonpath='{.status.url}' | cut -d'/' -f3) -grpcurl -insecure -d '{"text": "At what temperature does liquid Nitrogen boil?"}' -H "mm-model-id: flan-t5-small-caikit" ${KSVC_HOSTNAME}:443 caikit.runtime.Nlp.NlpService/TextGenerationTaskPredict +export KSVC_HOSTNAME=$(oc get ksvc "${ISVC_NAME}"-predictor -n ${TEST_NS} -o jsonpath='{.status.url}' | cut -d'/' -f3) + +### Invoke the inferences: +grpcurl -insecure -d '{"text": "At what temperature does Nitrogen boil?"}' -H "mm-model-id: flan-t5-small-caikit" ${KSVC_HOSTNAME}:443 caikit.runtime.Nlp.NlpService/TextGenerationTaskPredict echo echo "Testing streams of token" echo -grpcurl -insecure -d '{"text": "At what temperature does liquid Nitrogen boil?"}' -H "mm-model-id: flan-t5-small-caikit" ${KSVC_HOSTNAME}:443 caikit.runtime.Nlp.NlpService/ServerStreamingTextGenerationTaskPredict +grpcurl -insecure -d '{"text": "At what temperature does Nitrogen boil?"}' -H "mm-model-id: flan-t5-small-caikit" ${KSVC_HOSTNAME}:443 caikit.runtime.Nlp.NlpService/ServerStreamingTextGenerationTaskPredict +
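Since the `grpcurl` calls above do not pass any `.proto` files, they rely on the runtime's gRPC server reflection; a quick way to inspect the exposed services before calling them is sketched below (reflection being enabled is an assumption here, and the host is derived the same way as in the script):

```bash
# Sketch: list the gRPC services and methods exposed by the runtime via server reflection
export KSVC_HOSTNAME=$(oc get ksvc caikit-tgis-isvc-grpc-predictor -n ${TEST_NS} -o jsonpath='{.status.url}' | cut -d'/' -f3)
grpcurl -insecure ${KSVC_HOSTNAME}:443 list
grpcurl -insecure ${KSVC_HOSTNAME}:443 list caikit.runtime.Nlp.NlpService
```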
diff --git a/demo/kserve/scripts/test/http-call.sh b/demo/kserve/scripts/test/http-call.sh index 46890da5..7bfd20ca 100755 --- a/demo/kserve/scripts/test/http-call.sh +++ b/demo/kserve/scripts/test/http-call.sh @@ -2,22 +2,31 @@ set -o pipefail set -o nounset set -o errtrace +set -u ### treat any reference to an unset variable as an error and stop execution immediately + + # set -x #Uncomment this to debug script. +# Performs inference using HTTP + source "$(dirname "$(realpath "$0")")/../env.sh" source "$(dirname "$(realpath "$0")")/../utils.sh" echo -echo "Wait until runtime is READY" +echo "Wait until HTTP runtime is READY" -wait_for_pods_ready "serving.kserve.io/inferenceservice=caikit-example-isvc" "${TEST_NS}" -oc wait --for=condition=ready pod -l serving.kserve.io/inferenceservice=caikit-example-isvc -n ${TEST_NS} --timeout=300s +ISVC_NAME=caikit-tgis-isvc +wait_for_pods_ready "serving.kserve.io/inferenceservice=${ISVC_NAME}" "${TEST_NS}" +oc wait --for=condition=ready pod -l serving.kserve.io/inferenceservice=${ISVC_NAME} -n ${TEST_NS} --timeout=300s echo echo "Testing all token in a single call" echo -export KSVC_HOSTNAME=$(oc get ksvc caikit-example-isvc-predictor -n ${TEST_NS} -o jsonpath='{.status.url}' | cut -d'/' -f3) +export KSVC_HOSTNAME=$(oc get ksvc "${ISVC_NAME}"-predictor -n ${TEST_NS} -o jsonpath='{.status.url}' | cut -d'/' -f3) + +### Invoke the inferences: + curl -kL -H 'Content-Type: application/json' -d '{"model_id": "flan-t5-small-caikit", "inputs": "At what temperature does Nitrogen boil?"}' https://${KSVC_HOSTNAME}/api/v1/task/text-generation echo @@ -25,3 +34,4 @@ echo "Testing streams of token" echo curl -kL -H 'Content-Type: application/json' -d '{"model_id": "flan-t5-small-caikit", "inputs": "At what temperature does Nitrogen boil?"}' https://${KSVC_HOSTNAME}/api/v1/task/server-streaming-text-generation + diff --git a/demo/kserve/scripts/uninstall/dependencies-uninstall.sh b/demo/kserve/scripts/uninstall/dependencies-uninstall.sh index 6bda5a42..057fe1f9 100755 --- a/demo/kserve/scripts/uninstall/dependencies-uninstall.sh +++ b/demo/kserve/scripts/uninstall/dependencies-uninstall.sh @@ -5,6 +5,8 @@ set -o errtrace # set -x #Uncomment this to debug script. source "$(dirname "$(realpath "$0")")/../env.sh" +export TEST_NS_HTTP=${TEST_NS}"-http" +export TEST_NS_GRPC=${TEST_NS}"-grpc" # Delete the Knative gateways oc delete -f custom-manifests/serverless/gateways.yaml @@ -45,4 +47,16 @@ oc delete csv OperatorGroup serverless-operators -n openshift-serverless oc delete project istio-system oc delete project knative-serving oc delete project knative-eventing -oc delete project $TEST_NS + +oc get ns ${TEST_NS_HTTP} +if [[ $? == 0 ]] +then + oc delete project $TEST_NS_HTTP +fi + +oc get ns ${TEST_NS_GRPC} +if [[ $? == 0 ]] +then + oc delete project $TEST_NS_GRPC +fi + diff --git a/demo/kserve/scripts/uninstall/kserve-uninstall.sh b/demo/kserve/scripts/uninstall/kserve-uninstall.sh index 88d56e65..72cdaf35 100755 --- a/demo/kserve/scripts/uninstall/kserve-uninstall.sh +++ b/demo/kserve/scripts/uninstall/kserve-uninstall.sh @@ -1,11 +1,14 @@ - #!/bin/bash set -o pipefail set -o nounset set -o errtrace # set -x #Uncomment this to debug script. +# Uninstalls the MinIO namespace as well as protocol-specific namespaces such as kserve-demo-http or kserve-demo-grpc + source "$(dirname "$(realpath "$0")")/../env.sh" +TEST_NS_HTTP=${TEST_NS}"-http" +TEST_NS_GRPC=${TEST_NS}"-grpc" if [[ !
-n "${TARGET_OPERATOR+x}" ]] then @@ -27,10 +30,11 @@ export TARGET_OPERATOR_NS=$(getOpNS ${TARGET_OPERATOR_TYPE}) oc delete validatingwebhookconfiguration inferencegraph.serving.kserve.io inferenceservice.serving.kserve.io oc delete mutatingwebhookconfiguration inferenceservice.serving.kserve.io -oc delete isvc,pod --all -n ${TEST_NS} --force --grace-period=0 +oc delete isvc,pod --all -n ${TEST_NS_HTTP} --force --grace-period=0 +oc delete isvc,pod --all -n ${TEST_NS_GRPC} --force --grace-period=0 echo "It would take around around 3~4 mins" -oc delete ns ${TEST_NS} ${MINIO_NS} --force --grace-period=0 +oc delete ns ${TEST_NS_HTTP} ${TEST_NS_GRPC} ${MINIO_NS} --force --grace-period=0 oc delete secret wildcard-certs -n istio-system oc delete DataScienceCluster --all -n "${KSERVE_OPERATOR_NS}"