diff --git a/eng/versioning/version_client.txt b/eng/versioning/version_client.txt
index 9a13937e1fdb9..0f8728300f7c3 100644
--- a/eng/versioning/version_client.txt
+++ b/eng/versioning/version_client.txt
@@ -105,10 +105,10 @@ com.azure:azure-cosmos;4.56.0;4.57.0-beta.1
com.azure:azure-cosmos-benchmark;4.0.1-beta.1;4.0.1-beta.1
com.azure:azure-cosmos-dotnet-benchmark;4.0.1-beta.1;4.0.1-beta.1
com.azure.cosmos.spark:azure-cosmos-spark_3_2-12;1.0.0-beta.1;1.0.0-beta.1
-com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12;4.28.2;4.28.3
-com.azure.cosmos.spark:azure-cosmos-spark_3-2_2-12;4.28.2;4.28.3
-com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12;4.28.2;4.28.3
-com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12;4.28.2;4.28.3
+com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12;4.28.2;4.28.4
+com.azure.cosmos.spark:azure-cosmos-spark_3-2_2-12;4.28.2;4.28.4
+com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12;4.28.2;4.28.4
+com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12;4.28.2;4.28.4
com.azure:azure-cosmos-encryption;2.8.0;2.9.0-beta.1
com.azure:azure-cosmos-test;1.0.0-beta.6;1.0.0-beta.7
com.azure:azure-cosmos-tests;1.0.0-beta.1;1.0.0-beta.1
diff --git a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
index 73b2b33b19fe5..2b5d128a904f8 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md
@@ -1,5 +1,10 @@
## Release History
+### 4.28.4 (2024-03-18)
+
+#### Other Changes
+* Increased the queue length of the schedulers used in `BulkWriter` and switched to separate schedulers for handling requests and responses, to avoid `ReactorRejectedExecutionException: Scheduler unavailable` errors. - See [PR 39260](https://github.com/Azure/azure-sdk-for-java/pull/39260)
+
### 4.28.3 (2024-03-12)
#### Other Changes
diff --git a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/README.md
index 416a6d3cca486..4b0a6b7687622 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/README.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/README.md
@@ -29,6 +29,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-1_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|--------------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.1.1 - 3.1.2 | [8, 11] | 2.12 | 8.\*, 9.\* |
| 4.28.3 | 3.1.1 - 3.1.2 | [8, 11] | 2.12 | 8.\*, 9.\* |
| 4.28.2 | 3.1.1 - 3.1.2 | [8, 11] | 2.12 | 8.\*, 9.\* |
| 4.28.1 | 3.1.1 - 3.1.2 | [8, 11] | 2.12 | 8.\*, 9.\* |
@@ -91,6 +92,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-2_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.3 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.2 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.1 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
@@ -138,6 +140,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-3_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.3 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.2 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
@@ -167,6 +170,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-4_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.4.0 | [8, 11] | 2.12 | 13.\* |
| 4.28.3 | 3.4.0 | [8, 11] | 2.12 | 13.\* |
| 4.28.2 | 3.4.0 | [8, 11] | 2.12 | 13.\* |
| 4.28.1 | 3.4.0 | [8, 11] | 2.12 | 13.\* |
@@ -188,11 +192,11 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
### Download
You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime 8 from Maven:
-`com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12:4.28.3`
+`com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12:4.28.4`
You can also integrate against Cosmos DB Spark Connector in your SBT project:
```scala
-libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-1_2-12" % "4.28.3"
+libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-1_2-12" % "4.28.4"
```
Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark).
diff --git a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/pom.xml
index 348f9bbff12e1..cf8ec1dc03276 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/pom.xml
@@ -11,7 +11,7 @@
com.azure.cosmos.spark
azure-cosmos-spark_3-1_2-12
- 4.28.3
+ 4.28.4
jar
https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-1_2-12
OLTP Spark 3.1 Connector for Azure Cosmos DB SQL API
diff --git a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
index f377ec1c5a778..0c7c7e8bbec31 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md
@@ -1,5 +1,10 @@
## Release History
+### 4.28.4 (2024-03-18)
+
+#### Other Changes
+* Increased the queue length of the schedulers used in `BulkWriter` and switched to separate schedulers for handling requests and responses, to avoid `ReactorRejectedExecutionException: Scheduler unavailable` errors. - See [PR 39260](https://github.com/Azure/azure-sdk-for-java/pull/39260)
+
### 4.28.3 (2024-03-12)
#### Other Changes
diff --git a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/README.md
index d973d3a593037..42df32000bc12 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/README.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/README.md
@@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-2_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.3 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.2 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.1 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
@@ -75,6 +76,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-3_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.3 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.2 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
@@ -104,6 +106,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-1_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|--------------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 8.\*, 9.\* |
| 4.28.3 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 8.\*, 9.\* |
| 4.28.2 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 8.\*, 9.\* |
| 4.28.1 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 8.\*, 9.\* |
@@ -166,6 +169,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-4_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.3 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.2 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.1 | 3.4.0 | [8, 11] | 2.12 | 13.* |
@@ -186,11 +190,11 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
### Download
You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime 10 from Maven:
-`com.azure.cosmos.spark:azure-cosmos-spark_3-2_2-12:4.28.3`
+`com.azure.cosmos.spark:azure-cosmos-spark_3-2_2-12:4.28.4`
You can also integrate against Cosmos DB Spark Connector in your SBT project:
```scala
-libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-2_2-12" % "4.28.3"
+libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-2_2-12" % "4.28.4"
```
Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark).
diff --git a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/pom.xml
index e810ffb08b59e..d14d547696ba0 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/pom.xml
@@ -11,7 +11,7 @@
com.azure.cosmos.spark
azure-cosmos-spark_3-2_2-12
- 4.28.3
+ 4.28.4
jar
https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-2_2-12
OLTP Spark 3.2 Connector for Azure Cosmos DB SQL API
diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
index 556211be419ba..9a8146fb64835 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
@@ -1,5 +1,10 @@
## Release History
+### 4.28.4 (2024-03-18)
+
+#### Other Changes
+* Increased the queue length of the schedulers used in `BulkWriter` and switched to separate schedulers for handling requests and responses, to avoid `ReactorRejectedExecutionException: Scheduler unavailable` errors. - See [PR 39260](https://github.com/Azure/azure-sdk-for-java/pull/39260)
+
### 4.28.3 (2024-03-12)
#### Other Changes
diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md
index 93169c1208571..65e578c697427 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/README.md
@@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-3_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.3 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.2 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
@@ -57,6 +58,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-2_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.3 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.2 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.1 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
@@ -104,6 +106,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-1_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|--------------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.3 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.2 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.1 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
@@ -166,6 +169,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-4_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.3 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.2 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.1 | 3.4.0 | [8, 11] | 2.12 | 13.* |
@@ -186,11 +190,11 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
### Download
You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime 11 from Maven:
-`com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.28.3`
+`com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.28.4`
You can also integrate against Cosmos DB Spark Connector in your SBT project:
```scala
-libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-3_2-12" % "4.28.3"
+libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-3_2-12" % "4.28.4"
```
Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark).
diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml
index 51e26e974238b..6a3469fbb800a 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/pom.xml
@@ -11,7 +11,7 @@
com.azure.cosmos.spark
azure-cosmos-spark_3-3_2-12
- 4.28.3
+ 4.28.4
jar
https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-3_2-12
OLTP Spark 3.3 Connector for Azure Cosmos DB SQL API
diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
index c8e1ce708d3ed..9c43b348edae2 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
@@ -1,5 +1,10 @@
## Release History
+### 4.28.4 (2024-03-18)
+
+#### Other Changes
+* Increased the queue length of the schedulers used in `BulkWriter` and switched to separate schedulers for handling requests and responses, to avoid `ReactorRejectedExecutionException: Scheduler unavailable` errors. - See [PR 39260](https://github.com/Azure/azure-sdk-for-java/pull/39260)
+
### 4.28.3 (2024-03-12)
#### Other Changes
diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md
index 9f731eec2fd4f..04b0baa8eb176 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/README.md
@@ -28,6 +28,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-4_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.3 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.2 | 3.4.0 | [8, 11] | 2.12 | 13.* |
| 4.28.1 | 3.4.0 | [8, 11] | 2.12 | 13.* |
@@ -48,6 +49,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-3_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.3 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.2 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
| 4.28.1 | 3.3.0 | [8, 11] | 2.12 | 11.\*, 12.\* |
@@ -77,6 +79,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-2_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|-----------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.3 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.2 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.1 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
@@ -124,6 +127,7 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
#### azure-cosmos-spark_3-1_2-12
| Connector | Supported Spark Versions | Supported JVM Versions | Supported Scala Versions | Supported Databricks Runtimes |
|--------------|--------------------------|------------------------|--------------------------|-------------------------------|
+| 4.28.4 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.3 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.2 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
| 4.28.1 | 3.2.0 - 3.2.1 | [8, 11] | 2.12 | 10.\* |
@@ -186,11 +190,11 @@ https://github.com/Azure/azure-sdk-for-java/issues/new
### Download
You can use the maven coordinate of the jar to auto install the Spark Connector to your Databricks Runtime 11 from Maven:
-`com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.28.3`
+`com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.28.4`
You can also integrate against Cosmos DB Spark Connector in your SBT project:
```scala
-libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-4_2-12" % "4.28.3"
+libraryDependencies += "com.azure.cosmos.spark" % "azure-cosmos-spark_3-4_2-12" % "4.28.4"
```
Cosmos DB Spark Connector is available on [Maven Central Repo](https://central.sonatype.com/search?namespace=com.azure.cosmos.spark).
diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml
index 129e0e095c9bb..b6ed920202fea 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/pom.xml
@@ -11,7 +11,7 @@
com.azure.cosmos.spark
azure-cosmos-spark_3-4_2-12
- 4.28.3
+ 4.28.4
jar
https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/cosmos/azure-cosmos-spark_3-4_2-12
OLTP Spark 3.4 Connector for Azure Cosmos DB SQL API
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/docs/quick-start.md b/sdk/cosmos/azure-cosmos-spark_3_2-12/docs/quick-start.md
index 62d7a92b37e36..203440c0452e2 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/docs/quick-start.md
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/docs/quick-start.md
@@ -26,16 +26,16 @@ You can use any other Spark 3.1.1 spark offering as well, also you should be abl
SLF4J is only needed if you plan to use logging, please also download an SLF4J binding which will link the SLF4J API with the logging implementation of your choice. See the [SLF4J user manual](https://www.slf4j.org/manual.html) for more information.
For Spark 3.1:
-- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12:4.28.3](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-1_2-12/4.28.3/jar)
+- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12:4.28.4](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-1_2-12/4.28.4/jar)
For Spark 3.2:
-- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-2_2-12:4.28.3](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-2_2-12/4.28.3/jar)
+- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-2_2-12:4.28.4](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-2_2-12/4.28.4/jar)
For Spark 3.3:
-- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.28.3](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-3_2-12/4.28.3/jar)
+- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-3_2-12:4.28.4](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-3_2-12/4.28.4/jar)
For Spark 3.4:
-- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.28.3](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-4_2-12/4.28.3/jar)
+- Install Cosmos DB Spark Connector, in your spark Cluster [com.azure.cosmos.spark:azure-cosmos-spark_3-4_2-12:4.28.4](https://search.maven.org/artifact/com.azure.cosmos.spark/azure-cosmos-spark_3-4_2-12/4.28.4/jar)
The getting started guide is based on PySpark however you can use the equivalent scala version as well, and you can run the following code snippet in an Azure Databricks PySpark notebook.
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/BulkWriter.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/BulkWriter.scala
index f097230c3deac..f0622e77aa123 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/BulkWriter.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/BulkWriter.scala
@@ -8,8 +8,7 @@ import com.azure.cosmos.{BridgeInternal, CosmosAsyncContainer, CosmosDiagnostics
import com.azure.cosmos.implementation.apachecommons.lang.StringUtils
import com.azure.cosmos.implementation.batch.{BatchRequestResponseConstants, BulkExecutorDiagnosticsTracker, ItemBulkOperation}
import com.azure.cosmos.models._
-import com.azure.cosmos.spark.BulkWriter.{BulkOperationFailedException, bulkWriterBoundedElastic, getThreadInfo, readManyBoundedElastic}
-import com.azure.cosmos.spark.CosmosConstants.StatusCodes
+import com.azure.cosmos.spark.BulkWriter.{BulkOperationFailedException, bulkWriterRequestsBoundedElastic, bulkWriterResponsesBoundedElastic, getThreadInfo, readManyBoundedElastic}
import com.azure.cosmos.spark.diagnostics.DefaultDiagnostics
import reactor.core.Scannable
import reactor.core.publisher.Mono
@@ -555,9 +554,10 @@ private class BulkWriter(container: CosmosAsyncContainer,
val bulkOperationResponseFlux: SFlux[CosmosBulkOperationResponse[Object]] =
container
.executeBulkOperations[Object](
- bulkInputEmitter.asFlux().publishOn(bulkWriterBoundedElastic),
+ bulkInputEmitter.asFlux().publishOn(bulkWriterRequestsBoundedElastic),
cosmosBulkExecutionOptions)
- .publishOn(bulkWriterBoundedElastic)
+ .onBackpressureBuffer()
+ .publishOn(bulkWriterResponsesBoundedElastic)
.asScala
bulkOperationResponseFlux.subscribe(
@@ -1252,7 +1252,8 @@ private object BulkWriter {
private val maxDelayOn408RequestTimeoutInMs = 10000
private val minDelayOn408RequestTimeoutInMs = 1000
private val maxItemOperationsToShowInErrorMessage = 10
- private val BULK_WRITER_BOUNDED_ELASTIC_THREAD_NAME = "bulk-writer-bounded-elastic"
+ private val BULK_WRITER_REQUESTS_BOUNDED_ELASTIC_THREAD_NAME = "bulk-writer-requests-bounded-elastic"
+ private val BULK_WRITER_RESPONSES_BOUNDED_ELASTIC_THREAD_NAME = "bulk-writer-responses-bounded-elastic"
private val READ_MANY_BOUNDED_ELASTIC_THREAD_NAME = "read-many-bounded-elastic"
private val TTL_FOR_SCHEDULER_WORKER_IN_SECONDS = 60 // same as BoundedElasticScheduler.DEFAULT_TTL_SECONDS
@@ -1308,17 +1309,25 @@ private object BulkWriter {
private val bulkProcessingThresholds = new CosmosBulkExecutionThresholdsState()
+ // Custom bounded elastic scheduler to consume input flux
+ val bulkWriterRequestsBoundedElastic: Scheduler = Schedulers.newBoundedElastic(
+ Schedulers.DEFAULT_BOUNDED_ELASTIC_SIZE,
+ Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE + DefaultMaxPendingOperationPerCore,
+ BULK_WRITER_REQUESTS_BOUNDED_ELASTIC_THREAD_NAME,
+ TTL_FOR_SCHEDULER_WORKER_IN_SECONDS, true)
+
// Custom bounded elastic scheduler to switch off IO thread to process response.
- val bulkWriterBoundedElastic: Scheduler = Schedulers.newBoundedElastic(
- 2 * Schedulers.DEFAULT_BOUNDED_ELASTIC_SIZE,
- Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
- BULK_WRITER_BOUNDED_ELASTIC_THREAD_NAME,
+ val bulkWriterResponsesBoundedElastic: Scheduler = Schedulers.newBoundedElastic(
+ Schedulers.DEFAULT_BOUNDED_ELASTIC_SIZE,
+ Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE + DefaultMaxPendingOperationPerCore,
+ BULK_WRITER_RESPONSES_BOUNDED_ELASTIC_THREAD_NAME,
TTL_FOR_SCHEDULER_WORKER_IN_SECONDS, true)
+
// Custom bounded elastic scheduler to switch off IO thread to process response.
val readManyBoundedElastic: Scheduler = Schedulers.newBoundedElastic(
2 * Schedulers.DEFAULT_BOUNDED_ELASTIC_SIZE,
- Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE,
+ Schedulers.DEFAULT_BOUNDED_ELASTIC_QUEUESIZE + DefaultMaxPendingOperationPerCore,
READ_MANY_BOUNDED_ELASTIC_THREAD_NAME,
TTL_FOR_SCHEDULER_WORKER_IN_SECONDS, true)