From 95d47502491ef1efa257275604686665f4e33dfb Mon Sep 17 00:00:00 2001
From: Nick Knize <nknize@apache.org>
Date: Sat, 12 Mar 2022 15:03:00 -0500
Subject: [PATCH] [Remove] types from translog (#2439)

Removes persistence of the document type in the translog now that mapping types
are no longer supported. The type parameter is dropped from SourceToParse,
ParsedDocument, Engine.Index/Delete, and the Translog.Index/Delete operations,
and the now-redundant type validation is removed from DocumentParser.
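
The change is mechanical for callers: the type argument simply disappears from
the affected signatures. As an illustrative sketch (the argument values are
made up; the before/after constructor shapes are taken from the SourceToParse
change in this patch):

    // before: a type had to be threaded through, even though only a single
    // mapping type was ever valid
    new SourceToParse("test", "_doc", "1", source, XContentType.JSON);
    // after: the type parameter is gone
    new SourceToParse("test", "1", source, XContentType.JSON);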

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
---
 .../action/PainlessExecuteAction.java         |   3 +-
 .../RankFeatureMetaFieldMapperTests.java      |   2 +-
 .../mapper/ScaledFloatFieldMapperTests.java   |   9 -
 .../mapper/ParentJoinFieldMapperTests.java    |  27 +-
 .../percolator/PercolateQueryBuilder.java     |   2 +-
 .../PercolatorFieldMapperTests.java           |  17 --
 .../documentation/ReindexDocumentationIT.java |   8 +-
 .../index/mapper/size/SizeMappingTests.java   |   6 +-
 .../20_missing_field.yml                      |   2 +
 .../action/termvectors/GetTermVectorsIT.java  |   2 -
 .../opensearch/index/shard/IndexShardIT.java  |   4 +-
 .../action/bulk/TransportShardBulkAction.java |  12 +-
 .../cluster/metadata/IndexMetadata.java       |  14 +-
 .../metadata/MetadataMappingService.java      |  15 +-
 .../org/opensearch/index/IndexingSlowLog.java |   2 -
 .../org/opensearch/index/engine/Engine.java   |  26 +-
 .../opensearch/index/engine/EngineConfig.java |   2 +-
 .../index/engine/InternalEngine.java          |   9 +-
 .../index/engine/LuceneChangesSnapshot.java   |   4 +-
 .../opensearch/index/get/ShardGetService.java |   1 -
 .../index/mapper/DocumentMapper.java          |   6 +-
 .../index/mapper/DocumentParser.java          |  15 --
 .../index/mapper/ParsedDocument.java          |   8 +-
 .../index/mapper/SourceToParse.java           |  13 +-
 .../index/mapper/TypeFieldMapper.java         |   2 +-
 .../opensearch/index/shard/IndexShard.java    |  96 +------
 .../index/termvectors/TermVectorsService.java |   2 +-
 .../opensearch/index/translog/Translog.java   |  84 +++----
 .../index/translog/TranslogWriter.java        |   2 -
 .../bulk/TransportShardBulkActionTests.java   |   3 +-
 .../get/TransportMultiGetActionTests.java     |   2 -
 .../resync/ResyncReplicationRequestTests.java |   2 +-
 ...TransportResyncReplicationActionTests.java |   2 +-
 ...ReplicationAllPermitsAcquisitionTests.java |   2 +-
 .../TransportMultiTermVectorsActionTests.java |   2 -
 .../MetadataCreateDataStreamServiceTests.java |   2 +-
 .../cluster/metadata/MetadataTests.java       |  10 +-
 .../metadata/ToAndFromJsonMetadataTests.java  |   3 +-
 .../index/IndexingSlowLogTests.java           |   4 -
 .../index/engine/InternalEngineTests.java     | 103 +++-----
 .../engine/LuceneChangesSnapshotTests.java    |   6 +-
 .../index/engine/NoOpEngineTests.java         |   2 +-
 .../index/engine/ReadOnlyEngineTests.java     |   2 +-
 .../fielddata/BinaryDVFieldDataTests.java     |   8 +-
 .../mapper/DataStreamFieldMapperTests.java    |   5 -
 .../index/mapper/DocumentParserTests.java     |  11 +-
 .../index/mapper/DynamicMappingTests.java     |   2 +-
 .../mapper/FieldNamesFieldMapperTests.java    |   3 -
 .../GenericStoreDynamicTemplateTests.java     |   2 +-
 .../index/mapper/IdFieldMapperTests.java      |   3 +-
 .../index/mapper/IndexFieldMapperTests.java   |   1 -
 .../index/mapper/IpRangeFieldMapperTests.java |   1 -
 .../mapper/JavaMultiFieldMergeTests.java      |  16 +-
 .../index/mapper/MultiFieldTests.java         |   7 +-
 .../index/mapper/NestedObjectMapperTests.java |  22 +-
 .../mapper/NullValueObjectMappingTests.java   |   3 -
 .../index/mapper/ObjectMapperTests.java       |   1 -
 .../mapper/PathMatchDynamicTemplateTests.java |   2 +-
 .../index/mapper/RoutingFieldMapperTests.java |   2 -
 .../index/mapper/SourceFieldMapperTests.java  |   8 +-
 .../mapper/StoredNumericValuesTests.java      |   1 -
 .../index/mapper/TypeFieldMapperTests.java    |   4 +-
 .../IndexLevelReplicationTests.java           |  17 +-
 .../RecoveryDuringReplicationTests.java       |  11 +-
 .../index/shard/IndexShardTests.java          |  72 +++---
 .../shard/IndexingOperationListenerTests.java |   2 +-
 .../shard/PrimaryReplicaSyncerTests.java      |   4 +-
 .../index/shard/RefreshListenersTests.java    |  12 +-
 .../RemoveCorruptedShardDataCommandTests.java |   2 +-
 .../index/shard/ShardGetServiceTests.java     |  13 +-
 .../index/translog/TranslogTests.java         | 234 ++++++------------
 .../IndexingMemoryControllerTests.java        |   2 +-
 .../PeerRecoveryTargetServiceTests.java       |   2 +-
 .../recovery/RecoverySourceHandlerTests.java  |   5 +-
 .../indices/recovery/RecoveryTests.java       |  18 +-
 .../CategoryContextMappingTests.java          |  12 +-
 .../completion/GeoContextMappingTests.java    |   5 +-
 .../index/engine/EngineTestCase.java          |  47 +---
 .../index/engine/TranslogHandler.java         |  34 +--
 .../index/mapper/MapperServiceTestCase.java   |   4 +-
 ...enSearchIndexLevelReplicationTestCase.java |  17 +-
 .../index/shard/IndexShardTestCase.java       |  30 +--
 82 files changed, 334 insertions(+), 851 deletions(-)

diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java
index 4999d5d444673..be26e69ec22d1 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java
@@ -593,10 +593,9 @@ private static Response prepareRamIndex(
             try (Directory directory = new ByteBuffersDirectory()) {
                 try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(defaultAnalyzer))) {
                     String index = indexService.index().getName();
-                    String type = indexService.mapperService().documentMapper().type();
                     BytesReference document = request.contextSetup.document;
                     XContentType xContentType = request.contextSetup.xContentType;
-                    SourceToParse sourceToParse = new SourceToParse(index, type, "_id", document, xContentType);
+                    SourceToParse sourceToParse = new SourceToParse(index, "_id", document, xContentType);
                     ParsedDocument parsedDocument = indexService.mapperService().documentMapper().parse(sourceToParse);
                     indexWriter.addDocuments(parsedDocument.docs());
                     try (IndexReader indexReader = DirectoryReader.open(indexWriter)) {
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java
index 46e71096ba307..3161e7462d2a0 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java
@@ -91,7 +91,7 @@ public void testDocumentParsingFailsOnMetaField() throws Exception {
         BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject());
         MapperParsingException e = expectThrows(
             MapperParsingException.class,
-            () -> mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON))
+            () -> mapper.parse(new SourceToParse("test", "1", bytes, XContentType.JSON))
         );
         assertTrue(
             e.getCause().getMessage().contains("Field [" + rfMetaField + "] is a metadata field and cannot be added inside a document.")
diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java
index b3db286d39dac..3de322b286183 100644
--- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java
+++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java
@@ -134,7 +134,6 @@ public void testNotIndexed() throws Exception {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()),
                 XContentType.JSON
@@ -156,7 +155,6 @@ public void testNoDocValues() throws Exception {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()),
                 XContentType.JSON
@@ -178,7 +176,6 @@ public void testStore() throws Exception {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()),
                 XContentType.JSON
@@ -202,7 +199,6 @@ public void testCoerce() throws Exception {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()),
                 XContentType.JSON
@@ -222,7 +218,6 @@ public void testCoerce() throws Exception {
         ThrowingRunnable runnable = () -> mapper2.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()),
                 XContentType.JSON
@@ -246,7 +241,6 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains
         ThrowingRunnable runnable = () -> mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()),
                 XContentType.JSON
@@ -261,7 +255,6 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains
         ParsedDocument doc = mapper2.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()),
                 XContentType.JSON
@@ -277,7 +270,6 @@ public void testNullValue() throws IOException {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()),
                 XContentType.JSON
@@ -291,7 +283,6 @@ public void testNullValue() throws IOException {
         doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()),
                 XContentType.JSON
diff --git a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
index 628345a625d1b..a9ac151dd3806 100644
--- a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
+++ b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java
@@ -80,13 +80,7 @@ public void testSingleLevel() throws Exception {
 
         // Doc without join
         ParsedDocument doc = docMapper.parse(
-            new SourceToParse(
-                "test",
-                "type",
-                "0",
-                BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
-                XContentType.JSON
-            )
+            new SourceToParse("test", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)
         );
         assertNull(doc.rootDoc().getBinaryValue("join_field"));
 
@@ -94,7 +88,6 @@ public void testSingleLevel() throws Exception {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()),
                 XContentType.JSON
@@ -107,7 +100,6 @@ public void testSingleLevel() throws Exception {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "2",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -131,7 +123,6 @@ public void testSingleLevel() throws Exception {
             () -> docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()),
                     XContentType.JSON
@@ -161,7 +152,6 @@ public void testParentIdSpecifiedAsNumber() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "2",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -181,7 +171,6 @@ public void testParentIdSpecifiedAsNumber() throws Exception {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "2",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -222,13 +211,7 @@ public void testMultipleLevels() throws Exception {
 
         // Doc without join
         ParsedDocument doc = docMapper.parse(
-            new SourceToParse(
-                "test",
-                "type",
-                "0",
-                BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
-                XContentType.JSON
-            )
+            new SourceToParse("test", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON)
         );
         assertNull(doc.rootDoc().getBinaryValue("join_field"));
 
@@ -236,7 +219,6 @@ public void testMultipleLevels() throws Exception {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()),
                 XContentType.JSON
@@ -249,7 +231,6 @@ public void testMultipleLevels() throws Exception {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "2",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -274,7 +255,6 @@ public void testMultipleLevels() throws Exception {
             () -> docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "2",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "child").endObject()),
                     XContentType.JSON,
@@ -290,7 +270,6 @@ public void testMultipleLevels() throws Exception {
             () -> docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "2",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder()
@@ -311,7 +290,6 @@ public void testMultipleLevels() throws Exception {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "3",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -335,7 +313,6 @@ public void testMultipleLevels() throws Exception {
             () -> docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()),
                     XContentType.JSON
diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
index 75a7757ba8a5a..87f08e2ff50fc 100644
--- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
+++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java
@@ -586,7 +586,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException {
         }
         docMapper = mapperService.documentMapper();
         for (BytesReference document : documents) {
-            docs.add(docMapper.parse(new SourceToParse(context.index().getName(), type, "_temp_id", document, documentXContentType)));
+            docs.add(docMapper.parse(new SourceToParse(context.index().getName(), "_temp_id", document, documentXContentType)));
         }
 
         FieldNameAnalyzer fieldNameAnalyzer = (FieldNameAnalyzer) docMapper.mappers().indexAnalyzer();
diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java
index 691c3b648cd6a..2c0aa593317b4 100644
--- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java
+++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java
@@ -553,7 +553,6 @@ public void testPercolatorFieldMapper() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
                     XContentType.JSON
@@ -574,7 +573,6 @@ public void testPercolatorFieldMapper() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
                     XContentType.JSON
@@ -592,7 +590,6 @@ public void testPercolatorFieldMapper() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
                     XContentType.JSON
@@ -621,7 +618,6 @@ public void testStoringQueries() throws Exception {
                 .parse(
                     new SourceToParse(
                         "test",
-                        MapperService.SINGLE_MAPPING_NAME,
                         "1",
                         BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, query).endObject()),
                         XContentType.JSON
@@ -640,7 +636,6 @@ public void testQueryWithRewrite() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()),
                     XContentType.JSON
@@ -665,7 +660,6 @@ public void testPercolatorFieldMapperUnMappedField() throws Exception {
                 .parse(
                     new SourceToParse(
                         "test",
-                        MapperService.SINGLE_MAPPING_NAME,
                         "1",
                         BytesReference.bytes(
                             XContentFactory.jsonBuilder().startObject().field(fieldName, termQuery("unmapped_field", "value")).endObject()
@@ -684,7 +678,6 @@ public void testPercolatorFieldMapper_noQuery() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()),
                     XContentType.JSON
@@ -697,7 +690,6 @@ public void testPercolatorFieldMapper_noQuery() throws Exception {
                 .parse(
                     new SourceToParse(
                         "test",
-                        MapperService.SINGLE_MAPPING_NAME,
                         "1",
                         BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject()),
                         XContentType.JSON
@@ -760,7 +752,6 @@ public void testMultiplePercolatorFields() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    typeName,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject().field("query_field1", queryBuilder).field("query_field2", queryBuilder).endObject()
@@ -803,7 +794,6 @@ public void testNestedPercolatorField() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    typeName,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject().startObject("object_field").field("query_field", queryBuilder).endObject().endObject()
@@ -823,7 +813,6 @@ public void testNestedPercolatorField() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    typeName,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject()
@@ -846,7 +835,6 @@ public void testNestedPercolatorField() throws Exception {
                 .parse(
                     new SourceToParse(
                         "test",
-                        typeName,
                         "1",
                         BytesReference.bytes(
                             jsonBuilder().startObject()
@@ -954,7 +942,6 @@ public void testImplicitlySetDefaultScriptLang() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder()
@@ -1002,7 +989,6 @@ public void testImplicitlySetDefaultScriptLang() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder()
@@ -1097,7 +1083,6 @@ public void testDuplicatedClauses() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
                     XContentType.JSON
@@ -1123,7 +1108,6 @@ public void testDuplicatedClauses() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
                     XContentType.JSON
@@ -1152,7 +1136,6 @@ public void testDuplicatedClauses() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()),
                     XContentType.JSON
diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java
index 08bc18442b760..6d313e06263b3 100644
--- a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java
+++ b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java
@@ -342,16 +342,16 @@ public static class BlockingOperationListener implements IndexingOperationListen
 
         @Override
         public Engine.Index preIndex(ShardId shardId, Engine.Index index) {
-            return preCheck(index, index.type());
+            return preCheck(index);
         }
 
         @Override
         public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
-            return preCheck(delete, delete.type());
+            return preCheck(delete);
         }
 
-        private <T extends Engine.Operation> T preCheck(T operation, String type) {
-            if (("_doc".equals(type) == false) || (operation.origin() != Engine.Operation.Origin.PRIMARY)) {
+        private <T extends Engine.Operation> T preCheck(T operation) {
+            if (operation.origin() != Engine.Operation.Origin.PRIMARY) {
                 return operation;
             }
 
diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java
index c1fb3d8083151..4e4648a87fbfc 100644
--- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java
+++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java
@@ -66,7 +66,7 @@ public void testSizeEnabled() throws Exception {
         DocumentMapper docMapper = service.mapperService().documentMapper();
 
         BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
-        ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON));
 
         boolean stored = false;
         boolean points = false;
@@ -83,7 +83,7 @@ public void testSizeDisabled() throws Exception {
         DocumentMapper docMapper = service.mapperService().documentMapper();
 
         BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
-        ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON));
 
         assertThat(doc.rootDoc().getField("_size"), nullValue());
     }
@@ -93,7 +93,7 @@ public void testSizeNotSet() throws Exception {
         DocumentMapper docMapper = service.mapperService().documentMapper();
 
         BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject());
-        ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON));
+        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON));
 
         assertThat(doc.rootDoc().getField("_size"), nullValue());
     }
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
index a65908b238013..2f15334f882a9 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml
@@ -14,5 +14,7 @@
       indices.get_field_mapping:
         index: test_index
         fields: not_existent
+        ignore: 404    # ignore 404 failures for now
+                       # see: https://github.com/opensearch-project/OpenSearch/issues/2440
 
   - match: { 'test_index.mappings': {}}
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
index d7017122d221c..d28dcbb924f95 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
@@ -81,14 +81,12 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
 
     public void testNoSuchDoc() throws Exception {
         XContentBuilder mapping = jsonBuilder().startObject()
-            .startObject("type1")
             .startObject("properties")
             .startObject("field")
             .field("type", "text")
             .field("term_vector", "with_positions_offsets_payloads")
             .endObject()
             .endObject()
-            .endObject()
             .endObject();
         assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping));
 
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
index 4c200720a3af6..0e915577dc467 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java
@@ -375,7 +375,7 @@ public void testMaybeFlush() throws Exception {
         shard.applyIndexOperationOnPrimary(
             Versions.MATCH_ANY,
             VersionType.INTERNAL,
-            new SourceToParse("test", "_doc", "1", new BytesArray("{}"), XContentType.JSON),
+            new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON),
             SequenceNumbers.UNASSIGNED_SEQ_NO,
             0,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
@@ -460,7 +460,7 @@ public void testMaybeRollTranslogGeneration() throws Exception {
             final Engine.IndexResult result = shard.applyIndexOperationOnPrimary(
                 Versions.MATCH_ANY,
                 VersionType.INTERNAL,
-                new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray("{}"), XContentType.JSON),
+                new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON),
                 SequenceNumbers.UNASSIGNED_SEQ_NO,
                 0,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
diff --git a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
index f3ab9673a0201..cc9f20b7aa256 100644
--- a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
+++ b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
@@ -340,7 +340,6 @@ static boolean executeBulkItemRequest(
             final DeleteRequest request = context.getRequestToExecute();
             result = primary.applyDeleteOperationOnPrimary(
                 version,
-                MapperService.SINGLE_MAPPING_NAME,
                 request.id(),
                 request.versionType(),
                 request.ifSeqNo(),
@@ -351,14 +350,7 @@ static boolean executeBulkItemRequest(
             result = primary.applyIndexOperationOnPrimary(
                 version,
                 request.versionType(),
-                new SourceToParse(
-                    request.index(),
-                    MapperService.SINGLE_MAPPING_NAME,
-                    request.id(),
-                    request.source(),
-                    request.getContentType(),
-                    request.routing()
-                ),
+                new SourceToParse(request.index(), request.id(), request.source(), request.getContentType(), request.routing()),
                 request.ifSeqNo(),
                 request.ifPrimaryTerm(),
                 request.getAutoGeneratedTimestamp(),
@@ -601,7 +593,6 @@ private static Engine.Result performOpOnReplica(
                 final ShardId shardId = replica.shardId();
                 final SourceToParse sourceToParse = new SourceToParse(
                     shardId.getIndexName(),
-                    MapperService.SINGLE_MAPPING_NAME,
                     indexRequest.id(),
                     indexRequest.source(),
                     indexRequest.getContentType(),
@@ -622,7 +613,6 @@ private static Engine.Result performOpOnReplica(
                     primaryResponse.getSeqNo(),
                     primaryResponse.getPrimaryTerm(),
                     primaryResponse.getVersion(),
-                    MapperService.SINGLE_MAPPING_NAME,
                     deleteRequest.id()
                 );
                 break;
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java
index c02358d47b066..a7f351a918ae5 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java
@@ -67,6 +67,7 @@
 import org.opensearch.common.xcontent.XContentParser;
 import org.opensearch.gateway.MetadataStateFormat;
 import org.opensearch.index.Index;
+import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.seqno.SequenceNumbers;
 import org.opensearch.index.shard.ShardId;
 import org.opensearch.rest.RestStatus;
@@ -1159,12 +1160,17 @@ public Builder settings(Settings settings) {
             return this;
         }
 
-        public MappingMetadata mapping(String type) {
-            return mappings.get(type);
+        public MappingMetadata mapping() {
+            return mappings.get(MapperService.SINGLE_MAPPING_NAME);
         }
 
-        public Builder putMapping(String type, String source) throws IOException {
-            putMapping(new MappingMetadata(type, XContentHelper.convertToMap(XContentFactory.xContent(source), source, true)));
+        public Builder putMapping(String source) throws IOException {
+            putMapping(
+                new MappingMetadata(
+                    MapperService.SINGLE_MAPPING_NAME,
+                    XContentHelper.convertToMap(XContentFactory.xContent(source), source, true)
+                )
+            );
             return this;
         }
 
diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java
index 69145bdee72b2..3795961d39143 100644
--- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java
+++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java
@@ -185,22 +185,11 @@ private boolean refreshIndexMapping(IndexService indexService, IndexMetadata.Bui
         boolean dirty = false;
         String index = indexService.index().getName();
         try {
-            List<String> updatedTypes = new ArrayList<>();
             MapperService mapperService = indexService.mapperService();
             DocumentMapper mapper = mapperService.documentMapper();
             if (mapper != null) {
-                final String type = mapper.type();
-                if (!mapper.mappingSource().equals(builder.mapping(type).source())) {
-                    updatedTypes.add(type);
-                }
-            }
-
-            // if a single type is not up-to-date, re-send everything
-            if (updatedTypes.isEmpty() == false) {
-                logger.warn("[{}] re-syncing mappings with cluster state because of types [{}]", index, updatedTypes);
-                dirty = true;
-                if (mapper != null) {
-                    builder.putMapping(new MappingMetadata(mapper));
+                if (mapper.mappingSource().equals(builder.mapping().source()) == false) {
+                    dirty = true;
                 }
             }
         } catch (Exception e) {
diff --git a/server/src/main/java/org/opensearch/index/IndexingSlowLog.java b/server/src/main/java/org/opensearch/index/IndexingSlowLog.java
index fca91983b2d12..b77e7639152fb 100644
--- a/server/src/main/java/org/opensearch/index/IndexingSlowLog.java
+++ b/server/src/main/java/org/opensearch/index/IndexingSlowLog.java
@@ -226,7 +226,6 @@ private static Map<String, Object> prepareMap(
             map.put("message", index);
             map.put("took", TimeValue.timeValueNanos(tookInNanos));
             map.put("took_millis", "" + TimeUnit.NANOSECONDS.toMillis(tookInNanos));
-            map.put("doc_type", doc.type());
             map.put("id", doc.id());
             map.put("routing", doc.routing());
 
@@ -258,7 +257,6 @@ private static String message(Index index, ParsedDocument doc, long tookInNanos,
             sb.append(index).append(" ");
             sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], ");
             sb.append("took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], ");
-            sb.append("type[").append(doc.type()).append("], ");
             sb.append("id[").append(doc.id()).append("], ");
             if (doc.routing() == null) {
                 sb.append("routing[]");
diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java
index b821b687c5f68..fe026dd3251eb 100644
--- a/server/src/main/java/org/opensearch/index/engine/Engine.java
+++ b/server/src/main/java/org/opensearch/index/engine/Engine.java
@@ -1389,8 +1389,6 @@ public long startTime() {
             return this.startTime;
         }
 
-        public abstract String type();
-
         abstract String id();
 
         public abstract TYPE operationType();
@@ -1456,11 +1454,6 @@ public ParsedDocument parsedDoc() {
             return this.doc;
         }
 
-        @Override
-        public String type() {
-            return this.doc.type();
-        }
-
         @Override
         public String id() {
             return this.doc.id();
@@ -1485,7 +1478,7 @@ public BytesReference source() {
 
         @Override
         public int estimatedSizeInBytes() {
-            return (id().length() + type().length()) * 2 + source().length() + 12;
+            return id().length() * 2 + source().length() + 12;
         }
 
         /**
@@ -1516,13 +1509,11 @@ public long getIfPrimaryTerm() {
 
     public static class Delete extends Operation {
 
-        private final String type;
         private final String id;
         private final long ifSeqNo;
         private final long ifPrimaryTerm;
 
         public Delete(
-            String type,
             String id,
             Term uid,
             long seqNo,
@@ -1540,15 +1531,13 @@ public Delete(
             assert ifSeqNo == UNASSIGNED_SEQ_NO || ifSeqNo >= 0 : "ifSeqNo [" + ifSeqNo + "] must be non negative or unset";
             assert (origin == Origin.PRIMARY) || (ifSeqNo == UNASSIGNED_SEQ_NO && ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM)
                 : "cas operations are only allowed if origin is primary. get [" + origin + "]";
-            this.type = Objects.requireNonNull(type);
             this.id = Objects.requireNonNull(id);
             this.ifSeqNo = ifSeqNo;
             this.ifPrimaryTerm = ifPrimaryTerm;
         }
 
-        public Delete(String type, String id, Term uid, long primaryTerm) {
+        public Delete(String id, Term uid, long primaryTerm) {
             this(
-                type,
                 id,
                 uid,
                 UNASSIGNED_SEQ_NO,
@@ -1564,7 +1553,6 @@ public Delete(String type, String id, Term uid, long primaryTerm) {
 
         public Delete(Delete template, VersionType versionType) {
             this(
-                template.type(),
                 template.id(),
                 template.uid(),
                 template.seqNo(),
@@ -1578,11 +1566,6 @@ public Delete(Delete template, VersionType versionType) {
             );
         }
 
-        @Override
-        public String type() {
-            return this.type;
-        }
-
         @Override
         public String id() {
             return this.id;
@@ -1625,11 +1608,6 @@ public Term uid() {
             throw new UnsupportedOperationException();
         }
 
-        @Override
-        public String type() {
-            throw new UnsupportedOperationException();
-        }
-
         @Override
         public long version() {
             throw new UnsupportedOperationException();
diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java
index fd02f3049cc8e..d1085b01a3707 100644
--- a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java
+++ b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java
@@ -466,7 +466,7 @@ public interface TombstoneDocSupplier {
         /**
          * Creates a tombstone document for a delete operation.
          */
-        ParsedDocument newDeleteTombstoneDoc(String type, String id);
+        ParsedDocument newDeleteTombstoneDoc(String id);
 
         /**
          * Creates a tombstone document for a noop operation.
diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java
index a264c8e0a55d9..1756bc738cae1 100644
--- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java
@@ -1376,15 +1376,13 @@ private boolean assertDocDoesNotExist(final Index index, final boolean allowDele
         final VersionValue versionValue = versionMap.getVersionForAssert(index.uid().bytes());
         if (versionValue != null) {
             if (versionValue.isDelete() == false || allowDeleted == false) {
-                throw new AssertionError(
-                    "doc [" + index.type() + "][" + index.id() + "] exists in version map (version " + versionValue + ")"
-                );
+                throw new AssertionError("doc [" + index.id() + "] exists in version map (version " + versionValue + ")");
             }
         } else {
             try (Searcher searcher = acquireSearcher("assert doc doesn't exist", SearcherScope.INTERNAL)) {
                 final long docsWithId = searcher.count(new TermQuery(index.uid()));
                 if (docsWithId > 0) {
-                    throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists [" + docsWithId + "] times in index");
+                    throw new AssertionError("doc [" + index.id() + "] exists [" + docsWithId + "] times in index");
                 }
             }
         }
@@ -1420,7 +1418,6 @@ public DeleteResult delete(Delete delete) throws IOException {
                 // generate or register sequence number
                 if (delete.origin() == Operation.Origin.PRIMARY) {
                     delete = new Delete(
-                        delete.type(),
                         delete.id(),
                         delete.uid(),
                         generateSeqNoForOperationOnPrimary(delete),
@@ -1608,7 +1605,7 @@ private DeletionStrategy planDeletionAsPrimary(Delete delete) throws IOException
     private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws IOException {
         assert assertMaxSeqNoOfUpdatesIsAdvanced(delete.uid(), delete.seqNo(), false, false);
         try {
-            final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.type(), delete.id());
+            final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.id());
             assert tombstone.docs().size() == 1 : "Tombstone doc should have single doc [" + tombstone + "]";
             tombstone.updateSeqID(delete.seqNo(), delete.primaryTerm());
             tombstone.version().setLongValue(plan.versionOfDeletion);
diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java
index 76bb47c64ab4c..fce866b624367 100644
--- a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java
+++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java
@@ -288,10 +288,9 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException {
             assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]";
         } else {
             final String id = fields.uid().id();
-            final String type = fields.uid().type();
             final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
             if (isTombstone) {
-                op = new Translog.Delete(type, id, uid, seqNo, primaryTerm, version);
+                op = new Translog.Delete(id, uid, seqNo, primaryTerm, version);
                 assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]";
             } else {
                 final BytesReference source = fields.source();
@@ -310,7 +309,6 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException {
                 // TODO: pass the latest timestamp from engine.
                 final long autoGeneratedIdTimestamp = -1;
                 op = new Translog.Index(
-                    type,
                     id,
                     seqNo,
                     primaryTerm,
diff --git a/server/src/main/java/org/opensearch/index/get/ShardGetService.java b/server/src/main/java/org/opensearch/index/get/ShardGetService.java
index 8cf315e2fffa8..a877b0085816a 100644
--- a/server/src/main/java/org/opensearch/index/get/ShardGetService.java
+++ b/server/src/main/java/org/opensearch/index/get/ShardGetService.java
@@ -295,7 +295,6 @@ private GetResult innerGetLoadFromStoredFields(
                     assert source != null : "original source in translog must exist";
                     SourceToParse sourceToParse = new SourceToParse(
                         shardId.getIndexName(),
-                        MapperService.SINGLE_MAPPING_NAME,
                         id,
                         source,
                         XContentHelper.xContentType(source),
diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java
index 4e9004a880a57..37e740ec33321 100644
--- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java
+++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java
@@ -252,14 +252,14 @@ public ParsedDocument parse(SourceToParse source) throws MapperParsingException
         return documentParser.parseDocument(source, mapping.metadataMappers);
     }
 
-    public ParsedDocument createDeleteTombstoneDoc(String index, String type, String id) throws MapperParsingException {
-        final SourceToParse emptySource = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON);
+    public ParsedDocument createDeleteTombstoneDoc(String index, String id) throws MapperParsingException {
+        final SourceToParse emptySource = new SourceToParse(index, id, new BytesArray("{}"), XContentType.JSON);
         return documentParser.parseDocument(emptySource, deleteTombstoneMetadataFieldMappers).toTombstone();
     }
 
     public ParsedDocument createNoopTombstoneDoc(String index, String reason) throws MapperParsingException {
         final String id = ""; // _id won't be used.
-        final SourceToParse sourceToParse = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON);
+        final SourceToParse sourceToParse = new SourceToParse(index, id, new BytesArray("{}"), XContentType.JSON);
         final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, noopTombstoneMetadataFieldMappers).toTombstone();
         // Store the reason of a noop as a raw string in the _source field
         final BytesRef byteRef = new BytesRef(reason);
diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java b/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java
index 30579f501a50c..bcafddd6d5816 100644
--- a/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java
+++ b/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java
@@ -53,7 +53,6 @@
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Objects;
 
 import static org.opensearch.index.mapper.FieldMapper.IGNORE_MALFORMED_SETTING;
 
@@ -71,8 +70,6 @@ final class DocumentParser {
     }
 
     ParsedDocument parseDocument(SourceToParse source, MetadataFieldMapper[] metadataFieldsMappers) throws MapperParsingException {
-        validateType(source);
-
         final Mapping mapping = docMapper.mapping();
         final ParseContext.InternalParseContext context;
         final XContentType xContentType = source.getXContentType();
@@ -140,17 +137,6 @@ private static void internalParseDocument(
         }
     }
 
-    private void validateType(SourceToParse source) {
-        if (Objects.equals(source.type(), docMapper.type()) == false && MapperService.SINGLE_MAPPING_NAME.equals(source.type()) == false) { // used
-                                                                                                                                            // by
-                                                                                                                                            // typeless
-                                                                                                                                            // APIs
-            throw new MapperParsingException(
-                "Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + docMapper.type() + "]"
-            );
-        }
-    }
-
     private static void validateStart(XContentParser parser) throws IOException {
         // will result in START_OBJECT
         XContentParser.Token token = parser.nextToken();
@@ -189,7 +175,6 @@ private static ParsedDocument parsedDocument(SourceToParse source, ParseContext.
             context.version(),
             context.seqID(),
             context.sourceToParse().id(),
-            context.sourceToParse().type(),
             source.routing(),
             context.docs(),
             context.sourceToParse().source(),
diff --git a/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java b/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java
index 2d3b5fc1bb9dc..6991db3306ea7 100644
--- a/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java
+++ b/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java
@@ -47,7 +47,7 @@ public class ParsedDocument {
 
     private final Field version;
 
-    private final String id, type;
+    private final String id;
     private final SeqNoFieldMapper.SequenceIDFields seqID;
 
     private final String routing;
@@ -63,7 +63,6 @@ public ParsedDocument(
         Field version,
         SeqNoFieldMapper.SequenceIDFields seqID,
         String id,
-        String type,
         String routing,
         List<Document> documents,
         BytesReference source,
@@ -73,7 +72,6 @@ public ParsedDocument(
         this.version = version;
         this.seqID = seqID;
         this.id = id;
-        this.type = type;
         this.routing = routing;
         this.documents = documents;
         this.source = source;
@@ -85,10 +83,6 @@ public String id() {
         return this.id;
     }
 
-    public String type() {
-        return this.type;
-    }
-
     public Field version() {
         return version;
     }
diff --git a/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java b/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java
index 37ecf9491e4b3..4aa8d3117bc9c 100644
--- a/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java
+++ b/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java
@@ -45,17 +45,14 @@ public class SourceToParse {
 
     private final String index;
 
-    private final String type;
-
     private final String id;
 
     private final @Nullable String routing;
 
     private final XContentType xContentType;
 
-    public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType, @Nullable String routing) {
+    public SourceToParse(String index, String id, BytesReference source, XContentType xContentType, @Nullable String routing) {
         this.index = Objects.requireNonNull(index);
-        this.type = Objects.requireNonNull(type);
         this.id = Objects.requireNonNull(id);
         // we always convert back to byte array, since we store it and Field only supports bytes..
         // so, we might as well do it here, and improve the performance of working with direct byte arrays
@@ -64,8 +61,8 @@ public SourceToParse(String index, String type, String id, BytesReference source
         this.routing = routing;
     }
 
-    public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType) {
-        this(index, type, id, source, xContentType, null);
+    public SourceToParse(String index, String id, BytesReference source, XContentType xContentType) {
+        this(index, id, source, xContentType, null);
     }
 
     public BytesReference source() {
@@ -76,10 +73,6 @@ public String index() {
         return this.index;
     }
 
-    public String type() {
-        return this.type;
-    }
-
     public String id() {
         return this.id;
     }
diff --git a/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java
index ce7bdd3682d83..9adb1430b3df0 100644
--- a/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java
+++ b/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java
@@ -186,7 +186,7 @@ public void preParse(ParseContext context) {
         if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
             return;
         }
-        context.doc().add(new Field(fieldType().name(), context.sourceToParse().type(), fieldType));
+        context.doc().add(new Field(fieldType().name(), MapperService.SINGLE_MAPPING_NAME, fieldType));
         if (fieldType().hasDocValues()) {
             context.doc().add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(MapperService.SINGLE_MAPPING_NAME)));
         }
diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java
index cd45b9483834b..9aac2c11e2d35 100644
--- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java
+++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java
@@ -122,7 +122,6 @@
 import org.opensearch.index.mapper.DocumentMapper;
 import org.opensearch.index.mapper.DocumentMapperForType;
 import org.opensearch.index.mapper.IdFieldMapper;
-import org.opensearch.index.mapper.MapperParsingException;
 import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.mapper.Mapping;
 import org.opensearch.index.mapper.ParsedDocument;
@@ -154,7 +153,6 @@
 import org.opensearch.index.warmer.WarmerStats;
 import org.opensearch.indices.IndexingMemoryController;
 import org.opensearch.indices.IndicesService;
-import org.opensearch.indices.TypeMissingException;
 import org.opensearch.indices.breaker.CircuitBreakerService;
 import org.opensearch.indices.cluster.IndicesClusterStateService;
 import org.opensearch.indices.recovery.PeerRecoveryTargetService;
@@ -867,23 +865,9 @@ private Engine.IndexResult applyIndexOperation(
         ensureWriteAllowed(origin);
         Engine.Index operation;
         try {
-            final String resolvedType = mapperService.resolveDocumentType(sourceToParse.type());
-            final SourceToParse sourceWithResolvedType;
-            if (resolvedType.equals(sourceToParse.type())) {
-                sourceWithResolvedType = sourceToParse;
-            } else {
-                sourceWithResolvedType = new SourceToParse(
-                    sourceToParse.index(),
-                    resolvedType,
-                    sourceToParse.id(),
-                    sourceToParse.source(),
-                    sourceToParse.getXContentType(),
-                    sourceToParse.routing()
-                );
-            }
             operation = prepareIndex(
                 docMapper(),
-                sourceWithResolvedType,
+                sourceToParse,
                 seqNo,
                 opPrimaryTerm,
                 version,
@@ -953,8 +937,7 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc
             if (logger.isTraceEnabled()) {
                 // don't use index.source().utf8ToString() here, the source might not be valid UTF-8
                 logger.trace(
-                    "index [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]",
-                    index.type(),
+                    "index [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]",
                     index.id(),
                     index.seqNo(),
                     routingEntry().allocationId(),
@@ -966,9 +949,8 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc
             result = engine.index(index);
             if (logger.isTraceEnabled()) {
                 logger.trace(
-                    "index-done [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}] "
+                    "index-done [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}] "
                         + "result-seq# [{}] result-term [{}] failure [{}]",
-                    index.type(),
                     index.id(),
                     index.seqNo(),
                     routingEntry().allocationId(),
@@ -984,8 +966,7 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc
             if (logger.isTraceEnabled()) {
                 logger.trace(
                     new ParameterizedMessage(
-                        "index-fail [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]",
-                        index.type(),
+                        "index-fail [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]",
                         index.id(),
                         index.seqNo(),
                         routingEntry().allocationId(),
@@ -1038,7 +1019,6 @@ public Engine.DeleteResult getFailedDeleteResult(Exception e, long version) {
 
     public Engine.DeleteResult applyDeleteOperationOnPrimary(
         long version,
-        String type,
         String id,
         VersionType versionType,
         long ifSeqNo,
@@ -1050,7 +1030,6 @@ public Engine.DeleteResult applyDeleteOperationOnPrimary(
             UNASSIGNED_SEQ_NO,
             getOperationPrimaryTerm(),
             version,
-            type,
             id,
             versionType,
             ifSeqNo,
@@ -1059,14 +1038,12 @@ public Engine.DeleteResult applyDeleteOperationOnPrimary(
         );
     }
 
-    public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long opPrimaryTerm, long version, String type, String id)
-        throws IOException {
+    public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long opPrimaryTerm, long version, String id) throws IOException {
         return applyDeleteOperation(
             getEngine(),
             seqNo,
             opPrimaryTerm,
             version,
-            type,
             id,
             null,
             UNASSIGNED_SEQ_NO,
@@ -1080,7 +1057,6 @@ private Engine.DeleteResult applyDeleteOperation(
         long seqNo,
         long opPrimaryTerm,
         long version,
-        String type,
         String id,
         @Nullable VersionType versionType,
         long ifSeqNo,
@@ -1093,52 +1069,12 @@ private Engine.DeleteResult applyDeleteOperation(
             + getOperationPrimaryTerm()
             + "]";
         ensureWriteAllowed(origin);
-        // When there is a single type, the unique identifier is only composed of the _id,
-        // so there is no way to differentiate foo#1 from bar#1. This is especially an issue
-        // if a user first deletes foo#1 and then indexes bar#1: since we do not encode the
-        // _type in the uid it might look like we are reindexing the same document, which
-        // would fail if bar#1 is indexed with a lower version than foo#1 was deleted with.
-        // In order to work around this issue, we make deletions create types. This way, we
-        // fail if index and delete operations do not use the same type.
-        // TODO: clean this up when types are gone
-        try {
-            Mapping update = docMapper().getMapping();
-            if (update != null) {
-                return new Engine.DeleteResult(update);
-            }
-        } catch (MapperParsingException | IllegalArgumentException | TypeMissingException e) {
-            return new Engine.DeleteResult(e, version, getOperationPrimaryTerm(), seqNo, false);
-        }
-        if (mapperService.resolveDocumentType(type).equals(mapperService.documentMapper().type()) == false) {
-            // We should never get there due to the fact that we generate mapping updates on deletes,
-            // but we still prefer to have a hard exception here as we would otherwise delete a
-            // document in the wrong type.
-            throw new IllegalStateException(
-                "Deleting document from type ["
-                    + mapperService.resolveDocumentType(type)
-                    + "] while current type is ["
-                    + mapperService.documentMapper().type()
-                    + "]"
-            );
-        }
         final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
-        final Engine.Delete delete = prepareDelete(
-            type,
-            id,
-            uid,
-            seqNo,
-            opPrimaryTerm,
-            version,
-            versionType,
-            origin,
-            ifSeqNo,
-            ifPrimaryTerm
-        );
+        final Engine.Delete delete = prepareDelete(id, uid, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm);
         return delete(engine, delete);
     }
 
     private Engine.Delete prepareDelete(
-        String type,
         String id,
         Term uid,
         long seqNo,
@@ -1150,19 +1086,7 @@ private Engine.Delete prepareDelete(
         long ifPrimaryTerm
     ) {
         long startTime = System.nanoTime();
-        return new Engine.Delete(
-            mapperService.resolveDocumentType(type),
-            id,
-            uid,
-            seqNo,
-            primaryTerm,
-            version,
-            versionType,
-            origin,
-            startTime,
-            ifSeqNo,
-            ifPrimaryTerm
-        );
+        return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm);
     }
 
     private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws IOException {
@@ -1813,7 +1737,6 @@ private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation o
                     origin,
                     new SourceToParse(
                         shardId.getIndexName(),
-                        index.type(),
                         index.id(),
                         index.source(),
                         XContentHelper.xContentType(index.source()),
@@ -1828,7 +1751,6 @@ private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation o
                     delete.seqNo(),
                     delete.primaryTerm(),
                     delete.version(),
-                    delete.type(),
                     delete.id(),
                     versionType,
                     UNASSIGNED_SEQ_NO,
@@ -3873,8 +3795,8 @@ private EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() {
             : null;
         return new EngineConfig.TombstoneDocSupplier() {
             @Override
-            public ParsedDocument newDeleteTombstoneDoc(String type, String id) {
-                return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id);
+            public ParsedDocument newDeleteTombstoneDoc(String id) {
+                return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), id);
             }
 
             @Override
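
The delete path above now keys purely on _id. A minimal sketch of the uid term construction it retains (mirroring the line kept in applyDeleteOperation; standalone for illustration):

    import org.apache.lucene.index.Term;
    import org.opensearch.index.mapper.IdFieldMapper;
    import org.opensearch.index.mapper.Uid;

    class DeleteUidSketch {
        // With a single implicit type, the Lucene uid is derived from _id alone,
        // so the removed foo#1 vs bar#1 disambiguation logic is no longer needed.
        static Term uidFor(String id) {
            return new Term(IdFieldMapper.NAME, Uid.encodeId(id));
        }
    }
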
diff --git a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java
index ecbdd3875f14a..3467a86c86c86 100644
--- a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java
+++ b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java
@@ -391,7 +391,7 @@ private static ParsedDocument parseDocument(
         MapperService mapperService = indexShard.mapperService();
         DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate();
         ParsedDocument parsedDocument = docMapper.getDocumentMapper()
-            .parse(new SourceToParse(index, MapperService.SINGLE_MAPPING_NAME, "_id_for_tv_api", doc, xContentType, routing));
+            .parse(new SourceToParse(index, "_id_for_tv_api", doc, xContentType, routing));
         if (docMapper.getMapping() != null) {
             parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping());
         }
diff --git a/server/src/main/java/org/opensearch/index/translog/Translog.java b/server/src/main/java/org/opensearch/index/translog/Translog.java
index dc7a2bb331808..e04dd6681705e 100644
--- a/server/src/main/java/org/opensearch/index/translog/Translog.java
+++ b/server/src/main/java/org/opensearch/index/translog/Translog.java
@@ -35,7 +35,7 @@
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.AlreadyClosedException;
-import org.opensearch.LegacyESVersion;
+import org.opensearch.Version;
 import org.opensearch.common.Nullable;
 import org.opensearch.common.Strings;
 import org.opensearch.common.UUIDs;
@@ -54,6 +54,7 @@
 import org.opensearch.index.VersionType;
 import org.opensearch.index.engine.Engine;
 import org.opensearch.index.engine.MissingHistoryOperationsException;
+import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.seqno.SequenceNumbers;
 import org.opensearch.index.shard.AbstractIndexShardComponent;
 import org.opensearch.index.shard.IndexShardComponent;
@@ -1192,11 +1193,10 @@ public static class Index implements Operation {
         public static final int FORMAT_6_0 = 8; // since 6.0.0
         public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0
         public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1;
-        public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE;
-
+        public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1;
+        public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE;
         private final String id;
         private final long autoGeneratedIdTimestamp;
-        private final String type;
         private final long seqNo;
         private final long primaryTerm;
         private final long version;
@@ -1207,7 +1207,10 @@ private Index(final StreamInput in) throws IOException {
             final int format = in.readVInt(); // SERIALIZATION_FORMAT
             assert format >= FORMAT_6_0 : "format was: " + format;
             id = in.readString();
-            type = in.readString();
+            if (format < FORMAT_NO_DOC_TYPE) {
+                in.readString();
+                // Can't assert that this is _doc because pre 2.0 indexes can have any name for a type
+            }
             source = in.readBytesReference();
             routing = in.readOptionalString();
             if (format < FORMAT_NO_PARENT) {
@@ -1224,7 +1227,6 @@ private Index(final StreamInput in) throws IOException {
 
         public Index(Engine.Index index, Engine.IndexResult indexResult) {
             this.id = index.id();
-            this.type = index.type();
             this.source = index.source();
             this.routing = index.routing();
             this.seqNo = indexResult.getSeqNo();
@@ -1233,21 +1235,11 @@ public Index(Engine.Index index, Engine.IndexResult indexResult) {
             this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp();
         }
 
-        public Index(String type, String id, long seqNo, long primaryTerm, byte[] source) {
-            this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1);
+        public Index(String id, long seqNo, long primaryTerm, byte[] source) {
+            this(id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1);
         }
 
-        public Index(
-            String type,
-            String id,
-            long seqNo,
-            long primaryTerm,
-            long version,
-            byte[] source,
-            String routing,
-            long autoGeneratedIdTimestamp
-        ) {
-            this.type = type;
+        public Index(String id, long seqNo, long primaryTerm, long version, byte[] source, String routing, long autoGeneratedIdTimestamp) {
             this.id = id;
             this.source = new BytesArray(source);
             this.seqNo = seqNo;
@@ -1264,12 +1256,10 @@ public Type opType() {
 
         @Override
         public long estimateSize() {
-            return (2 * id.length()) + (2 * type.length()) + source.length() + (routing != null ? 2 * routing.length() : 0) + (4
-                * Long.BYTES); // timestamp, seq_no, primary_term, and version
-        }
-
-        public String type() {
-            return this.type;
+            return (2 * id.length()) + source.length() + (routing != null ? 2 * routing.length() : 0) + (4 * Long.BYTES); // timestamp,
+                                                                                                                          // seq_no,
+                                                                                                                          // primary_term,
+                                                                                                                          // and version
         }
 
         public String id() {
@@ -1304,10 +1294,12 @@ public Source getSource() {
         }
 
         private void write(final StreamOutput out) throws IOException {
-            final int format = out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0;
+            final int format = out.getVersion().onOrAfter(Version.V_2_0_0) ? SERIALIZATION_FORMAT : FORMAT_NO_VERSION_TYPE;
             out.writeVInt(format);
             out.writeString(id);
-            out.writeString(type);
+            if (format < FORMAT_NO_DOC_TYPE) {
+                out.writeString(MapperService.SINGLE_MAPPING_NAME);
+            }
             out.writeBytesReference(source);
             out.writeOptionalString(routing);
             if (format < FORMAT_NO_PARENT) {
@@ -1337,7 +1329,6 @@ public boolean equals(Object o) {
                 || seqNo != index.seqNo
                 || primaryTerm != index.primaryTerm
                 || id.equals(index.id) == false
-                || type.equals(index.type) == false
                 || autoGeneratedIdTimestamp != index.autoGeneratedIdTimestamp
                 || source.equals(index.source) == false) {
                 return false;
@@ -1352,7 +1343,6 @@ public boolean equals(Object o) {
         @Override
         public int hashCode() {
             int result = id.hashCode();
-            result = 31 * result + type.hashCode();
             result = 31 * result + Long.hashCode(seqNo);
             result = 31 * result + Long.hashCode(primaryTerm);
             result = 31 * result + Long.hashCode(version);
@@ -1368,9 +1358,6 @@ public String toString() {
                 + "id='"
                 + id
                 + '\''
-                + ", type='"
-                + type
-                + '\''
                 + ", seqNo="
                 + seqNo
                 + ", primaryTerm="
@@ -1393,9 +1380,10 @@ public static class Delete implements Operation {
         private static final int FORMAT_6_0 = 4; // 6.0 - *
         public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0
         public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1;
-        public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE;
+        public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1;
+        public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE;
 
-        private final String type, id;
+        private final String id;
         private final Term uid;
         private final long seqNo;
         private final long primaryTerm;
@@ -1404,7 +1392,10 @@ public static class Delete implements Operation {
         private Delete(final StreamInput in) throws IOException {
             final int format = in.readVInt(); // SERIALIZATION_FORMAT
             assert format >= FORMAT_6_0 : "format was: " + format;
-            type = in.readString();
+            if (format < FORMAT_NO_DOC_TYPE) {
+                in.readString();
+                // Can't assert that this is _doc because pre 2.0 indexes can have any name for a type
+            }
             id = in.readString();
             uid = new Term(in.readString(), in.readBytesRef());
             this.version = in.readLong();
@@ -1416,16 +1407,15 @@ private Delete(final StreamInput in) throws IOException {
         }
 
         public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) {
-            this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion());
+            this(delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion());
         }
 
         /** utility for testing */
-        public Delete(String type, String id, long seqNo, long primaryTerm, Term uid) {
-            this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY);
+        public Delete(String id, long seqNo, long primaryTerm, Term uid) {
+            this(id, uid, seqNo, primaryTerm, Versions.MATCH_ANY);
         }
 
-        public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version) {
-            this.type = Objects.requireNonNull(type);
+        public Delete(String id, Term uid, long seqNo, long primaryTerm, long version) {
             this.id = Objects.requireNonNull(id);
             this.uid = uid;
             this.seqNo = seqNo;
@@ -1440,12 +1430,8 @@ public Type opType() {
 
         @Override
         public long estimateSize() {
-            return (id.length() * 2) + (type.length() * 2) + ((uid.field().length() * 2) + (uid.text().length()) * 2) + (type.length() * 2)
-                + (3 * Long.BYTES); // seq_no, primary_term, and version;
-        }
-
-        public String type() {
-            return type;
+            return (id.length() * 2) + ((uid.field().length() * 2) + (uid.text().length()) * 2) + (3 * Long.BYTES); // seq_no, primary_term,
+                                                                                                                    // and version
         }
 
         public String id() {
@@ -1476,9 +1462,11 @@ public Source getSource() {
         }
 
         private void write(final StreamOutput out) throws IOException {
-            final int format = out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0;
+            final int format = out.getVersion().onOrAfter(Version.V_2_0_0) ? SERIALIZATION_FORMAT : FORMAT_NO_VERSION_TYPE;
             out.writeVInt(format);
-            out.writeString(type);
+            if (format < FORMAT_NO_DOC_TYPE) {
+                out.writeString(MapperService.SINGLE_MAPPING_NAME);
+            }
             out.writeString(id);
             out.writeString(uid.field());
             out.writeBytesRef(uid.bytes());
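
Both write() methods gate the legacy type slot on the receiver's wire format. A self-contained sketch of that pattern over plain java.io streams (the real code uses StreamInput/StreamOutput, VInts, and node versions; the constant values are placeholders and the field order follows Delete above):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;

    class FormatGateSketch {
        static final int FORMAT_NO_VERSION_TYPE = 10; // placeholder value
        static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1;

        // Writing: peers on the older format still expect a type string, so
        // emit the constant "_doc" for them and omit the slot otherwise.
        static void write(DataOutput out, boolean peerUsesOldFormat, String id) throws IOException {
            int format = peerUsesOldFormat ? FORMAT_NO_VERSION_TYPE : FORMAT_NO_DOC_TYPE;
            out.writeInt(format);
            if (format < FORMAT_NO_DOC_TYPE) {
                out.writeUTF("_doc"); // legacy type slot
            }
            out.writeUTF(id);
        }

        // Reading: consume and discard the legacy type when present; it cannot
        // be asserted to equal "_doc" because pre-2.0 indexes allowed any type name.
        static String read(DataInput in) throws IOException {
            int format = in.readInt();
            if (format < FORMAT_NO_DOC_TYPE) {
                in.readUTF(); // legacy type, discarded
            }
            return in.readUTF();
        }
    }
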
diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java b/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java
index 37c1d5d698408..66241f7b6847f 100644
--- a/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java
+++ b/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java
@@ -283,7 +283,6 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc
                     final Translog.Index o1 = (Translog.Index) prvOp;
                     final Translog.Index o2 = (Translog.Index) newOp;
                     sameOp = Objects.equals(o1.id(), o2.id())
-                        && Objects.equals(o1.type(), o2.type())
                         && Objects.equals(o1.source(), o2.source())
                         && Objects.equals(o1.routing(), o2.routing())
                         && o1.primaryTerm() == o2.primaryTerm()
@@ -293,7 +292,6 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc
                     final Translog.Delete o1 = (Translog.Delete) newOp;
                     final Translog.Delete o2 = (Translog.Delete) prvOp;
                     sameOp = Objects.equals(o1.id(), o2.id())
-                        && Objects.equals(o1.type(), o2.type())
                         && o1.primaryTerm() == o2.primaryTerm()
                         && o1.seqNo() == o2.seqNo()
                         && o1.version() == o2.version();
diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java
index a812dd2888e5d..b1fa20307a12b 100644
--- a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java
+++ b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java
@@ -109,7 +109,6 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
     private IndexMetadata indexMetadata() throws IOException {
         return IndexMetadata.builder("index")
             .putMapping(
-                "_doc",
                 "{\"properties\":{\"foo\":{\"type\":\"text\",\"fields\":" + "{\"keyword\":{\"type\":\"keyword\",\"ignore_above\":256}}}}}"
             )
             .settings(idxSettings)
@@ -711,7 +710,7 @@ public void testUpdateWithDelete() throws Exception {
         final long resultSeqNo = 13;
         Engine.DeleteResult deleteResult = new FakeDeleteResult(1, 1, resultSeqNo, found, resultLocation);
         IndexShard shard = mock(IndexShard.class);
-        when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult);
+        when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult);
         when(shard.indexSettings()).thenReturn(indexSettings);
         when(shard.shardId()).thenReturn(shardId);
 
diff --git a/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java b/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java
index 09bab1af7fc43..1184b05461025 100644
--- a/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java
+++ b/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java
@@ -127,7 +127,6 @@ public TaskManager getTaskManager() {
                             .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID())
                     )
                         .putMapping(
-                            "_doc",
                             XContentHelper.convertToJson(
                                 BytesReference.bytes(
                                     XContentFactory.jsonBuilder()
@@ -153,7 +152,6 @@ public TaskManager getTaskManager() {
                                 .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID())
                         )
                             .putMapping(
-                                "_doc",
                                 XContentHelper.convertToJson(
                                     BytesReference.bytes(
                                         XContentFactory.jsonBuilder()
diff --git a/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java b/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java
index f8e76b5e85b61..a078966e3aa80 100644
--- a/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java
+++ b/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java
@@ -48,7 +48,7 @@ public class ResyncReplicationRequestTests extends OpenSearchTestCase {
 
     public void testSerialization() throws IOException {
         final byte[] bytes = "{}".getBytes(Charset.forName("UTF-8"));
-        final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1);
+        final Translog.Index index = new Translog.Index("id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1);
         final ShardId shardId = new ShardId(new Index("index", "uuid"), 0);
         final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, 100, new Translog.Operation[] { index });
 
diff --git a/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java b/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java
index 2b6b913b080ec..a544bad4cd9e6 100644
--- a/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java
+++ b/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java
@@ -206,7 +206,7 @@ public void testResyncDoesNotBlockOnPrimaryAction() throws Exception {
                     shardId,
                     42L,
                     100,
-                    new Translog.Operation[] { new Translog.Index("type", "id", 0, primaryTerm, 0L, bytes, null, -1) }
+                    new Translog.Operation[] { new Translog.Index("id", 0, primaryTerm, 0L, bytes, null, -1) }
                 );
 
                 final PlainActionFuture<ResyncReplicationResponse> listener = new PlainActionFuture<>();
diff --git a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java
index 29c8204af02b6..b034b335bd9a3 100644
--- a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java
+++ b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java
@@ -170,7 +170,7 @@ public void setUp() throws Exception {
         IndexMetadata indexMetadata = IndexMetadata.builder(shardId.getIndexName())
             .settings(indexSettings)
             .primaryTerm(shardId.id(), primary.getOperationPrimaryTerm())
-            .putMapping("_doc", "{ \"properties\": { \"value\":  { \"type\": \"short\"}}}")
+            .putMapping("{ \"properties\": { \"value\":  { \"type\": \"short\"}}}")
             .build();
         state.metadata(Metadata.builder().put(indexMetadata, false).generateClusterUuidIfNeeded());
 
diff --git a/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java b/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java
index f4f2d9b470a90..b62050a1b8050 100644
--- a/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java
+++ b/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java
@@ -128,7 +128,6 @@ public TaskManager getTaskManager() {
                             .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID())
                     )
                         .putMapping(
-                            "_doc",
                             XContentHelper.convertToJson(
                                 BytesReference.bytes(
                                     XContentFactory.jsonBuilder()
@@ -154,7 +153,6 @@ public TaskManager getTaskManager() {
                                 .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID())
                         )
                             .putMapping(
-                                "_doc",
                                 XContentHelper.convertToJson(
                                     BytesReference.bytes(
                                         XContentFactory.jsonBuilder()
diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java
index a60946f8befaa..ddaea6edbfd90 100644
--- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java
+++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java
@@ -240,7 +240,7 @@ private static MetadataCreateIndexService getMetadataCreateIndexService() throws
                                     .put(request.settings())
                                     .build()
                             )
-                            .putMapping("_doc", generateMapping("@timestamp"))
+                            .putMapping(generateMapping("@timestamp"))
                             .numberOfShards(1)
                             .numberOfReplicas(1)
                             .build(),
diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java
index 64716794bde2b..4e7502ada661f 100644
--- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java
+++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java
@@ -644,7 +644,7 @@ public void testFindMappings() throws IOException {
                             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                             .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                     )
-                    .putMapping("_doc", FIND_MAPPINGS_TEST_ITEM)
+                    .putMapping(FIND_MAPPINGS_TEST_ITEM)
             )
             .put(
                 IndexMetadata.builder("index2")
@@ -654,7 +654,7 @@ public void testFindMappings() throws IOException {
                             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                             .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                     )
-                    .putMapping("_doc", FIND_MAPPINGS_TEST_ITEM)
+                    .putMapping(FIND_MAPPINGS_TEST_ITEM)
             )
             .build();
 
@@ -739,7 +739,7 @@ public void testFindMappingsWithFilters() throws IOException {
                             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                             .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                     )
-                    .putMapping("_doc", mapping)
+                    .putMapping(mapping)
             )
             .put(
                 IndexMetadata.builder("index2")
@@ -749,7 +749,7 @@ public void testFindMappingsWithFilters() throws IOException {
                             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                             .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                     )
-                    .putMapping("_doc", mapping)
+                    .putMapping(mapping)
             )
             .put(
                 IndexMetadata.builder("index3")
@@ -759,7 +759,7 @@ public void testFindMappingsWithFilters() throws IOException {
                             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                             .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                     )
-                    .putMapping("_doc", mapping)
+                    .putMapping(mapping)
             )
             .build();
 
diff --git a/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java
index 80ad315c0f613..253018d7f569f 100644
--- a/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java
+++ b/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java
@@ -112,8 +112,7 @@ public void testSimpleJsonFromAndTo() throws IOException {
                     .creationDate(2L)
                     .numberOfShards(1)
                     .numberOfReplicas(2)
-                    .putMapping("mapping1", MAPPING_SOURCE1)
-                    .putMapping("mapping2", MAPPING_SOURCE2)
+                    .putMapping(MAPPING_SOURCE1)
                     .putAlias(newAliasMetadataBuilder("alias1").filter(ALIAS_FILTER1))
                     .putAlias(newAliasMetadataBuilder("alias3").writeIndex(randomBoolean() ? null : randomBoolean()))
                     .putAlias(newAliasMetadataBuilder("alias4").filter(ALIAS_FILTER2))
diff --git a/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java
index facb443422b31..38c8491d79150 100644
--- a/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java
+++ b/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java
@@ -223,7 +223,6 @@ public void testSlowLogMessageHasJsonFields() throws IOException {
             new NumericDocValuesField("version", 1),
             SeqNoFieldMapper.SequenceIDFields.emptySeqID(),
             "id",
-            "test",
             "routingValue",
             null,
             source,
@@ -237,7 +236,6 @@ public void testSlowLogMessageHasJsonFields() throws IOException {
         assertThat(p.getValueFor("message"), equalTo("[foo/123]"));
         assertThat(p.getValueFor("took"), equalTo("10nanos"));
         assertThat(p.getValueFor("took_millis"), equalTo("0"));
-        assertThat(p.getValueFor("doc_type"), equalTo("test"));
         assertThat(p.getValueFor("id"), equalTo("id"));
         assertThat(p.getValueFor("routing"), equalTo("routingValue"));
         assertThat(p.getValueFor("source"), is(emptyOrNullString()));
@@ -253,7 +251,6 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
             new NumericDocValuesField("version", 1),
             SeqNoFieldMapper.SequenceIDFields.emptySeqID(),
             "id",
-            "test",
             null,
             null,
             source,
@@ -284,7 +281,6 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException {
             new NumericDocValuesField("version", 1),
             SeqNoFieldMapper.SequenceIDFields.emptySeqID(),
             "id",
-            "test",
             null,
             null,
             source,
diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
index 5f98a05840562..aeba4b1b2f0e7 100644
--- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java
@@ -124,16 +124,11 @@
 import org.opensearch.index.VersionType;
 import org.opensearch.index.codec.CodecService;
 import org.opensearch.index.fieldvisitor.FieldsVisitor;
-import org.opensearch.index.mapper.ContentPath;
 import org.opensearch.index.mapper.IdFieldMapper;
-import org.opensearch.index.mapper.Mapper.BuilderContext;
 import org.opensearch.index.mapper.MapperService;
-import org.opensearch.index.mapper.Mapping;
-import org.opensearch.index.mapper.MetadataFieldMapper;
 import org.opensearch.index.mapper.ParseContext;
 import org.opensearch.index.mapper.ParseContext.Document;
 import org.opensearch.index.mapper.ParsedDocument;
-import org.opensearch.index.mapper.RootObjectMapper;
 import org.opensearch.index.mapper.SeqNoFieldMapper;
 import org.opensearch.index.mapper.SourceFieldMapper;
 import org.opensearch.index.mapper.Uid;
@@ -195,7 +190,6 @@
 import java.util.stream.Collectors;
 import java.util.stream.LongStream;
 
-import static java.util.Collections.emptyMap;
 import static java.util.Collections.shuffle;
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.CoreMatchers.sameInstance;
@@ -304,7 +298,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException {
         if (operation.origin() == PRIMARY) {
             assertFalse("safe access should NOT be required last indexing round was only append only", engine.isSafeAccessRequired());
         }
-        engine.delete(new Engine.Delete(operation.type(), operation.id(), operation.uid(), primaryTerm.get()));
+        engine.delete(new Engine.Delete(operation.id(), operation.uid(), primaryTerm.get()));
         assertTrue("safe access should be required", engine.isSafeAccessRequired());
         engine.refresh("test");
         assertTrue("safe access should be required", engine.isSafeAccessRequired());
@@ -478,7 +472,7 @@ public void testSegments() throws Exception {
                 liveDocsFirstSegment.remove(idToUpdate);
                 ParsedDocument doc = testParsedDocument(idToUpdate, null, testDocument(), B_1, null);
                 if (randomBoolean()) {
-                    engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc), primaryTerm.get()));
+                    engine.delete(new Engine.Delete(doc.id(), newUid(doc), primaryTerm.get()));
                     deletes++;
                 } else {
                     engine.index(indexForDoc(doc));
@@ -609,7 +603,6 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException {
                     initialEngine.index(operation);
                 } else {
                     final Engine.Delete operation = new Engine.Delete(
-                        "test",
                         "1",
                         newUid(doc),
                         UNASSIGNED_SEQ_NO,
@@ -879,7 +872,7 @@ public void testSimpleOperations() throws Exception {
         searchResult.close();
 
         // now delete
-        engine.delete(new Engine.Delete("test", "1", newUid(doc), primaryTerm.get()));
+        engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get()));
 
         // it's not deleted yet
         searchResult = engine.acquireSearcher("test");
@@ -1026,7 +1019,7 @@ public void testSearchResultRelease() throws Exception {
         // don't release the search result yet...
 
         // delete, refresh and do a new search, it should not be there
-        engine.delete(new Engine.Delete("test", "1", newUid(doc), primaryTerm.get()));
+        engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get()));
         engine.refresh("test");
         Engine.Searcher updateSearchResult = engine.acquireSearcher("test");
         MatcherAssert.assertThat(updateSearchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
@@ -1471,7 +1464,7 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception {
         final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
         final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
         final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
-        final MapperService mapperService = createMapperService("test");
+        final MapperService mapperService = createMapperService();
         final Set<String> liveDocs = new HashSet<>();
         try (
             Store store = createStore();
@@ -1488,7 +1481,7 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception {
             for (int i = 0; i < numDocs; i++) {
                 ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
                 if (randomBoolean()) {
-                    engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
+                    engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
                     liveDocs.remove(doc.id());
                 }
                 if (randomBoolean()) {
@@ -1550,7 +1543,7 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc
         final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
         final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
         final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
-        final MapperService mapperService = createMapperService("test");
+        final MapperService mapperService = createMapperService();
         final boolean omitSourceAllTheTime = randomBoolean();
         final Set<String> liveDocs = new HashSet<>();
         final Set<String> liveDocsWithSource = new HashSet<>();
@@ -1574,7 +1567,7 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc
                 boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime;
                 ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null, useRecoverySource);
                 if (randomBoolean()) {
-                    engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
+                    engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
                     liveDocs.remove(doc.id());
                     liveDocsWithSource.remove(doc.id());
                 }
@@ -1826,7 +1819,6 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup
             } else {
                 Engine.Delete delete = (Engine.Delete) operation;
                 return new Engine.Delete(
-                    delete.type(),
                     delete.id(),
                     delete.uid(),
                     newSeqNo,
@@ -1930,7 +1922,6 @@ private int assertOpsOnPrimary(List<Engine.Operation> ops, long currentOpVersion
             0
         );
         BiFunction<Long, Engine.Delete, Engine.Delete> delWithVersion = (version, delete) -> new Engine.Delete(
-            delete.type(),
             delete.id(),
             delete.uid(),
             UNASSIGNED_SEQ_NO,
@@ -1957,7 +1948,6 @@ private int assertOpsOnPrimary(List<Engine.Operation> ops, long currentOpVersion
             term
         );
         TriFunction<Long, Long, Engine.Delete, Engine.Delete> delWithSeq = (seqNo, term, delete) -> new Engine.Delete(
-            delete.type(),
             delete.id(),
             delete.uid(),
             UNASSIGNED_SEQ_NO,
@@ -1984,7 +1974,6 @@ private int assertOpsOnPrimary(List<Engine.Operation> ops, long currentOpVersion
             index.getIfPrimaryTerm()
         );
         Function<Engine.Delete, Engine.Delete> deleteWithCurrentTerm = delete -> new Engine.Delete(
-            delete.type(),
             delete.id(),
             delete.uid(),
             UNASSIGNED_SEQ_NO,
@@ -2371,7 +2360,7 @@ public void testBasicCreatedFlag() throws IOException {
         indexResult = engine.index(index);
         assertFalse(indexResult.isCreated());
 
-        engine.delete(new Engine.Delete("doc", "1", newUid(doc), primaryTerm.get()));
+        engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get()));
 
         index = indexForDoc(doc);
         indexResult = engine.index(index);
@@ -2503,7 +2492,6 @@ public void testSeqNoAndCheckpoints() throws IOException, InterruptedException {
                     // we have some docs indexed, so delete one of them
                     id = randomFrom(indexedIds);
                     final Engine.Delete delete = new Engine.Delete(
-                        "test",
                         id,
                         newUid(id),
                         UNASSIGNED_SEQ_NO,
@@ -2817,7 +2805,6 @@ public void testEnableGcDeletes() throws Exception {
             // Delete document we just added:
             engine.delete(
                 new Engine.Delete(
-                    "test",
                     "1",
                     newUid(doc),
                     UNASSIGNED_SEQ_NO,
@@ -2845,7 +2832,6 @@ public void testEnableGcDeletes() throws Exception {
             // Delete non-existent document
             engine.delete(
                 new Engine.Delete(
-                    "test",
                     "2",
                     newUid("2"),
                     UNASSIGNED_SEQ_NO,
@@ -3234,15 +3220,6 @@ public void testSkipTranslogReplay() throws IOException {
         }
     }
 
-    private Mapping dynamicUpdate() {
-        BuilderContext context = new BuilderContext(
-            Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(),
-            new ContentPath()
-        );
-        final RootObjectMapper root = new RootObjectMapper.Builder("some_type").build(context);
-        return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], emptyMap());
-    }
-
     private Path[] filterExtraFSFiles(Path[] files) {
         List<Path> paths = new ArrayList<>();
         for (Path p : files) {
@@ -3278,7 +3255,6 @@ public void testTranslogReplay() throws IOException {
         }
         assertVisibleCount(engine, numDocs);
         translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
-        translogHandler.mappingUpdate = dynamicUpdate();
 
         engine.close();
         // we need to reuse the engine config, otherwise the parser.mappingModified won't work
@@ -3288,12 +3264,6 @@ public void testTranslogReplay() throws IOException {
 
         assertVisibleCount(engine, numDocs, false);
         assertEquals(numDocs, translogHandler.appliedOperations());
-        if (translogHandler.mappingUpdate != null) {
-            assertEquals(1, translogHandler.getRecoveredTypes().size());
-            assertTrue(translogHandler.getRecoveredTypes().containsKey("test"));
-        } else {
-            assertEquals(0, translogHandler.getRecoveredTypes().size());
-        }
 
         engine.close();
         translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
@@ -3358,7 +3328,7 @@ public void testTranslogReplay() throws IOException {
             assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L));
         }
         assertEquals(flush ? 1 : 2, translogHandler.appliedOperations());
-        engine.delete(new Engine.Delete("test", Integer.toString(randomId), newUid(doc), primaryTerm.get()));
+        engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get()));
         if (randomBoolean()) {
             engine.close();
             engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
@@ -3405,7 +3375,7 @@ public void testRecoverFromForeignTranslog() throws IOException {
             primaryTerm::get,
             seqNo -> {}
         );
-        translog.add(new Translog.Index("test", "SomeBogusId", 0, primaryTerm.get(), "{}".getBytes(Charset.forName("UTF-8"))));
+        translog.add(new Translog.Index("SomeBogusId", 0, primaryTerm.get(), "{}".getBytes(Charset.forName("UTF-8"))));
         assertEquals(generation.translogFileGeneration, translog.currentFileGeneration());
         translog.close();
 
@@ -3689,10 +3659,7 @@ public BytesRef binaryValue() {
                 }
                 // now the engine is closed check we respond correctly
                 expectThrows(AlreadyClosedException.class, () -> engine.index(indexForDoc(doc1)));
-                expectThrows(
-                    AlreadyClosedException.class,
-                    () -> engine.delete(new Engine.Delete("test", "", newUid(doc1), primaryTerm.get()))
-                );
+                expectThrows(AlreadyClosedException.class, () -> engine.delete(new Engine.Delete("", newUid(doc1), primaryTerm.get())));
                 expectThrows(
                     AlreadyClosedException.class,
                     () -> engine.noOp(
@@ -3714,8 +3681,8 @@ public void testDeleteWithFatalError() throws Exception {
         try (Store store = createStore()) {
             EngineConfig.TombstoneDocSupplier tombstoneDocSupplier = new EngineConfig.TombstoneDocSupplier() {
                 @Override
-                public ParsedDocument newDeleteTombstoneDoc(String type, String id) {
-                    ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(type, id);
+                public ParsedDocument newDeleteTombstoneDoc(String id) {
+                    ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(id);
                     parsedDocument.rootDoc().add(new StoredField("foo", "bar") {
                         // this is a hack to add a failure while storing the document, which triggers a tragic event
                         // and in turn fails the engine
@@ -3736,10 +3703,7 @@ public ParsedDocument newNoopTombstoneDoc(String reason) {
             try (InternalEngine engine = createEngine(null, null, null, config)) {
                 final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
                 engine.index(indexForDoc(doc));
-                expectThrows(
-                    IllegalStateException.class,
-                    () -> engine.delete(new Engine.Delete("test", "1", newUid("1"), primaryTerm.get()))
-                );
+                expectThrows(IllegalStateException.class, () -> engine.delete(new Engine.Delete("1", newUid("1"), primaryTerm.get())));
                 assertTrue(engine.isClosed.get());
                 assertSame(tragicException, engine.failedEngine.get());
             }
@@ -3839,7 +3803,6 @@ public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException
         Engine.Index operation = appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5));
         Engine.Index retry = appendOnlyReplica(doc, true, 1, randomIntBetween(0, 5));
         Engine.Delete delete = new Engine.Delete(
-            operation.type(),
             operation.id(),
             operation.uid(),
             Math.max(retry.seqNo(), operation.seqNo()) + 1,
@@ -4000,7 +3963,7 @@ public void testDoubleDeliveryReplica() throws IOException {
             assertEquals(1, topDocs.totalHits.value);
         }
         if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
-            List<Translog.Operation> ops = readAllOperationsInLucene(engine, createMapperService("test"));
+            List<Translog.Operation> ops = readAllOperationsInLucene(engine, createMapperService());
             assertThat(ops.stream().map(o -> o.seqNo()).collect(Collectors.toList()), hasItem(20L));
         }
     }
@@ -4597,7 +4560,6 @@ public void testLookupSeqNoByIdInLucene() throws Exception {
                 } else {
                     operations.add(
                         new Engine.Delete(
-                            doc.type(),
                             doc.id(),
                             EngineTestCase.newUid(doc),
                             seqNo,
@@ -4806,7 +4768,6 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio
                 operations.add(index);
             } else {
                 final Engine.Delete delete = new Engine.Delete(
-                    "test",
                     "1",
                     uid,
                     sequenceNumberSupplier.getAsLong(),
@@ -4868,7 +4829,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio
      */
     public void testVersionConflictIgnoreDeletedDoc() throws IOException {
         ParsedDocument doc = testParsedDocument("1", null, testDocument(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
-        engine.delete(new Engine.Delete("test", "1", newUid("1"), 1));
+        engine.delete(new Engine.Delete("1", newUid("1"), 1));
         for (long seqNo : new long[] { 0, 1, randomNonNegativeLong() }) {
             assertDeletedVersionConflict(
                 engine.index(
@@ -4893,7 +4854,6 @@ public void testVersionConflictIgnoreDeletedDoc() throws IOException {
             assertDeletedVersionConflict(
                 engine.delete(
                     new Engine.Delete(
-                        "test",
                         "1",
                         newUid("1"),
                         UNASSIGNED_SEQ_NO,
@@ -4973,7 +4933,7 @@ protected long doGenerateSeqNoForOperation(Operation operation) {
             assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get()));
             assertThat(noOp.reason(), equalTo(reason));
             if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
-                MapperService mapperService = createMapperService("test");
+                MapperService mapperService = createMapperService();
                 List<Translog.Operation> operationsFromLucene = readAllOperationsInLucene(noOpEngine, mapperService);
                 assertThat(operationsFromLucene, hasSize(maxSeqNo + 2 - localCheckpoint)); // fills n gaps and 2 manual noops.
                 for (int i = 0; i < operationsFromLucene.size(); i++) {
@@ -5050,7 +5010,7 @@ public void testRandomOperations() throws Exception {
             }
         }
         if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
-            List<Translog.Operation> operations = readAllOperationsInLucene(engine, createMapperService("test"));
+            List<Translog.Operation> operations = readAllOperationsInLucene(engine, createMapperService());
             assertThat(operations, hasSize(numOps));
         }
     }
@@ -5207,7 +5167,7 @@ public void testRestoreLocalHistoryFromTranslog() throws IOException {
                     equalTo(0)
                 );
             }
-            assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test"));
+            assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService());
         }
     }
 
@@ -5409,7 +5369,6 @@ public void testSeqNoGenerator() throws IOException {
         ) {
             final String id = "id";
             final Field uidField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE);
-            final String type = "type";
             final Field versionField = new NumericDocValuesField("_version", 0);
             final SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
             final ParseContext.Document document = new ParseContext.Document();
@@ -5423,7 +5382,6 @@ public void testSeqNoGenerator() throws IOException {
                 versionField,
                 seqID,
                 id,
-                type,
                 "routing",
                 Collections.singletonList(document),
                 source,
@@ -5450,7 +5408,6 @@ public void testSeqNoGenerator() throws IOException {
             assertThat(seqNoGenerator.get(), equalTo(seqNo + 1));
 
             final Engine.Delete delete = new Engine.Delete(
-                type,
                 id,
                 new Term("_id", parsedDocument.id()),
                 UNASSIGNED_SEQ_NO,
@@ -5577,7 +5534,7 @@ public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException,
                     Engine.Index operation = appendOnlyPrimary(doc, false, 1);
                     engine.index(operation);
                     if (rarely()) {
-                        engine.delete(new Engine.Delete(operation.type(), operation.id(), operation.uid(), primaryTerm.get()));
+                        engine.delete(new Engine.Delete(operation.id(), operation.uid(), primaryTerm.get()));
                         numDeletes.incrementAndGet();
                     } else {
                         doc = testParsedDocument(
@@ -5915,7 +5872,7 @@ public void testStressUpdateSameDocWhileGettingIt() throws IOException, Interrup
                 );
                // first index an append-only document and then delete it, such that we have it in the tombstones
                 engine.index(doc);
-                engine.delete(new Engine.Delete(doc.type(), doc.id(), doc.uid(), primaryTerm.get()));
+                engine.delete(new Engine.Delete(doc.id(), doc.uid(), primaryTerm.get()));
 
                // now index more append-only docs and refresh so we re-enable the optimization for the unsafe version map
                 ParsedDocument document1 = testParsedDocument(Integer.toString(1), null, testDocumentWithTextField(), SOURCE, null);
@@ -6163,7 +6120,7 @@ public void testHistoryBasedOnSource() throws Exception {
                     engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
                 }
             }
-            MapperService mapperService = createMapperService("test");
+            MapperService mapperService = createMapperService();
             List<Translog.Operation> luceneOps = readAllOperationsBasedOnSource(engine, mapperService);
             assertThat(luceneOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray()));
         }
@@ -6230,7 +6187,7 @@ private void assertOperationHistoryInLucene(List<Engine.Operation> operations) t
                     engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
                 }
             }
-            MapperService mapperService = createMapperService("test");
+            MapperService mapperService = createMapperService();
             List<Translog.Operation> actualOps = readAllOperationsInLucene(engine, mapperService);
             assertThat(actualOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray()));
             assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
@@ -6320,7 +6277,7 @@ public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException {
                 long minRetainSeqNos = engine.getMinRetainedSeqNo();
                 assertThat(minRetainSeqNos, lessThanOrEqualTo(globalCheckpoint.get() + 1));
                 Long[] expectedOps = existingSeqNos.stream().filter(seqno -> seqno >= minRetainSeqNos).toArray(Long[]::new);
-                Set<Long> actualOps = readAllOperationsInLucene(engine, createMapperService("test")).stream()
+                Set<Long> actualOps = readAllOperationsInLucene(engine, createMapperService()).stream()
                     .map(Translog.Operation::seqNo)
                     .collect(Collectors.toSet());
                 assertThat(actualOps, containsInAnyOrder(expectedOps));
@@ -6369,7 +6326,7 @@ public void testLastRefreshCheckpoint() throws Exception {
     }
 
     public void testLuceneSnapshotRefreshesOnlyOnce() throws Exception {
-        final MapperService mapperService = createMapperService("test");
+        final MapperService mapperService = createMapperService();
         final long maxSeqNo = randomLongBetween(10, 50);
         final AtomicLong refreshCounter = new AtomicLong();
         try (
@@ -6484,7 +6441,7 @@ public void testTrackMaxSeqNoOfUpdatesOrDeletesOnPrimary() throws Exception {
                     );
                 }
             } else {
-                Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
+                Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
                 liveDocIds.remove(doc.id());
                 assertThat(
                     "delete operations on primary must advance max_seq_no_of_updates",
@@ -6712,7 +6669,7 @@ public void testPruneAwayDeletedButRetainedIds() throws Exception {
                 index(engine, i);
             }
             engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
-            engine.delete(new Engine.Delete("_doc", "0", newUid("0"), primaryTerm.get()));
+            engine.delete(new Engine.Delete("0", newUid("0"), primaryTerm.get()));
             engine.refresh("test");
            // now we have 2 segments since we added a tombstone plus the old segment with the delete
             try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
@@ -6913,7 +6870,7 @@ private void runTestDeleteFailure(final CheckedBiConsumer<InternalEngine, Engine
             return iw.get();
         }, null, null, config(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
             engine.index(new Engine.Index(newUid("0"), primaryTerm.get(), InternalEngineTests.createParsedDoc("0", null)));
-            final Engine.Delete op = new Engine.Delete("_doc", "0", newUid("0"), primaryTerm.get());
+            final Engine.Delete op = new Engine.Delete("0", newUid("0"), primaryTerm.get());
             consumer.accept(engine, op);
             iw.get().setThrowFailure(() -> new IllegalArgumentException("fatal"));
             final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op));
@@ -7185,7 +7142,7 @@ public void testMaxDocsOnPrimary() throws Exception {
                     operations.add(indexForDoc(createParsedDoc(id, null)));
                 } else {
                     id = "not_found";
-                    operations.add(new Engine.Delete("_doc", id, newUid(id), primaryTerm.get()));
+                    operations.add(new Engine.Delete(id, newUid(id), primaryTerm.get()));
                 }
             }
             for (int i = 0; i < numDocs; i++) {
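
The engine-test hunks above all apply one mechanical change: Engine.Delete loses its leading type argument, so deletes are keyed by id and uid alone. A minimal sketch of the post-patch call shape, assuming the EngineTestCase fixtures (engine, newUid, primaryTerm) that these tests already use:

    // Sketch only; engine, newUid(...) and primaryTerm are EngineTestCase fixtures.
    // Before this patch the constructor also took a leading type string such as "test" or "_doc".
    Engine.Delete delete = new Engine.Delete(
        "1",               // document id
        newUid("1"),       // uid term resolving the _id field
        primaryTerm.get()  // current primary term
    );
    engine.delete(delete);
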
diff --git a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java
index ff569898b4910..05b6c77cad818 100644
--- a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java
@@ -59,7 +59,7 @@ public class LuceneChangesSnapshotTests extends EngineTestCase {
 
     @Before
     public void createMapper() throws Exception {
-        mapperService = createMapperService("test");
+        mapperService = createMapperService();
     }
 
     @Override
@@ -92,7 +92,7 @@ public void testBasics() throws Exception {
             if (randomBoolean()) {
                 engine.index(indexForDoc(doc));
             } else {
-                engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()));
+                engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
             }
             if (rarely()) {
                 if (randomBoolean()) {
@@ -264,7 +264,7 @@ public void testUpdateAndReadChangesConcurrently() throws Exception {
                 if (randomBoolean()) {
                     op = new Engine.Index(newUid(doc), primaryTerm.get(), doc);
                 } else {
-                    op = new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get());
+                    op = new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get());
                 }
             } else {
                 if (randomBoolean()) {
diff --git a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java
index e04bf1a4f20f2..a015443979527 100644
--- a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java
@@ -152,7 +152,7 @@ public void testNoOpEngineStats() throws Exception {
                 for (int i = 0; i < numDocs; i++) {
                     if (randomBoolean()) {
                         String delId = Integer.toString(i);
-                        Engine.DeleteResult result = engine.delete(new Engine.Delete("_doc", delId, newUid(delId), primaryTerm.get()));
+                        Engine.DeleteResult result = engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get()));
                         assertTrue(result.isFound());
                         engine.syncTranslog(); // advance persisted local checkpoint
                         globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
diff --git a/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java b/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java
index 609e972b2c026..95a2db9d74c38 100644
--- a/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java
@@ -112,7 +112,7 @@ public void testReadOnlyEngine() throws Exception {
                 for (int i = 0; i < numDocs; i++) {
                     if (randomBoolean()) {
                         String delId = Integer.toString(i);
-                        engine.delete(new Engine.Delete("test", delId, newUid(delId), primaryTerm.get()));
+                        engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get()));
                     }
                     if (rarely()) {
                         engine.flush();
diff --git a/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java b/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java
index 2854f556bf8d8..071366d7c3345 100644
--- a/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java
+++ b/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java
@@ -81,16 +81,16 @@ public void testDocValue() throws Exception {
             doc.endArray();
         }
         doc.endObject();
-        ParsedDocument d = mapper.parse(new SourceToParse("test", "test", "1", BytesReference.bytes(doc), XContentType.JSON));
+        ParsedDocument d = mapper.parse(new SourceToParse("test", "1", BytesReference.bytes(doc), XContentType.JSON));
         writer.addDocument(d.rootDoc());
 
         BytesRef bytes1 = randomBytes();
         doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1.bytes, bytes1.offset, bytes1.length).endObject();
-        d = mapper.parse(new SourceToParse("test", "test", "2", BytesReference.bytes(doc), XContentType.JSON));
+        d = mapper.parse(new SourceToParse("test", "2", BytesReference.bytes(doc), XContentType.JSON));
         writer.addDocument(d.rootDoc());
 
         doc = XContentFactory.jsonBuilder().startObject().endObject();
-        d = mapper.parse(new SourceToParse("test", "test", "3", BytesReference.bytes(doc), XContentType.JSON));
+        d = mapper.parse(new SourceToParse("test", "3", BytesReference.bytes(doc), XContentType.JSON));
         writer.addDocument(d.rootDoc());
 
         // test remove duplicate value
@@ -106,7 +106,7 @@ public void testDocValue() throws Exception {
             doc.endArray();
         }
         doc.endObject();
-        d = mapper.parse(new SourceToParse("test", "test", "4", BytesReference.bytes(doc), XContentType.JSON));
+        d = mapper.parse(new SourceToParse("test", "4", BytesReference.bytes(doc), XContentType.JSON));
         writer.addDocument(d.rootDoc());
 
         IndexFieldData<?> indexFieldData = getForField("field");
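
In the mapper and fielddata tests the analogous change is to SourceToParse, whose constructor drops the type and keeps (index, id, source, xContentType). A minimal sketch under that assumption, reusing the XContentFactory and BytesReference helpers these tests already import; mapper stands in for whichever DocumentMapper the enclosing test builds:

    // Sketch only; `mapper` is the DocumentMapper built by the enclosing test.
    BytesReference source = BytesReference.bytes(
        XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()
    );
    ParsedDocument parsed = mapper.parse(new SourceToParse("test", "1", source, XContentType.JSON));
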
diff --git a/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java
index 3a10b5c422578..374b7ac9a5271 100644
--- a/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java
@@ -76,7 +76,6 @@ public void testDeeplyNestedCustomTimestampField() throws Exception {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -97,7 +96,6 @@ public void testDeeplyNestedCustomTimestampField() throws Exception {
             mapper.parse(
                 new SourceToParse(
                     "test",
-                    "_doc",
                     "3",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder()
@@ -127,7 +125,6 @@ private void assertDataStreamFieldMapper(String mapping, String timestampFieldNa
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "_doc",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder().startObject().field(timestampFieldName, "2020-12-06T11:04:05.000Z").endObject()
@@ -146,7 +143,6 @@ private void assertDataStreamFieldMapper(String mapping, String timestampFieldNa
             mapper.parse(
                 new SourceToParse(
                     "test",
-                    "_doc",
                     "2",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder().startObject().field("invalid-field-name", "2020-12-06T11:04:05.000Z").endObject()
@@ -165,7 +161,6 @@ private void assertDataStreamFieldMapper(String mapping, String timestampFieldNa
             mapper.parse(
                 new SourceToParse(
                     "test",
-                    "_doc",
                     "3",
                     BytesReference.bytes(
                         XContentFactory.jsonBuilder()
diff --git a/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java
index aa0a7f36a793f..0ad8dc3f138e0 100644
--- a/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java
@@ -1063,8 +1063,7 @@ public void testParseToJsonAndParse() throws Exception {
         // reparse it
         DocumentMapper builtDocMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, builtMapping);
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1.json"));
-        Document doc = builtDocMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        Document doc = builtDocMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.getBinaryValue(builtDocMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1")));
         assertThat(doc.get(builtDocMapper.mappers().getMapper("name.first").name()), equalTo("fred"));
     }
@@ -1076,8 +1075,7 @@ public void testSimpleParser() throws Exception {
         assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
 
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1.json"));
-        Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1")));
         assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("fred"));
     }
@@ -1086,8 +1084,7 @@ public void testSimpleParserNoTypeNoId() throws Exception {
         String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/simple/test-mapping.json");
         DocumentMapper docMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, mapping);
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1-notype-noid.json"));
-        Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
         assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1")));
         assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("fred"));
     }
@@ -1109,7 +1106,7 @@ public void testNoDocumentSent() throws Exception {
         BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
         MapperParsingException e = expectThrows(
             MapperParsingException.class,
-            () -> docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
+            () -> docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON))
         );
         assertThat(e.getMessage(), equalTo("failed to parse, document is empty"));
     }
diff --git a/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java
index f40ffa600ba8c..dee5db4e31253 100644
--- a/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java
@@ -366,7 +366,7 @@ private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentB
                 .field("quux", "3.2") // float detected through numeric detection
                 .endObject()
         );
-        ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "_doc", "id", source, builder.contentType()));
+        ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "id", source, builder.contentType()));
         Mapping update = parsedDocument.dynamicMappingsUpdate();
         assertNotNull(update);
         assertThat(((FieldMapper) update.root().getMapper("foo")).fieldType().typeName(), equalTo("float"));
diff --git a/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java
index 117d66f50a178..639de9d314641 100644
--- a/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java
@@ -110,7 +110,6 @@ public void testInjectIntoDocDuringParsing() throws Exception {
         ParsedDocument doc = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder().startObject().field("a", "100").startObject("b").field("c", 42).endObject().endObject()
@@ -148,7 +147,6 @@ public void testExplicitEnabled() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
                 XContentType.JSON
@@ -179,7 +177,6 @@ public void testDisabled() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
                 XContentType.JSON
diff --git a/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java b/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java
index cc4626bc89641..9c9c0440231de 100644
--- a/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java
@@ -53,7 +53,7 @@ public void testSimple() throws Exception {
 
         byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json");
         ParsedDocument parsedDoc = mapperService.documentMapper()
-            .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray(json), XContentType.JSON));
+            .parse(new SourceToParse("test", "1", new BytesArray(json), XContentType.JSON));
         client().admin()
             .indices()
             .preparePutMapping("test")
diff --git a/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java
index 718e945042218..e897abad405d5 100644
--- a/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java
@@ -72,7 +72,6 @@ public void testIncludeInObjectNotAllowed() throws Exception {
             docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_id", "1").endObject()),
                     XContentType.JSON
@@ -91,7 +90,7 @@ public void testDefaults() throws IOException {
         Settings indexSettings = Settings.EMPTY;
         MapperService mapperService = createIndex("test", indexSettings).mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
-        ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
+        ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON));
         IndexableField[] fields = document.rootDoc().getFields(IdFieldMapper.NAME);
         assertEquals(1, fields.length);
         assertEquals(IndexOptions.DOCS, fields[0].fieldType().indexOptions());
diff --git a/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java
index b27eb54fbfe59..c4225cb576550 100644
--- a/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java
@@ -63,7 +63,6 @@ public void testDefaultDisabledIndexMapper() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
                 XContentType.JSON
diff --git a/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java
index 33306c5842674..07fa602272b3d 100644
--- a/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java
@@ -79,7 +79,6 @@ public void testStoreCidr() throws Exception {
             ParsedDocument doc = mapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", entry.getKey()).endObject()),
                     XContentType.JSON
diff --git a/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java
index b5989d93b520d..7e00a463124f1 100644
--- a/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java
@@ -56,9 +56,7 @@ public void testMergeMultiField() throws Exception {
         assertThat(mapperService.fieldType("name.indexed"), nullValue());
 
         BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject());
-        Document doc = mapperService.documentMapper()
-            .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
         IndexableField f = doc.getField("name");
         assertThat(f, notNullValue());
         f = doc.getField("name.indexed");
@@ -74,9 +72,7 @@ public void testMergeMultiField() throws Exception {
         assertThat(mapperService.fieldType("name.not_indexed2"), nullValue());
         assertThat(mapperService.fieldType("name.not_indexed3"), nullValue());
 
-        doc = mapperService.documentMapper()
-            .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
         f = doc.getField("name");
         assertThat(f, notNullValue());
         f = doc.getField("name.indexed");
@@ -113,9 +109,7 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception {
         assertThat(mapperService.fieldType("name.indexed"), nullValue());
 
         BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject());
-        Document doc = mapperService.documentMapper()
-            .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
         IndexableField f = doc.getField("name");
         assertThat(f, notNullValue());
         f = doc.getField("name.indexed");
@@ -131,9 +125,7 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception {
         assertThat(mapperService.fieldType("name.not_indexed2"), nullValue());
         assertThat(mapperService.fieldType("name.not_indexed3"), nullValue());
 
-        doc = mapperService.documentMapper()
-            .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
         f = doc.getField("name");
         assertThat(f, notNullValue());
         f = doc.getField("name.indexed");
diff --git a/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java
index 918f5b325d81a..4027cf20baba8 100644
--- a/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java
@@ -76,9 +76,7 @@ private void testMultiField(String mapping) throws Exception {
             .merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
 
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json"));
-        Document doc = mapperService.documentMapper()
-            .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
 
         IndexableField f = doc.getField("name");
         assertThat(f.name(), equalTo("name"));
@@ -157,8 +155,7 @@ public void testBuildThenParse() throws Exception {
             .parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(builtMapping));
 
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json"));
-        Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON))
-            .rootDoc();
+        Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc();
 
         IndexableField f = doc.getField("name");
         assertThat(f.name(), equalTo("name"));
diff --git a/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java
index 045cc97275eb7..fe3ce5da6c90a 100644
--- a/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java
@@ -86,7 +86,6 @@ public void testEmptyNested() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").nullField("nested1").endObject()),
                 XContentType.JSON
@@ -98,7 +97,6 @@ public void testEmptyNested() throws Exception {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder().startObject().field("field", "value").startArray("nested").endArray().endObject()
@@ -135,7 +133,6 @@ public void testSingleNested() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -161,7 +158,6 @@ public void testSingleNested() throws Exception {
         doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -230,7 +226,6 @@ public void testMultiNested() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -325,7 +320,6 @@ public void testMultiObjectAndNested1() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -421,7 +415,6 @@ public void testMultiObjectAndNested2() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -516,7 +509,6 @@ public void testMultiRootAndNested1() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -611,7 +603,6 @@ public void testMultipleLevelsIncludeRoot1() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                MapperService.SINGLE_MAPPING_NAME,
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -681,7 +672,6 @@ public void testMultipleLevelsIncludeRoot2() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                MapperService.SINGLE_MAPPING_NAME,
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -766,7 +756,6 @@ public void testMultipleLevelsIncludeRootWithMerge() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                MapperService.SINGLE_MAPPING_NAME,
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -822,7 +811,6 @@ public void testNestedArrayStrict() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -975,7 +963,7 @@ public void testLimitNestedDocsDefaultSettings() throws Exception {
             docBuilder.endArray();
         }
         docBuilder.endObject();
-        SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
+        SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source1));
         assertEquals(
             "The number of nested documents has exceeded the allowed limit of ["
@@ -1020,7 +1008,7 @@ public void testLimitNestedDocs() throws Exception {
             docBuilder.endArray();
         }
         docBuilder.endObject();
-        SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
+        SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
         ParsedDocument doc = docMapper.parse(source1);
         assertThat(doc.docs().size(), equalTo(3));
 
@@ -1037,7 +1025,7 @@ public void testLimitNestedDocs() throws Exception {
             docBuilder2.endArray();
         }
         docBuilder2.endObject();
-        SourceToParse source2 = new SourceToParse("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
+        SourceToParse source2 = new SourceToParse("test1", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
         assertEquals(
             "The number of nested documents has exceeded the allowed limit of ["
@@ -1089,7 +1077,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception {
             docBuilder.endArray();
         }
         docBuilder.endObject();
-        SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
+        SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON);
         ParsedDocument doc = docMapper.parse(source1);
         assertThat(doc.docs().size(), equalTo(3));
 
@@ -1111,7 +1099,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception {
 
         }
         docBuilder2.endObject();
-        SourceToParse source2 = new SourceToParse("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
+        SourceToParse source2 = new SourceToParse("test1", "2", BytesReference.bytes(docBuilder2), XContentType.JSON);
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
         assertEquals(
             "The number of nested documents has exceeded the allowed limit of ["
diff --git a/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java b/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java
index 9085c637ef89e..95c21823bfcae 100644
--- a/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java
@@ -65,7 +65,6 @@ public void testNullValueObject() throws IOException {
         ParsedDocument doc = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder().startObject().startObject("obj1").endObject().field("value1", "test1").endObject()
@@ -79,7 +78,6 @@ public void testNullValueObject() throws IOException {
         doc = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("obj1").field("value1", "test1").endObject()),
                 XContentType.JSON
@@ -91,7 +89,6 @@ public void testNullValueObject() throws IOException {
         doc = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
diff --git a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java
index c82f918e55240..079475d9f3554 100644
--- a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java
@@ -59,7 +59,6 @@ public void testDifferentInnerObjectTokenFailure() throws Exception {
             defaultMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "1",
                     new BytesArray(
                         " {\n"
diff --git a/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java b/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java
index 4976372ceaf23..e98dc399b3b41 100644
--- a/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java
@@ -53,7 +53,7 @@ public void testSimple() throws Exception {
 
         byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json");
         ParsedDocument parsedDoc = mapperService.documentMapper()
-            .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray(json), XContentType.JSON));
+            .parse(new SourceToParse("test", "1", new BytesArray(json), XContentType.JSON));
         client().admin()
             .indices()
             .preparePutMapping("test")
diff --git a/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java
index a56521476c2d8..92236ad34013b 100644
--- a/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java
@@ -53,7 +53,6 @@ public void testRoutingMapper() throws Exception {
         ParsedDocument doc = docMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
                 XContentType.JSON,
@@ -75,7 +74,6 @@ public void testIncludeInObjectNotAllowed() throws Exception {
             docMapper.parse(
                 new SourceToParse(
                     "test",
-                    "type",
                     "1",
                     BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject()),
                     XContentType.JSON
diff --git a/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java
index e37ef76ce9443..3cb16b452cbf4 100644
--- a/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java
@@ -69,7 +69,6 @@ public void testNoFormat() throws Exception {
         ParsedDocument doc = documentMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()),
                 XContentType.JSON
@@ -82,7 +81,6 @@ public void testNoFormat() throws Exception {
         doc = documentMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(XContentFactory.smileBuilder().startObject().field("field", "value").endObject()),
                 XContentType.SMILE
@@ -111,7 +109,6 @@ public void testIncludes() throws Exception {
         ParsedDocument doc = documentMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -156,7 +153,6 @@ public void testExcludes() throws Exception {
         ParsedDocument doc = documentMapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
@@ -325,8 +321,8 @@ public void testSourceObjectContainsExtraTokens() throws Exception {
             .parse("type", new CompressedXContent(mapping));
 
         try {
-            documentMapper.parse(new SourceToParse("test", "type", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object
-                                                                                                                    // (invalid JSON)
+            documentMapper.parse(new SourceToParse("test", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object
+                                                                                                            // (invalid JSON)
             fail("Expected parse exception");
         } catch (MapperParsingException e) {
             assertNotNull(e.getRootCause());
diff --git a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java
index f91120d4cf199..65776001381a0 100644
--- a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java
@@ -111,7 +111,6 @@ public void testBytesAndNumericRepresentation() throws Exception {
         ParsedDocument doc = mapper.parse(
             new SourceToParse(
                 "test",
-                "type",
                 "1",
                 BytesReference.bytes(
                     XContentFactory.jsonBuilder()
diff --git a/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java
index d61c25c5ec622..89eee655ca9d4 100644
--- a/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java
@@ -73,7 +73,7 @@ public void testDocValuesSingleType() throws Exception {
     public static void testDocValues(Function<String, IndexService> createIndex) throws IOException {
         MapperService mapperService = createIndex.apply("test").mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
-        ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
+        ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON));
 
         Directory dir = newDirectory();
         IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
@@ -100,7 +100,7 @@ public void testDefaults() throws IOException {
         Settings indexSettings = Settings.EMPTY;
         MapperService mapperService = createIndex("test", indexSettings).mapperService();
         DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE);
-        ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON));
+        ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON));
         assertEquals(Collections.<IndexableField>emptyList(), Arrays.asList(document.rootDoc().getFields(TypeFieldMapper.NAME)));
     }
 }
diff --git a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java
index 6e2efe56a69d7..a88db8473cae0 100644
--- a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java
+++ b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java
@@ -57,6 +57,7 @@
 import org.opensearch.index.engine.InternalEngineTests;
 import org.opensearch.index.engine.SegmentsStats;
 import org.opensearch.index.engine.VersionConflictEngineException;
+import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.mapper.SeqNoFieldMapper;
 import org.opensearch.index.seqno.SeqNoStats;
 import org.opensearch.index.seqno.SequenceNumbers;
@@ -75,7 +76,6 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.CyclicBarrier;
@@ -315,10 +315,7 @@ public void testCheckpointsAdvance() throws Exception {
     }
 
     public void testConflictingOpsOnReplica() throws Exception {
-        Map<String, String> mappings = Collections.singletonMap(
-            "type",
-            "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"
-        );
+        String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}";
         try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) {
             shards.startAll();
             List<IndexShard> replicas = shards.getReplicas();
@@ -345,10 +342,7 @@ public void testConflictingOpsOnReplica() throws Exception {
     }
 
     public void testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exception {
-        Map<String, String> mappings = Collections.singletonMap(
-            "type",
-            "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"
-        );
+        String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}";
         try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) {
             shards.startAll();
             long primaryPrimaryTerm = shards.getPrimary().getPendingPrimaryTerm();
@@ -398,10 +392,7 @@ public void testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exce
     }
 
     public void testReplicaOperationWithConcurrentPrimaryPromotion() throws Exception {
-        Map<String, String> mappings = Collections.singletonMap(
-            "type",
-            "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"
-        );
+        String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}";
         try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(1, mappings))) {
             shards.startAll();
             long primaryPrimaryTerm = shards.getPrimary().getPendingPrimaryTerm();
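
With a single mapping type, the replication tests no longer need a per-type Map of mapping sources; buildIndexMetadata now takes the mapping as one JSON string keyed by MapperService.SINGLE_MAPPING_NAME. A sketch of the new shape, assuming the ReplicationGroup and buildIndexMetadata(int, String) test-harness helpers used above:

    // Sketch only; ReplicationGroup and buildIndexMetadata(int, String) are test-harness helpers.
    String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME
        + "\": { \"properties\": { \"f\": { \"type\": \"keyword\" } } } }";
    try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) {
        shards.startAll();
    }
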
diff --git a/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java
index cccb2f470195b..add2ecd34e3af 100644
--- a/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java
+++ b/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java
@@ -75,10 +75,8 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.EnumSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.Future;
@@ -159,7 +157,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception {
                 1,
                 randomNonNegativeLong(),
                 false,
-                new SourceToParse("index", "type", "replica", new BytesArray("{}"), XContentType.JSON)
+                new SourceToParse("index", "replica", new BytesArray("{}"), XContentType.JSON)
             );
             shards.promoteReplicaToPrimary(promotedReplica).get();
             oldPrimary.close("demoted", randomBoolean());
@@ -173,7 +171,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception {
                 promotedReplica.applyIndexOperationOnPrimary(
                     Versions.MATCH_ANY,
                     VersionType.INTERNAL,
-                    new SourceToParse("index", "type", "primary", new BytesArray("{}"), XContentType.JSON),
+                    new SourceToParse("index", "primary", new BytesArray("{}"), XContentType.JSON),
                     SequenceNumbers.UNASSIGNED_SEQ_NO,
                     0,
                     IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
@@ -358,10 +356,7 @@ public void testReplicaRollbackStaleDocumentsInPeerRecovery() throws Exception {
     }
 
     public void testResyncAfterPrimaryPromotion() throws Exception {
-        Map<String, String> mappings = Collections.singletonMap(
-            "type",
-            "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"
-        );
+        String mappings = "{ \"_doc\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}";
         try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) {
             shards.startAll();
             int initialDocs = randomInt(10);
diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java
index c9179c9531c29..3bc5218e2f61f 100644
--- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java
@@ -1861,7 +1861,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) {
         assertEquals(0, postDelete.get());
         assertEquals(0, postDeleteException.get());
 
-        deleteDoc(shard, "_doc", "1");
+        deleteDoc(shard, "1");
 
         assertEquals(2, preIndex.get());
         assertEquals(1, postIndexCreate.get());
@@ -1889,7 +1889,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) {
         assertEquals(1, postDelete.get());
         assertEquals(0, postDeleteException.get());
         try {
-            deleteDoc(shard, "_doc", "1");
+            deleteDoc(shard, "1");
             fail();
         } catch (AlreadyClosedException e) {
 
@@ -2184,7 +2184,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException {
         final IndexShard shard = newStartedShard(false);
         long primaryTerm = shard.getOperationPrimaryTerm();
         shard.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete
-        shard.applyDeleteOperationOnReplica(1, primaryTerm, 2, "_doc", "id");
+        shard.applyDeleteOperationOnReplica(1, primaryTerm, 2, "id");
        shard.getEngine().rollTranslogGeneration(); // isolate the delete in its own generation
         shard.applyIndexOperationOnReplica(
             0,
@@ -2192,7 +2192,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException {
             1,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
             false,
-            new SourceToParse(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON)
+            new SourceToParse(shard.shardId().getIndexName(), "id", new BytesArray("{}"), XContentType.JSON)
         );
         shard.applyIndexOperationOnReplica(
             3,
@@ -2200,7 +2200,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException {
             3,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
             false,
-            new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON)
+            new SourceToParse(shard.shardId().getIndexName(), "id-3", new BytesArray("{}"), XContentType.JSON)
         );
        // Flushing a new commit with local checkpoint=1 allows recovery to skip translog gen #1.
         shard.flush(new FlushRequest().force(true).waitIfOngoing(true));
@@ -2210,7 +2210,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException {
             3,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
             false,
-            new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON)
+            new SourceToParse(shard.shardId().getIndexName(), "id-2", new BytesArray("{}"), XContentType.JSON)
         );
         shard.applyIndexOperationOnReplica(
             5,
@@ -2218,7 +2218,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException {
             1,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
             false,
-            new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON)
+            new SourceToParse(shard.shardId().getIndexName(), "id-5", new BytesArray("{}"), XContentType.JSON)
         );
         shard.sync(); // advance local checkpoint
 
@@ -2358,13 +2358,7 @@ public void testRecoverFromStoreWithNoOps() throws IOException {
         // start a replica shard and index the second doc
         final IndexShard otherShard = newStartedShard(false);
         updateMappings(otherShard, shard.indexSettings().getIndexMetadata());
-        SourceToParse sourceToParse = new SourceToParse(
-            shard.shardId().getIndexName(),
-            "_doc",
-            "1",
-            new BytesArray("{}"),
-            XContentType.JSON
-        );
+        SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), "1", new BytesArray("{}"), XContentType.JSON);
         otherShard.applyIndexOperationOnReplica(
             1,
             otherShard.getOperationPrimaryTerm(),
@@ -2498,7 +2492,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception {
             1,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
             false,
-            new SourceToParse(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON)
+            new SourceToParse(indexName, "doc-0", new BytesArray("{}"), XContentType.JSON)
         );
         flushShard(shard);
         shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here.
@@ -2508,7 +2502,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception {
             1,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
             false,
-            new SourceToParse(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON)
+            new SourceToParse(indexName, "doc-1", new BytesArray("{}"), XContentType.JSON)
         );
         flushShard(shard);
         assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1"));
@@ -2520,7 +2514,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception {
             1,
             IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
             false,
-            new SourceToParse(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON)
+            new SourceToParse(indexName, "doc-2", new BytesArray("{}"), XContentType.JSON)
         );
         flushShard(shard);
         assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2"));
@@ -2708,7 +2702,7 @@ public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\", \"fielddata\": true }}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\", \"fielddata\": true }}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -2761,7 +2755,7 @@ public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException {
     public void testIndexingOperationListenersIsInvokedOnRecovery() throws IOException {
         IndexShard shard = newStartedShard(true);
         indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
-        deleteDoc(shard, "_doc", "0");
+        deleteDoc(shard, "0");
         indexDoc(shard, "_doc", "1", "{\"foo\" : \"bar\"}");
         shard.refresh("test");
 
@@ -2848,7 +2842,7 @@ public void testTranslogRecoverySyncsTranslog() throws IOException {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -2893,7 +2887,7 @@ public void testRecoverFromTranslog() throws IOException {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, randomLongBetween(1, Long.MAX_VALUE))
             .build();
@@ -2905,7 +2899,6 @@ public void testRecoverFromTranslog() throws IOException {
             if (randomBoolean()) {
                 operations.add(
                     new Translog.Index(
-                        "_doc",
                         "1",
                         0,
                         primary.getPendingPrimaryTerm(),
@@ -2919,7 +2912,6 @@ public void testRecoverFromTranslog() throws IOException {
                 // corrupt entry
                 operations.add(
                     new Translog.Index(
-                        "_doc",
                         "2",
                         1,
                         primary.getPendingPrimaryTerm(),
@@ -2978,7 +2970,7 @@ public void testShardActiveDuringPeerRecovery() throws IOException {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -3028,7 +3020,7 @@ public void testRefreshListenersDuringPeerRecovery() throws IOException {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -3101,7 +3093,7 @@ public void testRecoverFromLocalShard() throws IOException {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("source")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -3238,7 +3230,7 @@ public void testDocStats() throws Exception {
             );
             for (final Integer i : ids) {
                 final String id = Integer.toString(i);
-                deleteDoc(indexShard, "_doc", id);
+                deleteDoc(indexShard, id);
                 indexDoc(indexShard, "_doc", id);
             }
             // Need to update and sync the global checkpoint and the retention leases for the soft-deletes retention MergePolicy.
@@ -3355,7 +3347,11 @@ public void testEstimateTotalDocSize() throws Exception {
 
             // Do some updates and deletes, then recheck the correlation again.
             for (int i = 0; i < numDoc / 2; i++) {
-                indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}");
+                if (randomBoolean()) {
+                    deleteDoc(indexShard, Integer.toString(i));
+                } else {
+                    indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}");
+                }
             }
             if (randomBoolean()) {
                 indexShard.flush(new FlushRequest());
@@ -3705,7 +3701,6 @@ private Result indexOnReplicaWithGaps(final IndexShard indexShard, final int ope
                 }
                 SourceToParse sourceToParse = new SourceToParse(
                     indexShard.shardId().getIndexName(),
-                    "_doc",
                     id,
                     new BytesArray("{}"),
                     XContentType.JSON
@@ -3742,7 +3737,7 @@ public void testIsSearchIdle() throws Exception {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -3792,7 +3787,7 @@ public void testScheduledRefresh() throws Exception {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -3866,7 +3861,7 @@ public void testRefreshIsNeededWithRefreshListeners() throws IOException, Interr
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -3932,10 +3927,7 @@ public void testOnCloseStats() throws IOException {
     public void testSupplyTombstoneDoc() throws Exception {
         IndexShard shard = newStartedShard();
         String id = randomRealisticUnicodeOfLengthBetween(1, 10);
-        ParsedDocument deleteTombstone = shard.getEngine()
-            .config()
-            .getTombstoneDocSupplier()
-            .newDeleteTombstoneDoc(MapperService.SINGLE_MAPPING_NAME, id);
+        ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc(id);
         assertThat(deleteTombstone.docs(), hasSize(1));
         ParseContext.Document deleteDoc = deleteTombstone.docs().get(0);
         assertThat(
@@ -4166,14 +4158,13 @@ public void testResetEngineWithBrokenTranslog() throws Exception {
         updateMappings(
             shard,
             IndexMetadata.builder(shard.indexSettings.getIndexMetadata())
-                .putMapping("_doc", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+                .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
                 .build()
         );
         final List<Translog.Operation> operations = Stream.concat(
             IntStream.range(0, randomIntBetween(0, 10))
                 .mapToObj(
                     n -> new Translog.Index(
-                        "_doc",
                         "1",
                         0,
                         shard.getPendingPrimaryTerm(),
@@ -4187,7 +4178,6 @@ public void testResetEngineWithBrokenTranslog() throws Exception {
             IntStream.range(0, randomIntBetween(1, 10))
                 .mapToObj(
                     n -> new Translog.Index(
-                        "_doc",
                         "1",
                         0,
                         shard.getPendingPrimaryTerm(),
@@ -4297,7 +4287,7 @@ public void testTypelessGet() throws IOException {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("index")
-            .putMapping("some_type", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -4350,7 +4340,7 @@ public void testDoNotTrimCommitsWhenOpenReadOnlyEngine() throws Exception {
                 1,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
                 false,
-                new SourceToParse(shard.shardId.getIndexName(), "_doc", Long.toString(i), new BytesArray("{}"), XContentType.JSON)
+                new SourceToParse(shard.shardId.getIndexName(), Long.toString(i), new BytesArray("{}"), XContentType.JSON)
             );
             shard.updateGlobalCheckpointOnReplica(shard.getLocalCheckpoint(), "test");
             if (randomInt(100) < 10) {
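
The typeless SourceToParse constructor recurs throughout these tests; a minimal sketch of the new shape (package locations are an assumption based on this era of the codebase):

    import org.opensearch.common.bytes.BytesArray;
    import org.opensearch.common.xcontent.XContentType;
    import org.opensearch.index.mapper.SourceToParse;

    // Typeless source: index name, document id, raw source bytes, and content type.
    SourceToParse source = new SourceToParse("index", "1", new BytesArray("{}"), XContentType.JSON);
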
diff --git a/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java b/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java
index 63734831d0964..6ac5f96e11f34 100644
--- a/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java
@@ -161,7 +161,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) {
             logger
         );
         ParsedDocument doc = InternalEngineTests.createParsedDoc("1", null);
-        Engine.Delete delete = new Engine.Delete("test", "1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong());
+        Engine.Delete delete = new Engine.Delete("1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong());
         Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong(), doc);
         compositeListener.postDelete(randomShardId, delete, new Engine.DeleteResult(1, 0, SequenceNumbers.UNASSIGNED_SEQ_NO, true));
         assertEquals(0, preIndex.get());
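
A sketch of the typeless Engine.Delete construction exercised above; the third argument is assumed to be the primary term, matching the randomNonNegativeLong() the test passes:

    import org.apache.lucene.index.Term;
    import org.opensearch.index.engine.Engine;
    import org.opensearch.index.mapper.Uid;

    // A delete is now identified by the document id and its _id term alone.
    Engine.Delete delete = new Engine.Delete("1", new Term("_id", Uid.encodeId("1")), 1L);
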
diff --git a/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java
index 1c3fa908f11da..631fa384de335 100644
--- a/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java
@@ -94,7 +94,7 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception {
             shard.applyIndexOperationOnPrimary(
                 Versions.MATCH_ANY,
                 VersionType.INTERNAL,
-                new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
+                new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
                 SequenceNumbers.UNASSIGNED_SEQ_NO,
                 0,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
@@ -176,7 +176,7 @@ public void testSyncerOnClosingShard() throws Exception {
             shard.applyIndexOperationOnPrimary(
                 Versions.MATCH_ANY,
                 VersionType.INTERNAL,
-                new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
+                new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
                 SequenceNumbers.UNASSIGNED_SEQ_NO,
                 0,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
diff --git a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java
index 97fd7fc8f279f..eea316d9a9370 100644
--- a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java
@@ -440,17 +440,7 @@ private Engine.IndexResult index(String id, String testFieldValue) throws IOExce
         document.add(seqID.seqNoDocValue);
         document.add(seqID.primaryTerm);
         BytesReference source = new BytesArray(new byte[] { 1 });
-        ParsedDocument doc = new ParsedDocument(
-            versionField,
-            seqID,
-            id,
-            "test",
-            null,
-            Arrays.asList(document),
-            source,
-            XContentType.JSON,
-            null
-        );
+        ParsedDocument doc = new ParsedDocument(versionField, seqID, id, null, Arrays.asList(document), source, XContentType.JSON, null);
         Engine.Index index = new Engine.Index(new Term("_id", doc.id()), engine.config().getPrimaryTermSupplier().getAsLong(), doc);
         return engine.index(index);
     }
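
The collapsed ParsedDocument call above drops the type argument that used to sit between the id and the routing value; a sketch with the argument roles spelled out, reusing the test's local variables (roles other than id, source, and content type are assumptions):

    ParsedDocument doc = new ParsedDocument(
        versionField,              // _version doc-values field
        seqID,                     // sequence-number and primary-term fields
        id,                        // document id (the removed type used to follow here)
        null,                      // routing
        Arrays.asList(document),   // Lucene documents
        source,                    // source bytes
        XContentType.JSON,
        null                       // no dynamic mapping update
    );
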
diff --git a/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java b/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java
index c3ce944f51588..1b8809ba04278 100644
--- a/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java
@@ -142,7 +142,7 @@ public void setup() throws IOException {
         final IndexMetadata.Builder metadata = IndexMetadata.builder(routing.getIndexName())
             .settings(settings)
             .primaryTerm(0, randomIntBetween(1, 100))
-            .putMapping("_doc", "{ \"properties\": {} }");
+            .putMapping("{ \"properties\": {} }");
         indexMetadata = metadata.build();
 
         clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(Metadata.builder().put(indexMetadata, false).build()).build();
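
The single-argument putMapping takes only the mapping JSON; a sketch of building typeless index metadata, with the version-created setting included on the assumption that IndexMetadata.Builder.build() requires it:

    import org.opensearch.Version;
    import org.opensearch.cluster.metadata.IndexMetadata;
    import org.opensearch.common.settings.Settings;

    IndexMetadata metadata = IndexMetadata.builder("test")
        .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\" } } }")
        .settings(Settings.builder()
            .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
            .build())
        .primaryTerm(0, 1)
        .build();
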
diff --git a/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java
index a04be37176389..5dd053574268e 100644
--- a/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java
@@ -59,7 +59,7 @@ public void testGetForUpdate() throws IOException {
 
             .build();
         IndexMetadata metadata = IndexMetadata.builder("test")
-            .putMapping("test", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
@@ -74,7 +74,7 @@ public void testGetForUpdate() throws IOException {
             assertEquals(searcher.getIndexReader().maxDoc(), 1); // we refreshed
         }
 
-        Engine.IndexResult test1 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar");
+        Engine.IndexResult test1 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar");
         assertTrue(primary.getEngine().refreshNeeded());
         GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM);
         assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}");
@@ -89,7 +89,7 @@ public void testGetForUpdate() throws IOException {
         }
 
         // now again from the reader
-        Engine.IndexResult test2 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar");
+        Engine.IndexResult test2 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar");
         assertTrue(primary.getEngine().refreshNeeded());
         testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM);
         assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}");
@@ -134,7 +134,6 @@ private void runGetFromTranslogWithOptions(
 
         IndexMetadata metadata = IndexMetadata.builder("test")
             .putMapping(
-                MapperService.SINGLE_MAPPING_NAME,
                 "{ \"properties\": { \"foo\":  { \"type\": "
                     + fieldType
                     + ", \"store\": true }, "
@@ -158,7 +157,7 @@ private void runGetFromTranslogWithOptions(
             assertEquals(searcher.getIndexReader().maxDoc(), 1); // we refreshed
         }
 
-        Engine.IndexResult test1 = indexDoc(primary, MapperService.SINGLE_MAPPING_NAME, "1", docToIndex, XContentType.JSON, "foobar");
+        Engine.IndexResult test1 = indexDoc(primary, "1", docToIndex, XContentType.JSON, "foobar");
         assertTrue(primary.getEngine().refreshNeeded());
         GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM);
         assertEquals(new String(testGet1.source() == null ? new byte[0] : testGet1.source(), StandardCharsets.UTF_8), expectedResult);
@@ -172,7 +171,7 @@ private void runGetFromTranslogWithOptions(
             assertEquals(searcher.getIndexReader().maxDoc(), 2);
         }
 
-        Engine.IndexResult test2 = indexDoc(primary, MapperService.SINGLE_MAPPING_NAME, "2", docToIndex, XContentType.JSON, "foobar");
+        Engine.IndexResult test2 = indexDoc(primary, "2", docToIndex, XContentType.JSON, "foobar");
         assertTrue(primary.getEngine().refreshNeeded());
         GetResult testGet2 = primary.getService()
             .get("2", new String[] { "foo" }, true, 1, VersionType.INTERNAL, FetchSourceContext.FETCH_SOURCE);
@@ -204,7 +203,7 @@ public void testTypelessGetForUpdate() throws IOException {
             .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
             .build();
         IndexMetadata metadata = IndexMetadata.builder("index")
-            .putMapping("some_type", "{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
+            .putMapping("{ \"properties\": { \"foo\":  { \"type\": \"text\"}}}")
             .settings(settings)
             .primaryTerm(0, 1)
             .build();
diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java
index 5614e07d7104d..f1eb5666f6b7f 100644
--- a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java
+++ b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java
@@ -358,13 +358,13 @@ public void testSimpleOperations() throws IOException {
             assertThat(snapshot, SnapshotMatchers.size(0));
         }
 
-        addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
+        addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
         try (Translog.Snapshot snapshot = translog.newSnapshot()) {
             assertThat(snapshot, SnapshotMatchers.equalsTo(ops));
             assertThat(snapshot.totalOperations(), equalTo(ops.size()));
         }
 
-        addToTranslogAndList(translog, ops, new Translog.Delete("test", "2", 1, primaryTerm.get(), newUid("2")));
+        addToTranslogAndList(translog, ops, new Translog.Delete("2", 1, primaryTerm.get(), newUid("2")));
         try (Translog.Snapshot snapshot = translog.newSnapshot()) {
             assertThat(snapshot, SnapshotMatchers.equalsTo(ops));
             assertThat(snapshot.totalOperations(), equalTo(ops.size()));
@@ -454,34 +454,34 @@ public void testStats() throws IOException {
             assertThat(stats.estimatedNumberOfOperations(), equalTo(0));
         }
         assertThat((int) firstOperationPosition, greaterThan(CodecUtil.headerLength(TranslogHeader.TRANSLOG_CODEC)));
-        translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
+        translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
 
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(1));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(162L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(157L));
             assertThat(stats.getUncommittedOperations(), equalTo(1));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(107L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(102L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
         }
 
-        translog.add(new Translog.Delete("test", "2", 1, primaryTerm.get(), newUid("2")));
+        translog.add(new Translog.Delete("2", 1, primaryTerm.get(), newUid("2")));
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(2));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(210L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(200L));
             assertThat(stats.getUncommittedOperations(), equalTo(2));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(155L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(145L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
         }
 
-        translog.add(new Translog.Delete("test", "3", 2, primaryTerm.get(), newUid("3")));
+        translog.add(new Translog.Delete("3", 2, primaryTerm.get(), newUid("3")));
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(3));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(258L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(243L));
             assertThat(stats.getUncommittedOperations(), equalTo(3));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(203L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(188L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
         }
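
The revised size expectations are consistent with dropping one length-prefixed string per typed operation: assuming the type "test" was serialized as a 1-byte length prefix plus 4 UTF-8 bytes, each index or delete shrinks by 5 bytes (162 -> 157 for one op, 210 -> 200 for two, 258 -> 243 for three), and the corrupted-checkpoint expectation later in the file moves by 55 x 5 = 275 bytes (offset 3025 -> 2750 over numOps=55).
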
 
@@ -489,9 +489,9 @@ public void testStats() throws IOException {
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(300L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(285L));
             assertThat(stats.getUncommittedOperations(), equalTo(4));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(245L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(230L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
         }
 
@@ -499,9 +499,9 @@ public void testStats() throws IOException {
         {
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(355L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(340L));
             assertThat(stats.getUncommittedOperations(), equalTo(4));
-            assertThat(stats.getUncommittedSizeInBytes(), equalTo(300L));
+            assertThat(stats.getUncommittedSizeInBytes(), equalTo(285L));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L));
         }
 
@@ -511,7 +511,7 @@ public void testStats() throws IOException {
             stats.writeTo(out);
             final TranslogStats copy = new TranslogStats(out.bytes().streamInput());
             assertThat(copy.estimatedNumberOfOperations(), equalTo(4));
-            assertThat(copy.getTranslogSizeInBytes(), equalTo(355L));
+            assertThat(copy.getTranslogSizeInBytes(), equalTo(340L));
 
             try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
                 builder.startObject();
@@ -521,9 +521,9 @@ public void testStats() throws IOException {
                     Strings.toString(builder),
                     equalTo(
                         "{\"translog\":{\"operations\":4,\"size_in_bytes\":"
-                            + 355
+                            + 340
                             + ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":"
-                            + 300
+                            + 285
                             + ",\"earliest_last_modified_age\":"
                             + stats.getEarliestLastModifiedAge()
                             + "}}"
@@ -537,7 +537,7 @@ public void testStats() throws IOException {
             long lastModifiedAge = System.currentTimeMillis() - translog.getCurrent().getLastModifiedTime();
             final TranslogStats stats = stats();
             assertThat(stats.estimatedNumberOfOperations(), equalTo(4));
-            assertThat(stats.getTranslogSizeInBytes(), equalTo(355L));
+            assertThat(stats.getTranslogSizeInBytes(), equalTo(340L));
             assertThat(stats.getUncommittedOperations(), equalTo(0));
             assertThat(stats.getUncommittedSizeInBytes(), equalTo(firstOperationPosition));
             assertThat(stats.getEarliestLastModifiedAge(), greaterThanOrEqualTo(lastModifiedAge));
@@ -553,7 +553,7 @@ public void testUncommittedOperations() throws Exception {
         int uncommittedOps = 0;
         int operationsInLastGen = 0;
         for (int i = 0; i < operations; i++) {
-            translog.add(new Translog.Index("test", Integer.toString(i), i, primaryTerm.get(), new byte[] { 1 }));
+            translog.add(new Translog.Index(Integer.toString(i), i, primaryTerm.get(), new byte[] { 1 }));
             uncommittedOps++;
             operationsInLastGen++;
             if (rarely()) {
@@ -634,7 +634,7 @@ public void testBasicSnapshot() throws IOException {
             assertThat(snapshot, SnapshotMatchers.size(0));
         }
 
-        addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
+        addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
 
         try (Translog.Snapshot snapshot = translog.newSnapshot(0, Long.MAX_VALUE)) {
             assertThat(snapshot, SnapshotMatchers.equalsTo(ops));
@@ -656,9 +656,9 @@ public void testBasicSnapshot() throws IOException {
     public void testReadLocation() throws IOException {
         ArrayList<Translog.Operation> ops = new ArrayList<>();
         ArrayList<Translog.Location> locs = new ArrayList<>();
-        locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })));
-        locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 1 })));
-        locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 1 })));
+        locs.add(addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })));
+        locs.add(addToTranslogAndList(translog, ops, new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 1 })));
+        locs.add(addToTranslogAndList(translog, ops, new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 1 })));
         int i = 0;
         for (Translog.Operation op : ops) {
             assertEquals(op, translog.readOperation(locs.get(i++)));
@@ -674,16 +674,16 @@ public void testSnapshotWithNewTranslog() throws IOException {
             toClose.add(snapshot);
             assertThat(snapshot, SnapshotMatchers.size(0));
 
-            addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
+            addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
             Translog.Snapshot snapshot1 = translog.newSnapshot();
             toClose.add(snapshot1);
 
-            addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 2 }));
+            addToTranslogAndList(translog, ops, new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 2 }));
 
             assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0)));
 
             translog.rollGeneration();
-            addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 3 }));
+            addToTranslogAndList(translog, ops, new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 3 }));
 
             Translog.Snapshot snapshot2 = translog.newSnapshot();
             toClose.add(snapshot2);
@@ -697,7 +697,7 @@ public void testSnapshotWithNewTranslog() throws IOException {
 
     public void testSnapshotOnClosedTranslog() throws IOException {
         assertTrue(Files.exists(translogDir.resolve(Translog.getFilename(1))));
-        translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
+        translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
         translog.close();
         AlreadyClosedException ex = expectThrows(AlreadyClosedException.class, () -> translog.newSnapshot());
         assertEquals(ex.getMessage(), "translog is already closed");
@@ -719,13 +719,7 @@ public void testRangeSnapshot() throws Exception {
             }
             List<Translog.Operation> ops = new ArrayList<>(seqNos.size());
             for (long seqNo : seqNos) {
-                Translog.Index op = new Translog.Index(
-                    "_doc",
-                    randomAlphaOfLength(10),
-                    seqNo,
-                    primaryTerm.get(),
-                    new byte[] { randomByte() }
-                );
+                Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { randomByte() });
                 translog.add(op);
                 ops.add(op);
             }
@@ -786,13 +780,7 @@ private Long populateTranslogOps(boolean withMissingOps) throws IOException {
             Collections.shuffle(seqNos, new Random(100));
             List<Translog.Operation> ops = new ArrayList<>(seqNos.size());
             for (long seqNo : seqNos) {
-                Translog.Index op = new Translog.Index(
-                    "_doc",
-                    randomAlphaOfLength(10),
-                    seqNo,
-                    primaryTerm.get(),
-                    new byte[] { randomByte() }
-                );
+                Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { randomByte() });
                 boolean shouldAdd = !withMissingOps || seqNo % 4 != 0;
                 if (shouldAdd) {
                     translog.add(op);
@@ -928,7 +916,6 @@ public void testConcurrentWritesWithVaryingSize() throws Throwable {
                         Translog.Index expIndexOp = (Translog.Index) expectedOp;
                         assertEquals(expIndexOp.id(), indexOp.id());
                         assertEquals(expIndexOp.routing(), indexOp.routing());
-                        assertEquals(expIndexOp.type(), indexOp.type());
                         assertEquals(expIndexOp.source(), indexOp.source());
                         assertEquals(expIndexOp.version(), indexOp.version());
                         break;
@@ -962,7 +949,7 @@ public void testTranslogCorruption() throws Exception {
         int translogOperations = randomIntBetween(10, 1000);
         for (int op = 0; op < translogOperations; op++) {
             String ascii = randomAlphaOfLengthBetween(1, 50);
-            locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));
+            locations.add(translog.add(new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));
 
             if (rarely()) {
                 translog.rollGeneration();
@@ -989,7 +976,7 @@ public void testTruncatedTranslogs() throws Exception {
         int translogOperations = randomIntBetween(10, 100);
         for (int op = 0; op < translogOperations; op++) {
             String ascii = randomAlphaOfLengthBetween(1, 50);
-            locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));
+            locations.add(translog.add(new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));
         }
         translog.sync();
 
@@ -1034,7 +1021,7 @@ private Term newUid(String id) {
 
     public void testVerifyTranslogIsNotDeleted() throws IOException {
         assertFileIsPresent(translog, 1);
-        translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
+        translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
         try (Translog.Snapshot snapshot = translog.newSnapshot()) {
             assertThat(snapshot, SnapshotMatchers.size(1));
             assertFileIsPresent(translog, 1);
@@ -1086,10 +1073,10 @@ public void doRun() throws BrokenBarrierException, InterruptedException, IOExcep
                         switch (type) {
                             case CREATE:
                             case INDEX:
-                                op = new Translog.Index("type", "" + id, id, primaryTerm.get(), new byte[] { (byte) id });
+                                op = new Translog.Index("" + id, id, primaryTerm.get(), new byte[] { (byte) id });
                                 break;
                             case DELETE:
-                                op = new Translog.Delete("test", Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id)));
+                                op = new Translog.Delete(Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id)));
                                 break;
                             case NO_OP:
                                 op = new Translog.NoOp(id, 1, Long.toString(id));
@@ -1248,7 +1235,7 @@ public void testSyncUpTo() throws IOException {
         for (int op = 0; op < translogOperations; op++) {
             int seqNo = ++count;
             final Translog.Location location = translog.add(
-                new Translog.Index("test", "" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8")))
+                new Translog.Index("" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8")))
             );
             if (randomBoolean()) {
                 assertTrue("at least one operation pending", translog.syncNeeded());
@@ -1257,13 +1244,7 @@ public void testSyncUpTo() throws IOException {
                 assertFalse("the last call to ensureSynced synced all previous ops", translog.syncNeeded());
                 seqNo = ++count;
                 translog.add(
-                    new Translog.Index(
-                        "test",
-                        "" + op,
-                        seqNo,
-                        primaryTerm.get(),
-                        Integer.toString(seqNo).getBytes(Charset.forName("UTF-8"))
-                    )
+                    new Translog.Index("" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8")))
                 );
                 assertTrue("one pending operation", translog.syncNeeded());
                 assertFalse("this op has been synced before", translog.ensureSynced(location)); // not syncing now
@@ -1293,7 +1274,7 @@ public void testSyncUpToStream() throws IOException {
                     translog.rollGeneration();
                 }
                 final Translog.Location location = translog.add(
-                    new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8")))
+                    new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8")))
                 );
                 locations.add(location);
             }
@@ -1325,7 +1306,7 @@ public void testLocationComparison() throws IOException {
         for (int op = 0; op < translogOperations; op++) {
             locations.add(
                 translog.add(
-                    new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8")))
+                    new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8")))
                 )
             );
             if (rarely() && translogOperations > op + 1) {
@@ -1364,9 +1345,7 @@ public void testBasicCheckpoint() throws IOException {
         long lastSyncedGlobalCheckpoint = globalCheckpoint.get();
         for (int op = 0; op < translogOperations; op++) {
             locations.add(
-                translog.add(
-                    new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
-                )
+                translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
             );
             if (randomBoolean()) {
                 globalCheckpoint.set(globalCheckpoint.get() + randomIntBetween(1, 16));
@@ -1380,7 +1359,6 @@ public void testBasicCheckpoint() throws IOException {
         assertEquals(translogOperations, translog.totalOperations());
         translog.add(
             new Translog.Index(
-                "test",
                 "" + translogOperations,
                 translogOperations,
                 primaryTerm.get(),
@@ -1730,9 +1708,7 @@ public void testBasicRecovery() throws IOException {
         final boolean commitOften = randomBoolean();
         for (int op = 0; op < translogOperations; op++) {
             locations.add(
-                translog.add(
-                    new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
-                )
+                translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
             );
             final boolean commit = commitOften ? frequently() : rarely();
             if (commit && op < translogOperations - 1) {
@@ -1791,9 +1767,7 @@ public void testRecoveryUncommitted() throws IOException {
         final boolean sync = randomBoolean();
         for (int op = 0; op < translogOperations; op++) {
             locations.add(
-                translog.add(
-                    new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
-                )
+                translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
             );
             if (op == prepareOp) {
                 translogGeneration = translog.getGeneration();
@@ -1878,9 +1852,7 @@ public void testRecoveryUncommittedFileExists() throws IOException {
         final boolean sync = randomBoolean();
         for (int op = 0; op < translogOperations; op++) {
             locations.add(
-                translog.add(
-                    new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
-                )
+                translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
             );
             if (op == prepareOp) {
                 translogGeneration = translog.getGeneration();
@@ -1968,7 +1940,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException {
         Translog.TranslogGeneration translogGeneration = null;
         final boolean sync = randomBoolean();
         for (int op = 0; op < translogOperations; op++) {
-            translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(StandardCharsets.UTF_8)));
+            translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(StandardCharsets.UTF_8)));
             if (op == prepareOp) {
                 translogGeneration = translog.getGeneration();
                 translog.rollGeneration();
@@ -2003,7 +1975,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException {
         assertThat(
             translogCorruptedException.getMessage(),
             endsWith(
-                "] is corrupted, checkpoint file translog-3.ckp already exists but has corrupted content: expected Checkpoint{offset=3025, "
+                "] is corrupted, checkpoint file translog-3.ckp already exists but has corrupted content: expected Checkpoint{offset=2750, "
                     + "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} "
                     + "but got Checkpoint{offset=0, numOps=0, generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, "
                     + "minTranslogGeneration=0, trimmedAboveSeqNo=-2}"
@@ -2050,7 +2022,6 @@ public void testSnapshotFromStreamInput() throws IOException {
         int translogOperations = randomIntBetween(10, 100);
         for (int op = 0; op < translogOperations; op++) {
             Translog.Index test = new Translog.Index(
-                "test",
                 "" + op,
                 op,
                 primaryTerm.get(),
@@ -2073,7 +2044,7 @@ public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() thr
 
         for (int op = 0; op < extraDocs; op++) {
             String ascii = randomAlphaOfLengthBetween(1, 50);
-            Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get() - op, ascii.getBytes("UTF-8"));
+            Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get() - op, ascii.getBytes("UTF-8"));
             translog.add(operation);
         }
 
@@ -2093,13 +2064,7 @@ public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() thr
         translog.rollGeneration();
 
         // add a single operation to current with seq# > trimmed seq# but higher primary term
-        Translog.Index operation = new Translog.Index(
-            "test",
-            "" + 1,
-            1L,
-            primaryTerm.get(),
-            randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8")
-        );
+        Translog.Index operation = new Translog.Index("" + 1, 1L, primaryTerm.get(), randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8"));
         translog.add(operation);
 
         // it is possible to trim after generation rollover
@@ -2129,7 +2094,7 @@ public void testSnapshotTrimmedOperations() throws Exception {
                 }
 
                 // use ongoing primaryTerms - or the same as it was
-                Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), source.get().getBytes("UTF-8"));
+                Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get(), source.get().getBytes("UTF-8"));
                 translog.add(operation);
                 inMemoryTranslog.add(operation);
                 allOperations.add(operation);
@@ -2213,7 +2178,7 @@ public void testRandomExceptionsOnTrimOperations() throws Exception {
             Randomness.shuffle(ops);
             for (int op : ops) {
                 String ascii = randomAlphaOfLengthBetween(1, 50);
-                Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"));
+                Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"));
 
                 failableTLog.add(operation);
             }
@@ -2271,12 +2236,12 @@ public void testLocationHashCodeEquals() throws IOException {
             for (int op = 0; op < translogOperations; op++) {
                 locations.add(
                     translog.add(
-                        new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
+                        new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
                     )
                 );
                 locations2.add(
                     translog2.add(
-                        new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
+                        new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
                     )
                 );
             }
@@ -2305,9 +2270,7 @@ public void testOpenForeignTranslog() throws IOException {
         int firstUncommitted = 0;
         for (int op = 0; op < translogOperations; op++) {
             locations.add(
-                translog.add(
-                    new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
-                )
+                translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))))
             );
             if (randomBoolean()) {
                 translog.rollGeneration();
@@ -2355,10 +2318,10 @@ public void testOpenForeignTranslog() throws IOException {
     }
 
     public void testFailOnClosedWrite() throws IOException {
-        translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
+        translog.add(new Translog.Index("1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
         translog.close();
         try {
-            translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
+            translog.add(new Translog.Index("1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
             fail("closed");
         } catch (AlreadyClosedException ex) {
             // all is well
@@ -2442,7 +2405,6 @@ public void run() {
                         case CREATE:
                         case INDEX:
                             op = new Translog.Index(
-                                "test",
                                 threadId + "_" + opCount,
                                 seqNoGenerator.getAndIncrement(),
                                 primaryTerm.get(),
@@ -2451,7 +2413,6 @@ public void run() {
                             break;
                         case DELETE:
                             op = new Translog.Delete(
-                                "test",
                                 threadId + "_" + opCount,
                                 new Term("_uid", threadId + "_" + opCount),
                                 seqNoGenerator.getAndIncrement(),
@@ -2499,7 +2460,6 @@ public void testFailFlush() throws IOException {
                 locations.add(
                     translog.add(
                         new Translog.Index(
-                            "test",
                             "" + opsSynced,
                             opsSynced,
                             primaryTerm.get(),
@@ -2529,7 +2489,6 @@ public void testFailFlush() throws IOException {
                 locations.add(
                     translog.add(
                         new Translog.Index(
-                            "test",
                             "" + opsSynced,
                             opsSynced,
                             primaryTerm.get(),
@@ -2611,7 +2570,6 @@ public void testTranslogOpsCountIsCorrect() throws IOException {
             locations.add(
                 translog.add(
                     new Translog.Index(
-                        "test",
                         "" + opsAdded,
                         opsAdded,
                         primaryTerm.get(),
@@ -2640,13 +2598,11 @@ public void testTragicEventCanBeAnyException() throws IOException {
         TranslogConfig config = getTranslogConfig(tempDir);
         Translog translog = getFailableTranslog(fail, config, false, true, null, createTranslogDeletionPolicy());
         LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer boundaries regularly
-        translog.add(
-            new Translog.Index("test", "1", 0, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))
-        );
+        translog.add(new Translog.Index("1", 0, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))));
         fail.failAlways();
         try {
             Translog.Location location = translog.add(
-                new Translog.Index("test", "2", 1, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))
+                new Translog.Index("2", 1, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))
             );
             if (randomBoolean()) {
                 translog.ensureSynced(location);
@@ -2772,13 +2728,7 @@ public void testRecoveryFromAFutureGenerationCleansUp() throws IOException {
         int op = 0;
         for (; op < translogOperations / 2; op++) {
             translog.add(
-                new Translog.Index(
-                    "_doc",
-                    Integer.toString(op),
-                    op,
-                    primaryTerm.get(),
-                    Integer.toString(op).getBytes(Charset.forName("UTF-8"))
-                )
+                new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
             );
             if (rarely()) {
                 translog.rollGeneration();
@@ -2788,13 +2738,7 @@ public void testRecoveryFromAFutureGenerationCleansUp() throws IOException {
         long localCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, op);
         for (op = translogOperations / 2; op < translogOperations; op++) {
             translog.add(
-                new Translog.Index(
-                    "test",
-                    Integer.toString(op),
-                    op,
-                    primaryTerm.get(),
-                    Integer.toString(op).getBytes(Charset.forName("UTF-8"))
-                )
+                new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
             );
             if (rarely()) {
                 translog.rollGeneration();
@@ -2847,13 +2791,7 @@ public void testRecoveryFromFailureOnTrimming() throws IOException {
             int op = 0;
             for (; op < translogOperations / 2; op++) {
                 translog.add(
-                    new Translog.Index(
-                        "test",
-                        Integer.toString(op),
-                        op,
-                        primaryTerm.get(),
-                        Integer.toString(op).getBytes(Charset.forName("UTF-8"))
-                    )
+                    new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
                 );
                 if (rarely()) {
                     translog.rollGeneration();
@@ -2863,13 +2801,7 @@ public void testRecoveryFromFailureOnTrimming() throws IOException {
             localCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, op);
             for (op = translogOperations / 2; op < translogOperations; op++) {
                 translog.add(
-                    new Translog.Index(
-                        "test",
-                        Integer.toString(op),
-                        op,
-                        primaryTerm.get(),
-                        Integer.toString(op).getBytes(Charset.forName("UTF-8"))
-                    )
+                    new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))
                 );
                 if (rarely()) {
                     translog.rollGeneration();
@@ -3132,7 +3064,7 @@ public void testFailWhileCreateWriteWithRecoveredTLogs() throws IOException {
         Path tempDir = createTempDir();
         TranslogConfig config = getTranslogConfig(tempDir);
         Translog translog = createTranslog(config);
-        translog.add(new Translog.Index("test", "boom", 0, primaryTerm.get(), "boom".getBytes(Charset.forName("UTF-8"))));
+        translog.add(new Translog.Index("boom", 0, primaryTerm.get(), "boom".getBytes(Charset.forName("UTF-8"))));
         translog.close();
         try {
             new Translog(
@@ -3161,7 +3093,7 @@ protected TranslogWriter createWriter(
     }
 
     public void testRecoverWithUnbackedNextGen() throws IOException {
-        translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
+        translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
         translog.close();
         TranslogConfig config = translog.getConfig();
 
@@ -3176,7 +3108,7 @@ public void testRecoverWithUnbackedNextGen() throws IOException {
             assertNotNull("operation 1 must be non-null", op);
             assertEquals("payload mismatch for operation 1", 1, Integer.parseInt(op.getSource().source.utf8ToString()));
 
-            tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(2).getBytes(Charset.forName("UTF-8"))));
+            tlog.add(new Translog.Index("" + 1, 1, primaryTerm.get(), Integer.toString(2).getBytes(Charset.forName("UTF-8"))));
         }
 
         try (Translog tlog = openTranslog(config, translog.getTranslogUUID()); Translog.Snapshot snapshot = tlog.newSnapshot()) {
@@ -3193,7 +3125,7 @@ public void testRecoverWithUnbackedNextGen() throws IOException {
     }
 
     public void testRecoverWithUnbackedNextGenInIllegalState() throws IOException {
-        translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8"))));
+        translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8"))));
         translog.close();
         TranslogConfig config = translog.getConfig();
         Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME);
@@ -3217,7 +3149,7 @@ public void testRecoverWithUnbackedNextGenInIllegalState() throws IOException {
     }
 
     public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException {
-        translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8"))));
+        translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8"))));
         translog.close();
         TranslogConfig config = translog.getConfig();
         final String translogUUID = translog.getTranslogUUID();
@@ -3247,7 +3179,7 @@ public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException {
                     assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString()));
                 }
             }
-            tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
+            tlog.add(new Translog.Index("" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8"))));
         }
 
         TranslogException ex = expectThrows(
@@ -3293,7 +3225,7 @@ public void testWithRandomException() throws IOException {
                     for (int opsAdded = 0; opsAdded < numOps; opsAdded++) {
                         String doc = lineFileDocs.nextDoc().toString();
                         failableTLog.add(
-                            new Translog.Index("test", "" + opsAdded, opsAdded, primaryTerm.get(), doc.getBytes(Charset.forName("UTF-8")))
+                            new Translog.Index("" + opsAdded, opsAdded, primaryTerm.get(), doc.getBytes(Charset.forName("UTF-8")))
                         );
                         unsynced.add(doc);
                         if (randomBoolean()) {
@@ -3464,7 +3396,7 @@ public void testLegacyCheckpointVersion() throws IOException {
      * Tests that closing views after the translog is closed is fine and that we can reopen the translog
      */
     public void testPendingDelete() throws IOException {
-        translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }));
+        translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }));
         translog.rollGeneration();
         TranslogConfig config = translog.getConfig();
         final String translogUUID = translog.getTranslogUUID();
@@ -3478,10 +3410,10 @@ public void testPendingDelete() throws IOException {
             primaryTerm::get,
             seqNo -> {}
         );
-        translog.add(new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 2 }));
+        translog.add(new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 2 }));
         translog.rollGeneration();
         Closeable lock = translog.acquireRetentionLock();
-        translog.add(new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 3 }));
+        translog.add(new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 3 }));
         translog.close();
         IOUtils.close(lock);
         translog = new Translog(
@@ -3515,17 +3447,7 @@ public void testTranslogOpSerialization() throws Exception {
         document.add(seqID.seqNo);
         document.add(seqID.seqNoDocValue);
         document.add(seqID.primaryTerm);
-        ParsedDocument doc = new ParsedDocument(
-            versionField,
-            seqID,
-            "1",
-            "type",
-            null,
-            Arrays.asList(document),
-            B_1,
-            XContentType.JSON,
-            null
-        );
+        ParsedDocument doc = new ParsedDocument(versionField, seqID, "1", null, Arrays.asList(document), B_1, XContentType.JSON, null);
 
         Engine.Index eIndex = new Engine.Index(
             newUid(doc),
@@ -3554,7 +3476,6 @@ public void testTranslogOpSerialization() throws Exception {
         assertEquals(index, serializedIndex);
 
         Engine.Delete eDelete = new Engine.Delete(
-            doc.type(),
             doc.id(),
             newUid(doc),
             randomSeqNum,
@@ -3793,7 +3714,6 @@ public void testSnapshotReadOperationInReverse() throws Exception {
             final int operations = randomIntBetween(1, 100);
             for (int i = 0; i < operations; i++) {
                 Translog.Index op = new Translog.Index(
-                    "doc",
                     randomAlphaOfLength(10),
                     seqNo.getAndIncrement(),
                     primaryTerm.get(),
@@ -3823,7 +3743,7 @@ public void testSnapshotDedupOperations() throws Exception {
             List<Long> batch = LongStream.rangeClosed(0, between(0, 500)).boxed().collect(Collectors.toList());
             Randomness.shuffle(batch);
             for (Long seqNo : batch) {
-                Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { 1 });
+                Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { 1 });
                 translog.add(op);
                 latestOperations.put(op.seqNo(), op);
             }
@@ -3838,7 +3758,7 @@ public void testSnapshotDedupOperations() throws Exception {
     public void testCloseSnapshotTwice() throws Exception {
         int numOps = between(0, 10);
         for (int i = 0; i < numOps; i++) {
-            Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), i, primaryTerm.get(), new byte[] { 1 });
+            Translog.Index op = new Translog.Index(randomAlphaOfLength(10), i, primaryTerm.get(), new byte[] { 1 });
             translog.add(op);
             if (randomBoolean()) {
                 translog.rollGeneration();
@@ -3912,7 +3832,7 @@ public void testMaxSeqNo() throws Exception {
             Randomness.shuffle(seqNos);
             for (long seqNo : seqNos) {
                 if (frequently()) {
-                    translog.add(new Translog.Index("test", "id", seqNo, primaryTerm.get(), new byte[] { 1 }));
+                    translog.add(new Translog.Index("id", seqNo, primaryTerm.get(), new byte[] { 1 }));
                     maxSeqNoPerGeneration.compute(
                         translog.currentFileGeneration(),
                         (key, existing) -> existing == null ? seqNo : Math.max(existing, seqNo)
@@ -4050,9 +3970,7 @@ public void testSyncConcurrently() throws Exception {
                     int iterations = randomIntBetween(10, 100);
                     for (int i = 0; i < iterations; i++) {
                         List<Translog.Operation> ops = IntStream.range(0, between(1, 10))
-                            .mapToObj(
-                                n -> new Translog.Index("test", "1", nextSeqNo.incrementAndGet(), primaryTerm.get(), new byte[] { 1 })
-                            )
+                            .mapToObj(n -> new Translog.Index("1", nextSeqNo.incrementAndGet(), primaryTerm.get(), new byte[] { 1 }))
                             .collect(Collectors.toList());
                         try {
                             Translog.Location location = null;
@@ -4134,7 +4052,7 @@ void syncBeforeRollGeneration() {
             }
         };
         try {
-            translog.add(new Translog.Index("1", "_doc", 1, primaryTerm.get(), new byte[] { 1 }));
+            translog.add(new Translog.Index("1", 1, primaryTerm.get(), new byte[] { 1 }));
             failedToSyncCheckpoint.set(true);
             expectThrows(IOException.class, translog::rollGeneration);
             final AlreadyClosedException alreadyClosedException = expectThrows(AlreadyClosedException.class, translog::rollGeneration);
diff --git a/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java b/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java
index 2c32e419b27db..c68ad7eaba82e 100644
--- a/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java
+++ b/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java
@@ -367,7 +367,7 @@ public void testThrottling() throws Exception {
     public void testTranslogRecoveryWorksWithIMC() throws IOException {
         IndexShard shard = newStartedShard(true);
         for (int i = 0; i < 100; i++) {
-            indexDoc(shard, "_doc", Integer.toString(i), "{\"foo\" : \"bar\"}", XContentType.JSON, null);
+            indexDoc(shard, Integer.toString(i), "{\"foo\" : \"bar\"}", XContentType.JSON, null);
         }
         shard.close("simon says", false);
         AtomicReference<IndexShard> shardRef = new AtomicReference<>();
diff --git a/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java
index 501253bee27f9..e54f06937cad3 100644
--- a/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java
+++ b/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java
@@ -185,7 +185,7 @@ private SeqNoStats populateRandomData(IndexShard shard) throws IOException {
                 shard.getOperationPrimaryTerm(),
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
                 false,
-                new SourceToParse(shard.shardId().getIndexName(), "_doc", UUIDs.randomBase64UUID(), new BytesArray("{}"), XContentType.JSON)
+                new SourceToParse(shard.shardId().getIndexName(), UUIDs.randomBase64UUID(), new BytesArray("{}"), XContentType.JSON)
             );
             if (randomInt(100) < 5) {
                 shard.flush(new FlushRequest().waitIfOngoing(true));
diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java
index 3890470f966ca..e713ef5d35f67 100644
--- a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java
+++ b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java
@@ -462,7 +462,6 @@ public void indexTranslogOperations(
     }
 
     private Engine.Index getIndex(final String id) {
-        final String type = "test";
         final ParseContext.Document document = new ParseContext.Document();
         document.add(new TextField("test", "test", Field.Store.YES));
         final Field idField = new Field("_id", Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE);
@@ -478,7 +477,6 @@ private Engine.Index getIndex(final String id) {
             versionField,
             seqID,
             id,
-            type,
             null,
             Arrays.asList(document),
             source,
@@ -1188,10 +1186,9 @@ private static List<Translog.Operation> generateOperations(int numOps) {
             final long seqNo = randomValueOtherThanMany(n -> seqNos.add(n) == false, OpenSearchTestCase::randomNonNegativeLong);
             final Translog.Operation op;
             if (randomBoolean()) {
-                op = new Translog.Index("_doc", "id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1);
+                op = new Translog.Index("id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1);
             } else if (randomBoolean()) {
                 op = new Translog.Delete(
-                    "_doc",
                     "id",
                     new Term("_id", Uid.encodeId("id")),
                     seqNo,
diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java
index 54f4a22f3a577..c714bd0eb85a2 100644
--- a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java
+++ b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java
@@ -161,7 +161,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception {
 
             // delete #1
             orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete
-            orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "type", "id");
+            orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "id");
             orgReplica.flush(new FlushRequest().force(true)); // isolate delete#1 in its own translog generation and lucene segment
             // index #0
             orgReplica.applyIndexOperationOnReplica(
@@ -170,7 +170,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception {
                 1,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
                 false,
-                new SourceToParse(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON)
+                new SourceToParse(indexName, "id", new BytesArray("{}"), XContentType.JSON)
             );
             // index #3
             orgReplica.applyIndexOperationOnReplica(
@@ -179,7 +179,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception {
                 1,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
                 false,
-                new SourceToParse(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON)
+                new SourceToParse(indexName, "id-3", new BytesArray("{}"), XContentType.JSON)
             );
             // Flushing a new commit with local checkpoint=1 allows deleting the translog gen #1.
             orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true));
@@ -190,7 +190,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception {
                 1,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
                 false,
-                new SourceToParse(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON)
+                new SourceToParse(indexName, "id-2", new BytesArray("{}"), XContentType.JSON)
             );
             orgReplica.sync(); // advance local checkpoint
             orgReplica.updateGlobalCheckpointOnReplica(3L, "test");
@@ -201,7 +201,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception {
                 1,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
                 false,
-                new SourceToParse(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)
+                new SourceToParse(indexName, "id-5", new BytesArray("{}"), XContentType.JSON)
             );
 
             if (randomBoolean()) {
@@ -310,13 +310,7 @@ public void testPeerRecoverySendSafeCommitInFileBased() throws Exception {
             Engine.IndexResult result = primaryShard.applyIndexOperationOnPrimary(
                 Versions.MATCH_ANY,
                 VersionType.INTERNAL,
-                new SourceToParse(
-                    primaryShard.shardId().getIndexName(),
-                    "_doc",
-                    Integer.toString(i),
-                    new BytesArray("{}"),
-                    XContentType.JSON
-                ),
+                new SourceToParse(primaryShard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON),
                 SequenceNumbers.UNASSIGNED_SEQ_NO,
                 0,
                 IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java
index bbc9a0fdbe309..b2c54492b66d7 100644
--- a/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java
+++ b/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java
@@ -100,7 +100,6 @@ public void testIndexingWithNoContexts() throws Exception {
         ParsedDocument parsedDocument = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type1",
                 "1",
                 BytesReference.bytes(
                     jsonBuilder().startObject()
@@ -153,7 +152,6 @@ public void testIndexingWithSimpleContexts() throws Exception {
         ParsedDocument parsedDocument = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type1",
                 "1",
                 BytesReference.bytes(
                     jsonBuilder().startObject()
@@ -201,7 +199,6 @@ public void testIndexingWithSimpleNumberContexts() throws Exception {
         ParsedDocument parsedDocument = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type1",
                 "1",
                 BytesReference.bytes(
                     jsonBuilder().startObject()
@@ -249,7 +246,6 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception {
         ParsedDocument parsedDocument = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type1",
                 "1",
                 BytesReference.bytes(
                     jsonBuilder().startObject()
@@ -307,7 +303,7 @@ public void testIndexingWithSimpleNULLContexts() throws Exception {
 
         Exception e = expectThrows(
             MapperParsingException.class,
-            () -> defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))
+            () -> defaultMapper.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON))
         );
         assertEquals(
             "contexts must be a string, number or boolean or a list of string, number or boolean, but was [VALUE_NULL]",
@@ -341,7 +337,6 @@ public void testIndexingWithContextList() throws Exception {
         ParsedDocument parsedDocument = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type1",
                 "1",
                 BytesReference.bytes(
                     jsonBuilder().startObject()
@@ -387,7 +382,6 @@ public void testIndexingWithMixedTypeContextList() throws Exception {
         ParsedDocument parsedDocument = defaultMapper.parse(
             new SourceToParse(
                 "test",
-                "type1",
                 "1",
                 BytesReference.bytes(
                     jsonBuilder().startObject()
@@ -441,7 +435,7 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception {
 
         Exception e = expectThrows(
             MapperParsingException.class,
-            () -> defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON))
+            () -> defaultMapper.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON))
         );
         assertEquals("context array must have string, number or boolean values, but was [VALUE_NULL]", e.getCause().getMessage());
     }
@@ -486,7 +480,7 @@ public void testIndexingWithMultipleContexts() throws Exception {
             .endArray()
             .endObject();
         ParsedDocument parsedDocument = defaultMapper.parse(
-            new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)
+            new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON)
         );
         IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
         assertContextSuggestFields(fields, 3);
diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java
index 361a6cd543d65..31cc2e73ff2a3 100644
--- a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java
+++ b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java
@@ -83,7 +83,6 @@ public void testIndexingWithNoContexts() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject()
@@ -131,7 +130,6 @@ public void testIndexingWithSimpleContexts() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject()
@@ -177,7 +175,6 @@ public void testIndexingWithContextList() throws Exception {
             .parse(
                 new SourceToParse(
                     "test",
-                    MapperService.SINGLE_MAPPING_NAME,
                     "1",
                     BytesReference.bytes(
                         jsonBuilder().startObject()
@@ -240,7 +237,7 @@ public void testIndexingWithMultipleContexts() throws Exception {
             .endArray()
             .endObject();
         ParsedDocument parsedDocument = mapperService.documentMapper()
-            .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(builder), XContentType.JSON));
+            .parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON));
         IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name());
         assertContextSuggestFields(fields, 3);
     }
diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java
index 69f7bef90d78f..6c382a09b90f3 100644
--- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java
@@ -175,10 +175,6 @@ public abstract class EngineTestCase extends OpenSearchTestCase {
     // A default primary term is used by engine instances created in this test.
     protected final PrimaryTermSupplier primaryTerm = new PrimaryTermSupplier(1L);
 
-    protected static void assertVisibleCount(Engine engine, int numDocs) throws IOException {
-        assertVisibleCount(engine, numDocs, true);
-    }
-
     protected static void assertVisibleCount(Engine engine, int numDocs, boolean refresh) throws IOException {
         if (refresh) {
             engine.refresh("test");
@@ -333,14 +329,14 @@ public void tearDown() throws Exception {
         try {
             if (engine != null && engine.isClosed.get() == false) {
                 engine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs();
-                assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test"));
+                assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService());
                 assertNoInFlightDocuments(engine);
                 assertMaxSeqNoInCommitUserData(engine);
                 assertAtMostOneLuceneDocumentPerSequenceNumber(engine);
             }
             if (replicaEngine != null && replicaEngine.isClosed.get() == false) {
                 replicaEngine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs();
-                assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine, createMapperService("test"));
+                assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine, createMapperService());
                 assertNoInFlightDocuments(replicaEngine);
                 assertMaxSeqNoInCommitUserData(replicaEngine);
                 assertAtMostOneLuceneDocumentPerSequenceNumber(replicaEngine);
@@ -412,21 +408,11 @@ protected static ParsedDocument testParsedDocument(
         } else {
             document.add(new StoredField(SourceFieldMapper.NAME, ref.bytes, ref.offset, ref.length));
         }
-        return new ParsedDocument(
-            versionField,
-            seqID,
-            id,
-            "test",
-            routing,
-            Arrays.asList(document),
-            source,
-            XContentType.JSON,
-            mappingUpdate
-        );
+        return new ParsedDocument(versionField, seqID, id, routing, Arrays.asList(document), source, XContentType.JSON, mappingUpdate);
     }
 
     public static CheckedBiFunction<String, Integer, ParsedDocument, IOException> nestedParsedDocFactory() throws Exception {
-        final MapperService mapperService = createMapperService("type");
+        final MapperService mapperService = createMapperService();
         final String nestedMapping = Strings.toString(
             XContentFactory.jsonBuilder()
                 .startObject()
@@ -450,7 +436,7 @@ public static CheckedBiFunction<String, Integer, ParsedDocument, IOException> ne
                 source.endObject();
             }
             source.endObject();
-            return nestedMapper.parse(new SourceToParse("test", "type", docId, BytesReference.bytes(source), XContentType.JSON));
+            return nestedMapper.parse(new SourceToParse("test", docId, BytesReference.bytes(source), XContentType.JSON));
         };
     }
 
@@ -460,7 +446,7 @@ public static CheckedBiFunction<String, Integer, ParsedDocument, IOException> ne
     public static EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() {
         return new EngineConfig.TombstoneDocSupplier() {
             @Override
-            public ParsedDocument newDeleteTombstoneDoc(String type, String id) {
+            public ParsedDocument newDeleteTombstoneDoc(String id) {
                 final ParseContext.Document doc = new ParseContext.Document();
                 Field uidField = new Field(IdFieldMapper.NAME, Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE);
                 doc.add(uidField);
@@ -476,7 +462,6 @@ public ParsedDocument newDeleteTombstoneDoc(String type, String id) {
                     versionField,
                     seqID,
                     id,
-                    type,
                     null,
                     Collections.singletonList(doc),
                     new BytesArray("{}"),
@@ -498,17 +483,7 @@ public ParsedDocument newNoopTombstoneDoc(String reason) {
                 doc.add(versionField);
                 BytesRef byteRef = new BytesRef(reason);
                 doc.add(new StoredField(SourceFieldMapper.NAME, byteRef.bytes, byteRef.offset, byteRef.length));
-                return new ParsedDocument(
-                    versionField,
-                    seqID,
-                    null,
-                    null,
-                    null,
-                    Collections.singletonList(doc),
-                    null,
-                    XContentType.JSON,
-                    null
-                );
+                return new ParsedDocument(versionField, seqID, null, null, Collections.singletonList(doc), null, XContentType.JSON, null);
             }
         };
     }
@@ -991,7 +966,7 @@ protected Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long
     }
 
     protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) {
-        return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, null, REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
+        return new Engine.Delete(id, newUid(id), seqNo, 1, version, null, REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0);
     }
 
     protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException {
@@ -1056,7 +1031,6 @@ public static List<Engine.Operation> generateSingleDocHistory(
                 );
             } else {
                 op = new Engine.Delete(
-                    "test",
                     docId,
                     id,
                     forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
@@ -1115,7 +1089,6 @@ public List<Engine.Operation> generateHistoryOnReplica(
                     case DELETE:
                         operations.add(
                             new Engine.Delete(
-                                doc.type(),
                                 doc.id(),
                                 EngineTestCase.newUid(doc),
                                 seqNo,
@@ -1478,7 +1451,7 @@ public static void assertAtMostOneLuceneDocumentPerSequenceNumber(IndexSettings
         }
     }
 
-    public static MapperService createMapperService(String type) throws IOException {
+    public static MapperService createMapperService() throws IOException {
         IndexMetadata indexMetadata = IndexMetadata.builder("test")
             .settings(
                 Settings.builder()
@@ -1486,7 +1459,7 @@ public static MapperService createMapperService(String type) throws IOException
                     .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                     .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
             )
-            .putMapping(type, "{\"properties\": {}}")
+            .putMapping("{\"properties\": {}}")
             .build();
         MapperService mapperService = MapperTestUtils.newMapperService(
             new NamedXContentRegistry(ClusterModule.getNamedXWriteables()),
diff --git a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java
index afb44caa64987..7dbe2c7381fd8 100644
--- a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java
+++ b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java
@@ -44,7 +44,6 @@
 import org.opensearch.index.mapper.DocumentMapper;
 import org.opensearch.index.mapper.DocumentMapperForType;
 import org.opensearch.index.mapper.MapperService;
-import org.opensearch.index.mapper.Mapping;
 import org.opensearch.index.mapper.RootObjectMapper;
 import org.opensearch.index.mapper.SourceToParse;
 import org.opensearch.index.seqno.SequenceNumbers;
@@ -65,8 +64,6 @@
 public class TranslogHandler implements Engine.TranslogRecoveryRunner {
 
     private final MapperService mapperService;
-    public Mapping mappingUpdate = null;
-    private final Map<String, Mapping> recoveredTypes = new HashMap<>();
 
     private final AtomicLong appliedOperations = new AtomicLong();
 
@@ -95,21 +92,13 @@ public TranslogHandler(NamedXContentRegistry xContentRegistry, IndexSettings ind
     private DocumentMapperForType docMapper(String type) {
         RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder(type);
         DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService);
-        return new DocumentMapperForType(b.build(mapperService), mappingUpdate);
+        return new DocumentMapperForType(b.build(mapperService), null);
     }
 
     private void applyOperation(Engine engine, Engine.Operation operation) throws IOException {
         switch (operation.operationType()) {
             case INDEX:
-                Engine.Index engineIndex = (Engine.Index) operation;
-                Mapping update = engineIndex.parsedDoc().dynamicMappingsUpdate();
-                if (engineIndex.parsedDoc().dynamicMappingsUpdate() != null) {
-                    recoveredTypes.compute(
-                        engineIndex.type(),
-                        (k, mapping) -> mapping == null ? update : mapping.merge(update, MapperService.MergeReason.MAPPING_RECOVERY)
-                    );
-                }
-                engine.index(engineIndex);
+                engine.index((Engine.Index) operation);
                 break;
             case DELETE:
                 engine.delete((Engine.Delete) operation);
@@ -122,13 +111,6 @@ private void applyOperation(Engine engine, Engine.Operation operation) throws IO
         }
     }
 
-    /**
-     * Returns the recovered types modifying the mapping during the recovery
-     */
-    public Map<String, Mapping> getRecoveredTypes() {
-        return recoveredTypes;
-    }
-
     @Override
     public int run(Engine engine, Translog.Snapshot snapshot) throws IOException {
         int opsRecovered = 0;
@@ -150,15 +132,8 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O
                 final Translog.Index index = (Translog.Index) operation;
                 final String indexName = mapperService.index().getName();
                 final Engine.Index engineIndex = IndexShard.prepareIndex(
-                    docMapper(index.type()),
-                    new SourceToParse(
-                        indexName,
-                        index.type(),
-                        index.id(),
-                        index.source(),
-                        XContentHelper.xContentType(index.source()),
-                        index.routing()
-                    ),
+                    docMapper(MapperService.SINGLE_MAPPING_NAME),
+                    new SourceToParse(indexName, index.id(), index.source(), XContentHelper.xContentType(index.source()), index.routing()),
                     index.seqNo(),
                     index.primaryTerm(),
                     index.version(),
@@ -173,7 +148,6 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O
             case DELETE:
                 final Translog.Delete delete = (Translog.Delete) operation;
                 final Engine.Delete engineDelete = new Engine.Delete(
-                    delete.type(),
                     delete.id(),
                     delete.uid(),
                     delete.seqNo(),
diff --git a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java
index fa0309ef165d4..03ac664da1734 100644
--- a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java
@@ -183,11 +183,11 @@ protected final SourceToParse source(CheckedConsumer<XContentBuilder, IOExceptio
         XContentBuilder builder = JsonXContent.contentBuilder().startObject();
         build.accept(builder);
         builder.endObject();
-        return new SourceToParse("test", "_doc", "1", BytesReference.bytes(builder), XContentType.JSON);
+        return new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON);
     }
 
     protected final SourceToParse source(String source) {
-        return new SourceToParse("test", "_doc", "1", new BytesArray(source), XContentType.JSON);
+        return new SourceToParse("test", "1", new BytesArray(source), XContentType.JSON);
     }
 
     /**
diff --git a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java
index dfc34add3863a..6c8e5a8c0a10f 100644
--- a/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/index/replication/OpenSearchIndexLevelReplicationTestCase.java
@@ -85,6 +85,7 @@
 import org.opensearch.index.engine.EngineConfigFactory;
 import org.opensearch.index.engine.EngineFactory;
 import org.opensearch.index.engine.InternalEngineFactory;
+import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.seqno.GlobalCheckpointSyncAction;
 import org.opensearch.index.seqno.RetentionLease;
 import org.opensearch.index.seqno.RetentionLeaseSyncAction;
@@ -108,7 +109,6 @@
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.Future;
@@ -128,7 +128,7 @@ public abstract class OpenSearchIndexLevelReplicationTestCase extends IndexShard
 
     protected final Index index = new Index("test", "uuid");
     private final ShardId shardId = new ShardId(index, 0);
-    protected final Map<String, String> indexMapping = Collections.singletonMap("type", "{ \"type\": {} }");
+    protected final String indexMapping = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": {} }";
 
     protected ReplicationGroup createGroup(int replicas) throws IOException {
         return createGroup(replicas, Settings.EMPTY);
@@ -143,11 +143,11 @@ protected IndexMetadata buildIndexMetadata(int replicas) throws IOException {
         return buildIndexMetadata(replicas, indexMapping);
     }
 
-    protected IndexMetadata buildIndexMetadata(int replicas, Map<String, String> mappings) throws IOException {
+    protected IndexMetadata buildIndexMetadata(int replicas, String mappings) throws IOException {
         return buildIndexMetadata(replicas, Settings.EMPTY, mappings);
     }
 
-    protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, Map<String, String> mappings) throws IOException {
+    protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, String mappings) throws IOException {
         Settings settings = Settings.builder()
             .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
             .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, replicas)
@@ -155,10 +155,11 @@ protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings,
             .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000))
             .put(indexSettings)
             .build();
-        IndexMetadata.Builder metadata = IndexMetadata.builder(index.getName()).settings(settings).primaryTerm(0, randomIntBetween(1, 100));
-        for (Map.Entry<String, String> typeMapping : mappings.entrySet()) {
-            metadata.putMapping(typeMapping.getKey(), typeMapping.getValue());
-        }
+        IndexMetadata.Builder metadata = IndexMetadata.builder(index.getName())
+            .settings(settings)
+            .putMapping(mappings)
+            .primaryTerm(0, randomIntBetween(1, 100));
+
         return metadata.build();
     }
 
diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java
index 09c5dfad486e9..6b18963056450 100644
--- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java
@@ -279,7 +279,7 @@ protected IndexShard newShard(
         IndexMetadata.Builder metadata = IndexMetadata.builder(shardRouting.getIndexName())
             .settings(indexSettings)
             .primaryTerm(0, primaryTerm)
-            .putMapping("_doc", "{ \"properties\": {} }");
+            .putMapping("{ \"properties\": {} }");
         return newShard(shardRouting, metadata.build(), null, engineFactory, () -> {}, RetentionLeaseSyncer.EMPTY, listeners);
     }
 
@@ -877,25 +877,12 @@ protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id)
     }
 
     protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, String source) throws IOException {
-        return indexDoc(shard, type, id, source, XContentType.JSON, null);
+        return indexDoc(shard, id, source, XContentType.JSON, null);
     }
 
-    protected Engine.IndexResult indexDoc(
-        IndexShard shard,
-        String type,
-        String id,
-        String source,
-        XContentType xContentType,
-        String routing
-    ) throws IOException {
-        SourceToParse sourceToParse = new SourceToParse(
-            shard.shardId().getIndexName(),
-            type,
-            id,
-            new BytesArray(source),
-            xContentType,
-            routing
-        );
+    protected Engine.IndexResult indexDoc(IndexShard shard, String id, String source, XContentType xContentType, String routing)
+        throws IOException {
+        SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), id, new BytesArray(source), xContentType, routing);
         Engine.IndexResult result;
         if (shard.routingEntry().primary()) {
             result = shard.applyIndexOperationOnPrimary(
@@ -911,7 +898,7 @@ protected Engine.IndexResult indexDoc(
                 updateMappings(
                     shard,
                     IndexMetadata.builder(shard.indexSettings().getIndexMetadata())
-                        .putMapping(type, result.getRequiredMappingUpdate().toString())
+                        .putMapping(result.getRequiredMappingUpdate().toString())
                         .build()
                 );
                 result = shard.applyIndexOperationOnPrimary(
@@ -956,12 +943,11 @@ protected void updateMappings(IndexShard shard, IndexMetadata indexMetadata) {
             );
     }
 
-    protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id) throws IOException {
+    protected Engine.DeleteResult deleteDoc(IndexShard shard, String id) throws IOException {
         final Engine.DeleteResult result;
         if (shard.routingEntry().primary()) {
             result = shard.applyDeleteOperationOnPrimary(
                 Versions.MATCH_ANY,
-                type,
                 id,
                 VersionType.INTERNAL,
                 SequenceNumbers.UNASSIGNED_SEQ_NO,
@@ -972,7 +958,7 @@ protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id
         } else {
             final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1;
             shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate max_seq_no_of_updates
-            result = shard.applyDeleteOperationOnReplica(seqNo, shard.getOperationPrimaryTerm(), 0L, type, id);
+            result = shard.applyDeleteOperationOnReplica(seqNo, shard.getOperationPrimaryTerm(), 0L, id);
             shard.sync(); // advance local checkpoint
         }
         return result;