diff --git a/src/coreclr/inc/clrconfigvalues.h b/src/coreclr/inc/clrconfigvalues.h
index 7f74a4fefcc10c..78a9400e8383ac 100644
--- a/src/coreclr/inc/clrconfigvalues.h
+++ b/src/coreclr/inc/clrconfigvalues.h
@@ -771,6 +771,7 @@ RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableAVX512F_VL, W("EnableAVX512F
 RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableAVX512VBMI, W("EnableAVX512VBMI"), 1, "Allows AVX512VBMI+ hardware intrinsics to be disabled")
 RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableAVX512VBMI_VL, W("EnableAVX512VBMI_VL"), 1, "Allows AVX512VBMI_VL+ hardware intrinsics to be disabled")
 RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableAVX10v1, W("EnableAVX10v1"), 1, "Allows AVX10v1+ hardware intrinsics to be disabled")
+RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableAVX10v2, W("EnableAVX10v2"), 1, "Allows AVX10v2+ hardware intrinsics to be disabled")
 RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableAVXVNNI, W("EnableAVXVNNI"), 1, "Allows AVXVNNI+ hardware intrinsics to be disabled")
 RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableBMI1, W("EnableBMI1"), 1, "Allows BMI1+ hardware intrinsics to be disabled")
 RETAIL_CONFIG_DWORD_INFO(EXTERNAL_EnableBMI2, W("EnableBMI2"), 1, "Allows BMI2+ hardware intrinsics to be disabled")
diff --git a/src/coreclr/inc/corinfoinstructionset.h b/src/coreclr/inc/corinfoinstructionset.h
index 90e9824f179b4a..a4b6846c3d420e 100644
--- a/src/coreclr/inc/corinfoinstructionset.h
+++ b/src/coreclr/inc/corinfoinstructionset.h
@@ -84,31 +84,35 @@ enum CORINFO_InstructionSet
     InstructionSet_VectorT256=39,
     InstructionSet_VectorT512=40,
     InstructionSet_APX=41,
-    InstructionSet_X86Base_X64=42,
-    InstructionSet_SSE_X64=43,
-    InstructionSet_SSE2_X64=44,
-    InstructionSet_SSE3_X64=45,
-    InstructionSet_SSSE3_X64=46,
-    InstructionSet_SSE41_X64=47,
-    InstructionSet_SSE42_X64=48,
-    InstructionSet_AVX_X64=49,
-    InstructionSet_AVX2_X64=50,
-    InstructionSet_AES_X64=51,
-    InstructionSet_BMI1_X64=52,
-    InstructionSet_BMI2_X64=53,
-    InstructionSet_FMA_X64=54,
-    InstructionSet_LZCNT_X64=55,
-    InstructionSet_PCLMULQDQ_X64=56,
-    InstructionSet_POPCNT_X64=57,
-    InstructionSet_AVXVNNI_X64=58,
-    InstructionSet_X86Serialize_X64=59,
-    InstructionSet_AVX512F_X64=60,
-    InstructionSet_AVX512BW_X64=61,
-    InstructionSet_AVX512CD_X64=62,
-    InstructionSet_AVX512DQ_X64=63,
-    InstructionSet_AVX512VBMI_X64=64,
-    InstructionSet_AVX10v1_X64=65,
-    InstructionSet_AVX10v1_V512_X64=66,
+    InstructionSet_AVX10v2=42,
+    InstructionSet_AVX10v2_V512=43,
+    InstructionSet_X86Base_X64=44,
+    InstructionSet_SSE_X64=45,
+    InstructionSet_SSE2_X64=46,
+    InstructionSet_SSE3_X64=47,
+    InstructionSet_SSSE3_X64=48,
+    InstructionSet_SSE41_X64=49,
+    InstructionSet_SSE42_X64=50,
+    InstructionSet_AVX_X64=51,
+    InstructionSet_AVX2_X64=52,
+    InstructionSet_AES_X64=53,
+    InstructionSet_BMI1_X64=54,
+    InstructionSet_BMI2_X64=55,
+    InstructionSet_FMA_X64=56,
+    InstructionSet_LZCNT_X64=57,
+    InstructionSet_PCLMULQDQ_X64=58,
+    InstructionSet_POPCNT_X64=59,
+    InstructionSet_AVXVNNI_X64=60,
+    InstructionSet_X86Serialize_X64=61,
+    InstructionSet_AVX512F_X64=62,
+    InstructionSet_AVX512BW_X64=63,
+    InstructionSet_AVX512CD_X64=64,
+    InstructionSet_AVX512DQ_X64=65,
+    InstructionSet_AVX512VBMI_X64=66,
+    InstructionSet_AVX10v1_X64=67,
+    InstructionSet_AVX10v1_V512_X64=68,
+    InstructionSet_AVX10v2_X64=69,
+    InstructionSet_AVX10v2_V512_X64=70,
 #endif // TARGET_AMD64
 #ifdef TARGET_X86
     InstructionSet_X86Base=1,
@@ -152,31 +156,35 @@ enum CORINFO_InstructionSet
     InstructionSet_VectorT256=39,
     InstructionSet_VectorT512=40,
     InstructionSet_APX=41,
-    InstructionSet_X86Base_X64=42,
-    InstructionSet_SSE_X64=43,
-    InstructionSet_SSE2_X64=44,
-    InstructionSet_SSE3_X64=45,
-    InstructionSet_SSSE3_X64=46,
-    InstructionSet_SSE41_X64=47,
-    InstructionSet_SSE42_X64=48,
-    InstructionSet_AVX_X64=49,
-    InstructionSet_AVX2_X64=50,
-    InstructionSet_AES_X64=51,
-    InstructionSet_BMI1_X64=52,
-    InstructionSet_BMI2_X64=53,
-    InstructionSet_FMA_X64=54,
-    InstructionSet_LZCNT_X64=55,
-    InstructionSet_PCLMULQDQ_X64=56,
-    InstructionSet_POPCNT_X64=57,
-    InstructionSet_AVXVNNI_X64=58,
-    InstructionSet_X86Serialize_X64=59,
-    InstructionSet_AVX512F_X64=60,
-    InstructionSet_AVX512BW_X64=61,
-    InstructionSet_AVX512CD_X64=62,
-    InstructionSet_AVX512DQ_X64=63,
-    InstructionSet_AVX512VBMI_X64=64,
-    InstructionSet_AVX10v1_X64=65,
-    InstructionSet_AVX10v1_V512_X64=66,
+    InstructionSet_AVX10v2=42,
+    InstructionSet_AVX10v2_V512=43,
+    InstructionSet_X86Base_X64=44,
+    InstructionSet_SSE_X64=45,
+    InstructionSet_SSE2_X64=46,
+    InstructionSet_SSE3_X64=47,
+    InstructionSet_SSSE3_X64=48,
+    InstructionSet_SSE41_X64=49,
+    InstructionSet_SSE42_X64=50,
+    InstructionSet_AVX_X64=51,
+    InstructionSet_AVX2_X64=52,
+    InstructionSet_AES_X64=53,
+    InstructionSet_BMI1_X64=54,
+    InstructionSet_BMI2_X64=55,
+    InstructionSet_FMA_X64=56,
+    InstructionSet_LZCNT_X64=57,
+    InstructionSet_PCLMULQDQ_X64=58,
+    InstructionSet_POPCNT_X64=59,
+    InstructionSet_AVXVNNI_X64=60,
+    InstructionSet_X86Serialize_X64=61,
+    InstructionSet_AVX512F_X64=62,
+    InstructionSet_AVX512BW_X64=63,
+    InstructionSet_AVX512CD_X64=64,
+    InstructionSet_AVX512DQ_X64=65,
+    InstructionSet_AVX512VBMI_X64=66,
+    InstructionSet_AVX10v1_X64=67,
+    InstructionSet_AVX10v1_V512_X64=68,
+    InstructionSet_AVX10v2_X64=69,
+    InstructionSet_AVX10v2_V512_X64=70,
 #endif // TARGET_X86
 };

@@ -342,6 +350,10 @@ struct CORINFO_InstructionSetFlags
             AddInstructionSet(InstructionSet_AVX10v1_X64);
         if (HasInstructionSet(InstructionSet_AVX10v1_V512))
             AddInstructionSet(InstructionSet_AVX10v1_V512_X64);
+        if (HasInstructionSet(InstructionSet_AVX10v2))
+            AddInstructionSet(InstructionSet_AVX10v2_X64);
+        if (HasInstructionSet(InstructionSet_AVX10v2_V512))
+            AddInstructionSet(InstructionSet_AVX10v2_V512_X64);
 #endif // TARGET_AMD64
 #ifdef TARGET_X86
 #endif // TARGET_X86
@@ -522,6 +534,14 @@ inline CORINFO_InstructionSetFlags EnsureInstructionSetFlagsAreValid(CORINFO_Ins
         resultflags.RemoveInstructionSet(InstructionSet_AVX10v1_V512);
     if (resultflags.HasInstructionSet(InstructionSet_AVX10v1_V512_X64) && !resultflags.HasInstructionSet(InstructionSet_AVX10v1_V512))
         resultflags.RemoveInstructionSet(InstructionSet_AVX10v1_V512_X64);
+    if (resultflags.HasInstructionSet(InstructionSet_AVX10v2) && !resultflags.HasInstructionSet(InstructionSet_AVX10v2_X64))
+        resultflags.RemoveInstructionSet(InstructionSet_AVX10v2);
+    if (resultflags.HasInstructionSet(InstructionSet_AVX10v2_X64) && !resultflags.HasInstructionSet(InstructionSet_AVX10v2))
+        resultflags.RemoveInstructionSet(InstructionSet_AVX10v2_X64);
+    if (resultflags.HasInstructionSet(InstructionSet_AVX10v2_V512) && !resultflags.HasInstructionSet(InstructionSet_AVX10v2_V512_X64))
+        resultflags.RemoveInstructionSet(InstructionSet_AVX10v2_V512);
+    if (resultflags.HasInstructionSet(InstructionSet_AVX10v2_V512_X64) && !resultflags.HasInstructionSet(InstructionSet_AVX10v2_V512))
+        resultflags.RemoveInstructionSet(InstructionSet_AVX10v2_V512_X64);
     if (resultflags.HasInstructionSet(InstructionSet_SSE) && !resultflags.HasInstructionSet(InstructionSet_X86Base))
         resultflags.RemoveInstructionSet(InstructionSet_SSE);
     if (resultflags.HasInstructionSet(InstructionSet_SSE2) && !resultflags.HasInstructionSet(InstructionSet_SSE))
@@ -622,6 +642,10 @@ inline CORINFO_InstructionSetFlags EnsureInstructionSetFlagsAreValid(CORINFO_Ins
         resultflags.RemoveInstructionSet(InstructionSet_AVX10v1_V512);
     if (resultflags.HasInstructionSet(InstructionSet_AVX10v1_V512) && !resultflags.HasInstructionSet(InstructionSet_AVX512VBMI_VL))
         resultflags.RemoveInstructionSet(InstructionSet_AVX10v1_V512);
+    if (resultflags.HasInstructionSet(InstructionSet_AVX10v2) && !resultflags.HasInstructionSet(InstructionSet_AVX10v1))
+        resultflags.RemoveInstructionSet(InstructionSet_AVX10v2);
+    if (resultflags.HasInstructionSet(InstructionSet_AVX10v2_V512) && !resultflags.HasInstructionSet(InstructionSet_AVX10v1_V512))
+        resultflags.RemoveInstructionSet(InstructionSet_AVX10v2_V512);
     if (resultflags.HasInstructionSet(InstructionSet_Vector128) && !resultflags.HasInstructionSet(InstructionSet_SSE))
         resultflags.RemoveInstructionSet(InstructionSet_Vector128);
     if (resultflags.HasInstructionSet(InstructionSet_Vector256) && !resultflags.HasInstructionSet(InstructionSet_AVX))
@@ -736,6 +760,10 @@ inline CORINFO_InstructionSetFlags EnsureInstructionSetFlagsAreValid(CORINFO_Ins
         resultflags.RemoveInstructionSet(InstructionSet_AVX10v1_V512);
     if (resultflags.HasInstructionSet(InstructionSet_AVX10v1_V512) && !resultflags.HasInstructionSet(InstructionSet_AVX512VBMI_VL))
         resultflags.RemoveInstructionSet(InstructionSet_AVX10v1_V512);
+    if (resultflags.HasInstructionSet(InstructionSet_AVX10v2) && !resultflags.HasInstructionSet(InstructionSet_AVX10v1))
+        resultflags.RemoveInstructionSet(InstructionSet_AVX10v2);
+    if (resultflags.HasInstructionSet(InstructionSet_AVX10v2_V512) && !resultflags.HasInstructionSet(InstructionSet_AVX10v1_V512))
+        resultflags.RemoveInstructionSet(InstructionSet_AVX10v2_V512);
     if (resultflags.HasInstructionSet(InstructionSet_Vector128) && !resultflags.HasInstructionSet(InstructionSet_SSE))
         resultflags.RemoveInstructionSet(InstructionSet_Vector128);
     if (resultflags.HasInstructionSet(InstructionSet_Vector256) && !resultflags.HasInstructionSet(InstructionSet_AVX))
@@ -948,6 +976,14 @@ inline const char *InstructionSetToString(CORINFO_InstructionSet instructionSet)
             return "VectorT512";
         case InstructionSet_APX :
             return "APX";
+        case InstructionSet_AVX10v2 :
+            return "AVX10v2";
+        case InstructionSet_AVX10v2_X64 :
+            return "AVX10v2_X64";
+        case InstructionSet_AVX10v2_V512 :
+            return "AVX10v2_V512";
+        case InstructionSet_AVX10v2_V512_X64 :
+            return "AVX10v2_V512_X64";
 #endif // TARGET_AMD64
 #ifdef TARGET_X86
         case InstructionSet_X86Base :
@@ -1032,6 +1068,10 @@ inline const char *InstructionSetToString(CORINFO_InstructionSet instructionSet)
             return "VectorT512";
         case InstructionSet_APX :
             return "APX";
+        case InstructionSet_AVX10v2 :
+            return "AVX10v2";
+        case InstructionSet_AVX10v2_V512 :
+            return "AVX10v2_V512";
 #endif // TARGET_X86

         default:
@@ -1105,6 +1145,8 @@ inline CORINFO_InstructionSet InstructionSetFromR2RInstructionSet(ReadyToRunInst
         case READYTORUN_INSTRUCTION_VectorT256: return InstructionSet_VectorT256;
         case READYTORUN_INSTRUCTION_VectorT512: return InstructionSet_VectorT512;
         case READYTORUN_INSTRUCTION_Apx: return InstructionSet_APX;
+        case READYTORUN_INSTRUCTION_Avx10v2: return InstructionSet_AVX10v2;
+        case READYTORUN_INSTRUCTION_Avx10v2_V512: return InstructionSet_AVX10v2_V512;
 #endif // TARGET_AMD64
 #ifdef TARGET_X86
         case READYTORUN_INSTRUCTION_X86Base: return InstructionSet_X86Base;
@@ -1145,6 +1187,8 @@ inline CORINFO_InstructionSet InstructionSetFromR2RInstructionSet(ReadyToRunInst
         case READYTORUN_INSTRUCTION_VectorT256: return InstructionSet_VectorT256;
        case READYTORUN_INSTRUCTION_VectorT512: return InstructionSet_VectorT512;
         case READYTORUN_INSTRUCTION_Apx: return InstructionSet_APX;
+        case READYTORUN_INSTRUCTION_Avx10v2: return InstructionSet_AVX10v2;
+        case READYTORUN_INSTRUCTION_Avx10v2_V512: return InstructionSet_AVX10v2_V512;
 #endif // TARGET_X86

         default:
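The generated checks above boil down to a small set of implication rules: AVX10v2 requires AVX10v1, AVX10v2_V512 requires AVX10v1_V512, and each _X64 variant must travel with its base ISA. As a minimal sketch of that validation pass in isolation (illustrative names and string keys only; the real code is generated from InstructionSetDesc.txt and works on bit flags):

using System;
using System.Collections.Generic;

// Illustrative model of EnsureInstructionSetFlagsAreValid-style pruning,
// restricted to the AVX10v2 rules added by this change.
static class IsaImplicationSketch
{
    // Direct prerequisites taken from the diff above.
    static readonly Dictionary<string, string[]> Prerequisites = new()
    {
        ["AVX10v2"]          = new[] { "AVX10v1" },
        ["AVX10v2_V512"]     = new[] { "AVX10v1_V512" },
        ["AVX10v2_X64"]      = new[] { "AVX10v2" },
        ["AVX10v2_V512_X64"] = new[] { "AVX10v2_V512" },
    };

    // Repeatedly drop any ISA whose prerequisites are not enabled.
    public static HashSet<string> RemoveUnsupported(HashSet<string> enabled)
    {
        bool changed = true;
        while (changed)
        {
            changed = false;
            foreach (var (isa, required) in Prerequisites)
            {
                if (enabled.Contains(isa) && Array.Exists(required, r => !enabled.Contains(r)))
                {
                    enabled.Remove(isa);
                    changed = true;
                }
            }
        }
        return enabled;
    }

    static void Main()
    {
        // AVX10v1 is missing, so AVX10v2 (and its _X64 variant) get dropped.
        var pruned = RemoveUnsupported(new HashSet<string> { "AVX10v2", "AVX10v2_X64" });
        Console.WriteLine(pruned.Count); // 0
    }
}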
diff --git a/src/coreclr/inc/jiteeversionguid.h b/src/coreclr/inc/jiteeversionguid.h
index 7f4ed543df2f56..2f0b67174805a3 100644
--- a/src/coreclr/inc/jiteeversionguid.h
+++ b/src/coreclr/inc/jiteeversionguid.h
@@ -43,11 +43,11 @@ typedef const GUID *LPCGUID;
 #define GUID_DEFINED
 #endif // !GUID_DEFINED

-constexpr GUID JITEEVersionIdentifier = { /* 9014d652-5dc7-4edf-9285-6644d0898fb5 */
-    0x9014d652,
-    0x5dc7,
-    0x4edf,
-    {0x92, 0x85, 0x66, 0x44, 0xd0, 0x89, 0x8f, 0xb5}
+constexpr GUID JITEEVersionIdentifier = { /* 9ed85c09-d33d-4855-80ea-e3b7330e8173 */
+    0x9ed85c09,
+    0xd33d,
+    0x4855,
+    {0x80, 0xea, 0xe3, 0xb7, 0x33, 0x0e, 0x81, 0x73}
 };

 //////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/coreclr/inc/readytoruninstructionset.h b/src/coreclr/inc/readytoruninstructionset.h
index 2da697a62394f0..6b7256163b77c7 100644
--- a/src/coreclr/inc/readytoruninstructionset.h
+++ b/src/coreclr/inc/readytoruninstructionset.h
@@ -58,6 +58,8 @@ enum ReadyToRunInstructionSet
     READYTORUN_INSTRUCTION_Apx=48,
     READYTORUN_INSTRUCTION_Pclmulqdq_V256=49,
     READYTORUN_INSTRUCTION_Pclmulqdq_V512=50,
+    READYTORUN_INSTRUCTION_Avx10v2=51,
+    READYTORUN_INSTRUCTION_Avx10v2_V512=52,

 };

diff --git a/src/coreclr/jit/hwintrinsic.cpp b/src/coreclr/jit/hwintrinsic.cpp
index 84f5c01920fe65..e598d774d9dd9f 100644
--- a/src/coreclr/jit/hwintrinsic.cpp
+++ b/src/coreclr/jit/hwintrinsic.cpp
@@ -807,6 +807,8 @@ static const HWIntrinsicIsaRange hwintrinsicIsaRangeArray[] = {
     { NI_Illegal, NI_Illegal },                     // VectorT256
     { NI_Illegal, NI_Illegal },                     // VectorT512
     { NI_Illegal, NI_Illegal },                     // APX
+    { NI_Illegal, NI_Illegal },                     // AVX10v2
+    { NI_Illegal, NI_Illegal },                     // AVX10v2_V512
     { FIRST_NI_X86Base_X64, LAST_NI_X86Base_X64 },
     { FIRST_NI_SSE_X64, LAST_NI_SSE_X64 },
     { FIRST_NI_SSE2_X64, LAST_NI_SSE2_X64 },
@@ -832,6 +834,8 @@ static const HWIntrinsicIsaRange hwintrinsicIsaRangeArray[] = {
     { NI_Illegal, NI_Illegal },                     // AVX512VBMI_X64
     { FIRST_NI_AVX10v1_X64, LAST_NI_AVX10v1_X64 },
     { NI_Illegal, NI_Illegal },                     // AVX10v1_V512_X64
+    { NI_Illegal, NI_Illegal },                     // AVX10v2_X64
+    { NI_Illegal, NI_Illegal },                     // AVX10v2_V512_X64
 #elif defined (TARGET_ARM64)
     { FIRST_NI_ArmBase, LAST_NI_ArmBase },
     { FIRST_NI_AdvSimd, LAST_NI_AdvSimd },
diff --git a/src/coreclr/jit/jitconfigvalues.h b/src/coreclr/jit/jitconfigvalues.h
index c964554667abf2..36162934bdf1bb 100644
--- a/src/coreclr/jit/jitconfigvalues.h
+++ b/src/coreclr/jit/jitconfigvalues.h
@@ -400,6 +400,7 @@ RELEASE_CONFIG_INTEGER(EnableAVX512F_VL, "EnableAVX512F_VL",
 RELEASE_CONFIG_INTEGER(EnableAVX512VBMI, "EnableAVX512VBMI", 1) // Allows AVX512VBMI+ hardware intrinsics to be disabled
 RELEASE_CONFIG_INTEGER(EnableAVX512VBMI_VL, "EnableAVX512VBMI_VL", 1) // Allows AVX512VBMI_VL+ hardware intrinsics to be disabled
 RELEASE_CONFIG_INTEGER(EnableAVX10v1, "EnableAVX10v1", 1) // Allows AVX10v1+ hardware intrinsics to be disabled
+RELEASE_CONFIG_INTEGER(EnableAVX10v2, "EnableAVX10v2", 1) // Allows AVX10v2+ hardware intrinsics to be disabled
 RELEASE_CONFIG_INTEGER(EnableAVXVNNI, "EnableAVXVNNI", 1) // Allows AVXVNNI+ hardware intrinsics to be disabled
 RELEASE_CONFIG_INTEGER(EnableBMI1, "EnableBMI1", 1) // Allows BMI1+ hardware intrinsics to be disabled
 RELEASE_CONFIG_INTEGER(EnableBMI2, "EnableBMI2", 1) // Allows BMI2+ hardware intrinsics to be disabled
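Both config entries above surface as the usual DOTNET_EnableAVX10v2 knob, so setting DOTNET_EnableAVX10v2=0 keeps the runtime from reporting the ISA even on capable hardware. A hedged sketch of the guarded-code pattern this feeds into follows; it uses the existing Avx10v1/Avx512F intrinsic classes, since a managed Avx10v2 class is not part of this diff and is only an assumption about a later API addition.

using System;
using System.Runtime.Intrinsics.X86;

// ISA-guarded code paths: when a knob such as DOTNET_EnableAVX10v2=0 (or
// DOTNET_EnableAVX10v1=0) turns an ISA off, IsSupported folds to false and the
// JIT drops the guarded branch entirely.
class IsaGuardSample
{
    static void Main()
    {
        if (Avx10v1.V512.IsSupported)        // an Avx10v2 class would slot in the same way
        {
            Console.WriteLine("AVX10.1/512 path");
        }
        else if (Avx512F.IsSupported)
        {
            Console.WriteLine("AVX-512 path");
        }
        else
        {
            Console.WriteLine("Fallback path");
        }
    }
}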
diff --git a/src/coreclr/tools/Common/Compiler/HardwareIntrinsicHelpers.cs b/src/coreclr/tools/Common/Compiler/HardwareIntrinsicHelpers.cs
index 3d0e4ba886bb42..119cf6a1e6cff7 100644
--- a/src/coreclr/tools/Common/Compiler/HardwareIntrinsicHelpers.cs
+++ b/src/coreclr/tools/Common/Compiler/HardwareIntrinsicHelpers.cs
@@ -80,6 +80,7 @@ private static class XArchIntrinsicConstants
             public const int Evex = 0x80000;
             public const int Apx = 0x100000;
             public const int Vpclmulqdq = 0x200000;
+            public const int Avx10v2 = 0x400000;

             public static void AddToBuilder(InstructionSetSupportBuilder builder, int flags)
             {
@@ -145,6 +146,10 @@ public static void AddToBuilder(InstructionSetSupportBuilder builder, int flags)
                     if ((flags & Avx512) != 0)
                         builder.AddSupportedInstructionSet("vpclmul_v512");
                 }
+                if ((flags & Avx10v2) != 0)
+                    builder.AddSupportedInstructionSet("avx10v2");
+                if (((flags & Avx10v2) != 0) && ((flags & Avx512) != 0))
+                    builder.AddSupportedInstructionSet("avx10v2_v512");
             }

             public static int FromInstructionSet(InstructionSet instructionSet)
@@ -210,6 +215,10 @@ public static int FromInstructionSet(InstructionSet instructionSet)
                     InstructionSet.X64_APX => Apx,
                     InstructionSet.X64_PCLMULQDQ_V256 => Vpclmulqdq,
                     InstructionSet.X64_PCLMULQDQ_V512 => (Vpclmulqdq | Avx512),
+                    InstructionSet.X64_AVX10v2 => Avx10v2,
+                    InstructionSet.X64_AVX10v2_X64 => Avx10v2,
+                    InstructionSet.X64_AVX10v2_V512 => (Avx10v2 | Avx512),
+                    InstructionSet.X64_AVX10v2_V512_X64 => (Avx10v2 | Avx512),

                     // Baseline ISAs - they're always available
                     InstructionSet.X64_SSE => 0,
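As the mapping above shows, Avx10v2_V512 is one of the few ISAs represented by a multi-bit mask (Avx10v2 | Avx512), which is why the AOT helper later in this diff emits "(isSupportedField & flag) == flag" instead of a single-bit test. A small self-contained sketch of that check is below; only the Avx10v2 bit (0x400000) comes from the diff, the Avx512 value here is illustrative.

using System;

// Minimal model of the multi-bit ISA flag check.
static class MultiBitFlagSketch
{
    const int Avx512  = 0x10000;   // illustrative placeholder, not the real CoreCLR value
    const int Avx10v2 = 0x400000;  // from XArchIntrinsicConstants in this diff

    // Equivalent of the emitted IL pattern "(isSupportedField & flag) == flag".
    static bool IsSupported(int cpuFlags, int isaMask) => (cpuFlags & isaMask) == isaMask;

    static void Main()
    {
        int avx10v2Only       = Avx10v2;
        int avx10v2WithAvx512 = Avx10v2 | Avx512;
        int avx10v2V512Mask   = Avx10v2 | Avx512;

        // Avx10v2_V512 needs both bits, so only the second CPU reports it.
        Console.WriteLine(IsSupported(avx10v2Only, avx10v2V512Mask));        // False
        Console.WriteLine(IsSupported(avx10v2WithAvx512, avx10v2V512Mask));  // True
    }
}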
diff --git a/src/coreclr/tools/Common/Compiler/InstructionSetSupport.cs b/src/coreclr/tools/Common/Compiler/InstructionSetSupport.cs
index 6afe17d2c1cdc5..0c45280a51cca9 100644
--- a/src/coreclr/tools/Common/Compiler/InstructionSetSupport.cs
+++ b/src/coreclr/tools/Common/Compiler/InstructionSetSupport.cs
@@ -360,6 +360,11 @@ public bool ComputeInstructionSetFlags(int maxVectorTBitWidth,
                 if (_supportedInstructionSets.Contains("vpclmul"))
                     _supportedInstructionSets.Add("vpclmul_v512");
+
+                // Having AVX10v2 enabled alongside any AVX-512 instruction set
+                // automatically implies AVX10v2_V512 as well.
+                if (_supportedInstructionSets.Contains("avx10v2"))
+                    _supportedInstructionSets.Add("avx10v2_v512");
             }

             foreach (string supported in _supportedInstructionSets)
diff --git a/src/coreclr/tools/Common/InstructionSetHelpers.cs b/src/coreclr/tools/Common/InstructionSetHelpers.cs
index 95811afd7fbcdb..d412ee55988810 100644
--- a/src/coreclr/tools/Common/InstructionSetHelpers.cs
+++ b/src/coreclr/tools/Common/InstructionSetHelpers.cs
@@ -228,6 +228,8 @@ public static InstructionSetSupport ConfigureInstructionSetSupport(string instru
                     optimisticInstructionSetSupportBuilder.AddSupportedInstructionSet("avx10v1");
                     optimisticInstructionSetSupportBuilder.AddSupportedInstructionSet("avx10v1_v512");
                     optimisticInstructionSetSupportBuilder.AddSupportedInstructionSet("vpclmul_v512");
+                    optimisticInstructionSetSupportBuilder.AddSupportedInstructionSet("avx10v2");
+                    optimisticInstructionSetSupportBuilder.AddSupportedInstructionSet("avx10v2_v512");
                 }
             }
             else if (targetArchitecture == TargetArchitecture.ARM64)
diff --git a/src/coreclr/tools/Common/Internal/Runtime/ReadyToRunInstructionSet.cs b/src/coreclr/tools/Common/Internal/Runtime/ReadyToRunInstructionSet.cs
index eab9b4584433e9..afcfd808bffac8 100644
--- a/src/coreclr/tools/Common/Internal/Runtime/ReadyToRunInstructionSet.cs
+++ b/src/coreclr/tools/Common/Internal/Runtime/ReadyToRunInstructionSet.cs
@@ -61,6 +61,8 @@ public enum ReadyToRunInstructionSet
         Apx=48,
         Pclmulqdq_V256=49,
         Pclmulqdq_V512=50,
+        Avx10v2=51,
+        Avx10v2_V512=52,

    }
}
diff --git a/src/coreclr/tools/Common/Internal/Runtime/ReadyToRunInstructionSetHelper.cs b/src/coreclr/tools/Common/Internal/Runtime/ReadyToRunInstructionSetHelper.cs
index 9e97bc9dc991a6..9c4042e255b41e 100644
--- a/src/coreclr/tools/Common/Internal/Runtime/ReadyToRunInstructionSetHelper.cs
+++ b/src/coreclr/tools/Common/Internal/Runtime/ReadyToRunInstructionSetHelper.cs
@@ -123,6 +123,10 @@ public static class ReadyToRunInstructionSetHelper
                     case InstructionSet.X64_VectorT256: return ReadyToRunInstructionSet.VectorT256;
                     case InstructionSet.X64_VectorT512: return ReadyToRunInstructionSet.VectorT512;
                     case InstructionSet.X64_APX: return ReadyToRunInstructionSet.Apx;
+                    case InstructionSet.X64_AVX10v2: return ReadyToRunInstructionSet.Avx10v2;
+                    case InstructionSet.X64_AVX10v2_X64: return ReadyToRunInstructionSet.Avx10v2;
+                    case InstructionSet.X64_AVX10v2_V512: return ReadyToRunInstructionSet.Avx10v2_V512;
+                    case InstructionSet.X64_AVX10v2_V512_X64: return ReadyToRunInstructionSet.Avx10v2_V512;

                     default: throw new Exception("Unknown instruction set");
                 }
@@ -198,6 +202,10 @@ public static class ReadyToRunInstructionSetHelper
                     case InstructionSet.X86_VectorT256: return ReadyToRunInstructionSet.VectorT256;
                     case InstructionSet.X86_VectorT512: return ReadyToRunInstructionSet.VectorT512;
                     case InstructionSet.X86_APX: return ReadyToRunInstructionSet.Apx;
+                    case InstructionSet.X86_AVX10v2: return ReadyToRunInstructionSet.Avx10v2;
+                    case InstructionSet.X86_AVX10v2_X64: return null;
+                    case InstructionSet.X86_AVX10v2_V512: return ReadyToRunInstructionSet.Avx10v2_V512;
+                    case InstructionSet.X86_AVX10v2_V512_X64: return null;

                     default: throw new Exception("Unknown instruction set");
                 }
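The helper above folds each _X64 variant onto the same ReadyToRun value as its base ISA, and onto null on x86, where the 64-bit-only variants have no encoding. The sketch below restates that mapping in isolation so the collapsing is easy to see; the enums are local stand-ins, not the compiler's types.

using System;

// Local stand-ins mirroring the shape of the mapping above.
enum Isa { Avx10v2, Avx10v2_X64, Avx10v2_V512, Avx10v2_V512_X64 }
enum R2RIsa { Avx10v2 = 51, Avx10v2_V512 = 52 }

static class R2RMappingSketch
{
    // Base ISA and its _X64 variant encode as the same R2R fixup; the _X64
    // variants simply do not exist as fixups when the target is 32-bit.
    public static R2RIsa? ToReadyToRun(Isa isa, bool is64BitTarget) => isa switch
    {
        Isa.Avx10v2          => R2RIsa.Avx10v2,
        Isa.Avx10v2_X64      => is64BitTarget ? R2RIsa.Avx10v2 : (R2RIsa?)null,
        Isa.Avx10v2_V512     => R2RIsa.Avx10v2_V512,
        Isa.Avx10v2_V512_X64 => is64BitTarget ? R2RIsa.Avx10v2_V512 : (R2RIsa?)null,
        _ => throw new ArgumentOutOfRangeException(nameof(isa)),
    };

    static void Main()
    {
        Console.WriteLine(ToReadyToRun(Isa.Avx10v2_V512_X64, is64BitTarget: true));          // Avx10v2_V512
        Console.WriteLine(ToReadyToRun(Isa.Avx10v2_V512_X64, is64BitTarget: false) is null); // True
    }
}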
diff --git a/src/coreclr/tools/Common/JitInterface/CorInfoInstructionSet.cs b/src/coreclr/tools/Common/JitInterface/CorInfoInstructionSet.cs
index 7c3d68eedad6bf..18d9c54a11ec49 100644
--- a/src/coreclr/tools/Common/JitInterface/CorInfoInstructionSet.cs
+++ b/src/coreclr/tools/Common/JitInterface/CorInfoInstructionSet.cs
@@ -82,6 +82,8 @@ public enum InstructionSet
         X64_VectorT256 = InstructionSet_X64.VectorT256,
         X64_VectorT512 = InstructionSet_X64.VectorT512,
         X64_APX = InstructionSet_X64.APX,
+        X64_AVX10v2 = InstructionSet_X64.AVX10v2,
+        X64_AVX10v2_V512 = InstructionSet_X64.AVX10v2_V512,
         X64_X86Base_X64 = InstructionSet_X64.X86Base_X64,
         X64_SSE_X64 = InstructionSet_X64.SSE_X64,
         X64_SSE2_X64 = InstructionSet_X64.SSE2_X64,
@@ -107,6 +109,8 @@ public enum InstructionSet
         X64_AVX512VBMI_X64 = InstructionSet_X64.AVX512VBMI_X64,
         X64_AVX10v1_X64 = InstructionSet_X64.AVX10v1_X64,
         X64_AVX10v1_V512_X64 = InstructionSet_X64.AVX10v1_V512_X64,
+        X64_AVX10v2_X64 = InstructionSet_X64.AVX10v2_X64,
+        X64_AVX10v2_V512_X64 = InstructionSet_X64.AVX10v2_V512_X64,
         X86_X86Base = InstructionSet_X86.X86Base,
         X86_SSE = InstructionSet_X86.SSE,
         X86_SSE2 = InstructionSet_X86.SSE2,
@@ -148,6 +152,8 @@ public enum InstructionSet
         X86_VectorT256 = InstructionSet_X86.VectorT256,
         X86_VectorT512 = InstructionSet_X86.VectorT512,
         X86_APX = InstructionSet_X86.APX,
+        X86_AVX10v2 = InstructionSet_X86.AVX10v2,
+        X86_AVX10v2_V512 = InstructionSet_X86.AVX10v2_V512,
         X86_X86Base_X64 = InstructionSet_X86.X86Base_X64,
         X86_SSE_X64 = InstructionSet_X86.SSE_X64,
         X86_SSE2_X64 = InstructionSet_X86.SSE2_X64,
@@ -173,6 +179,8 @@ public enum InstructionSet
         X86_AVX512VBMI_X64 = InstructionSet_X86.AVX512VBMI_X64,
         X86_AVX10v1_X64 = InstructionSet_X86.AVX10v1_X64,
         X86_AVX10v1_V512_X64 = InstructionSet_X86.AVX10v1_V512_X64,
+        X86_AVX10v2_X64 = InstructionSet_X86.AVX10v2_X64,
+        X86_AVX10v2_V512_X64 = InstructionSet_X86.AVX10v2_V512_X64,
     }

     public enum InstructionSet_ARM64
     {
@@ -250,31 +258,35 @@ public enum InstructionSet_X64
         VectorT256 = 39,
         VectorT512 = 40,
         APX = 41,
-        X86Base_X64 = 42,
-        SSE_X64 = 43,
-        SSE2_X64 = 44,
-        SSE3_X64 = 45,
-        SSSE3_X64 = 46,
-        SSE41_X64 = 47,
-        SSE42_X64 = 48,
-        AVX_X64 = 49,
-        AVX2_X64 = 50,
-        AES_X64 = 51,
-        BMI1_X64 = 52,
-        BMI2_X64 = 53,
-        FMA_X64 = 54,
-        LZCNT_X64 = 55,
-        PCLMULQDQ_X64 = 56,
-        POPCNT_X64 = 57,
-        AVXVNNI_X64 = 58,
-        X86Serialize_X64 = 59,
-        AVX512F_X64 = 60,
-        AVX512BW_X64 = 61,
-        AVX512CD_X64 = 62,
-        AVX512DQ_X64 = 63,
-        AVX512VBMI_X64 = 64,
-        AVX10v1_X64 = 65,
-        AVX10v1_V512_X64 = 66,
+        AVX10v2 = 42,
+        AVX10v2_V512 = 43,
+        X86Base_X64 = 44,
+        SSE_X64 = 45,
+        SSE2_X64 = 46,
+        SSE3_X64 = 47,
+        SSSE3_X64 = 48,
+        SSE41_X64 = 49,
+        SSE42_X64 = 50,
+        AVX_X64 = 51,
+        AVX2_X64 = 52,
+        AES_X64 = 53,
+        BMI1_X64 = 54,
+        BMI2_X64 = 55,
+        FMA_X64 = 56,
+        LZCNT_X64 = 57,
+        PCLMULQDQ_X64 = 58,
+        POPCNT_X64 = 59,
+        AVXVNNI_X64 = 60,
+        X86Serialize_X64 = 61,
+        AVX512F_X64 = 62,
+        AVX512BW_X64 = 63,
+        AVX512CD_X64 = 64,
+        AVX512DQ_X64 = 65,
+        AVX512VBMI_X64 = 66,
+        AVX10v1_X64 = 67,
+        AVX10v1_V512_X64 = 68,
+        AVX10v2_X64 = 69,
+        AVX10v2_V512_X64 = 70,
     }

     public enum InstructionSet_X86
@@ -322,31 +334,35 @@ public enum InstructionSet_X86
         VectorT256 = 39,
         VectorT512 = 40,
         APX = 41,
-        X86Base_X64 = 42,
-        SSE_X64 = 43,
-        SSE2_X64 = 44,
-        SSE3_X64 = 45,
-        SSSE3_X64 = 46,
-        SSE41_X64 = 47,
-        SSE42_X64 = 48,
-        AVX_X64 = 49,
-        AVX2_X64 = 50,
-        AES_X64 = 51,
-        BMI1_X64 = 52,
-        BMI2_X64 = 53,
-        FMA_X64 = 54,
-        LZCNT_X64 = 55,
-        PCLMULQDQ_X64 = 56,
-        POPCNT_X64 = 57,
-        AVXVNNI_X64 = 58,
-        X86Serialize_X64 = 59,
-        AVX512F_X64 = 60,
-        AVX512BW_X64 = 61,
-        AVX512CD_X64 = 62,
-        AVX512DQ_X64 = 63,
-        AVX512VBMI_X64 = 64,
-        AVX10v1_X64 = 65,
-        AVX10v1_V512_X64 = 66,
+        AVX10v2 = 42,
+        AVX10v2_V512 = 43,
+        X86Base_X64 = 44,
+        SSE_X64 = 45,
+        SSE2_X64 = 46,
+        SSE3_X64 = 47,
+        SSSE3_X64 = 48,
+        SSE41_X64 = 49,
+        SSE42_X64 = 50,
+        AVX_X64 = 51,
+        AVX2_X64 = 52,
+        AES_X64 = 53,
+        BMI1_X64 = 54,
+        BMI2_X64 = 55,
+        FMA_X64 = 56,
+        LZCNT_X64 = 57,
+        PCLMULQDQ_X64 = 58,
+        POPCNT_X64 = 59,
+        AVXVNNI_X64 = 60,
+        X86Serialize_X64 = 61,
+        AVX512F_X64 = 62,
+        AVX512BW_X64 = 63,
+        AVX512CD_X64 = 64,
+        AVX512DQ_X64 = 65,
+        AVX512VBMI_X64 = 66,
+        AVX10v1_X64 = 67,
+        AVX10v1_V512_X64 = 68,
+        AVX10v2_X64 = 69,
+        AVX10v2_V512_X64 = 70,
     }

     public unsafe struct InstructionSetFlags : IEnumerable<InstructionSet>
@@ -666,6 +682,14 @@ public static InstructionSetFlags ExpandInstructionSetByImplicationHelper(Target
                 resultflags.AddInstructionSet(InstructionSet.X64_AVX10v1_V512_X64);
             if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v1_V512_X64))
                 resultflags.AddInstructionSet(InstructionSet.X64_AVX10v1_V512);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v2))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v2_X64);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v2_X64))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v2);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v2_V512))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v2_V512_X64);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v2_V512_X64))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v2_V512);
             if (resultflags.HasInstructionSet(InstructionSet.X64_SSE))
                 resultflags.AddInstructionSet(InstructionSet.X64_X86Base);
             if (resultflags.HasInstructionSet(InstructionSet.X64_SSE2))
@@ -766,6 +790,10 @@ public static InstructionSetFlags ExpandInstructionSetByImplicationHelper(Target
                 resultflags.AddInstructionSet(InstructionSet.X64_AVX512VBMI);
             if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v1_V512))
                 resultflags.AddInstructionSet(InstructionSet.X64_AVX512VBMI_VL);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v2))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v1);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v2_V512))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v1_V512);
             if (resultflags.HasInstructionSet(InstructionSet.X64_Vector128))
                 resultflags.AddInstructionSet(InstructionSet.X64_SSE);
             if (resultflags.HasInstructionSet(InstructionSet.X64_Vector256))
@@ -881,6 +909,10 @@ public static InstructionSetFlags ExpandInstructionSetByImplicationHelper(Target
                 resultflags.AddInstructionSet(InstructionSet.X86_AVX512VBMI);
             if (resultflags.HasInstructionSet(InstructionSet.X86_AVX10v1_V512))
                 resultflags.AddInstructionSet(InstructionSet.X86_AVX512VBMI_VL);
+            if (resultflags.HasInstructionSet(InstructionSet.X86_AVX10v2))
+                resultflags.AddInstructionSet(InstructionSet.X86_AVX10v1);
+            if (resultflags.HasInstructionSet(InstructionSet.X86_AVX10v2_V512))
+                resultflags.AddInstructionSet(InstructionSet.X86_AVX10v1_V512);
             if (resultflags.HasInstructionSet(InstructionSet.X86_Vector128))
                 resultflags.AddInstructionSet(InstructionSet.X86_SSE);
             if (resultflags.HasInstructionSet(InstructionSet.X86_Vector256))
@@ -1009,6 +1041,10 @@ private static InstructionSetFlags ExpandInstructionSetByReverseImplicationHelpe
                 resultflags.AddInstructionSet(InstructionSet.X64_AVX10v1);
             if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v1_V512_X64))
                 resultflags.AddInstructionSet(InstructionSet.X64_AVX10v1_V512);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v2_X64))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v2);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v2_V512_X64))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v2_V512);
             if (resultflags.HasInstructionSet(InstructionSet.X64_X86Base))
                 resultflags.AddInstructionSet(InstructionSet.X64_SSE);
             if (resultflags.HasInstructionSet(InstructionSet.X64_SSE))
@@ -1109,6 +1145,10 @@ private static InstructionSetFlags ExpandInstructionSetByReverseImplicationHelpe
                 resultflags.AddInstructionSet(InstructionSet.X64_AVX10v1_V512);
             if (resultflags.HasInstructionSet(InstructionSet.X64_AVX512VBMI_VL))
                 resultflags.AddInstructionSet(InstructionSet.X64_AVX10v1_V512);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v1))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v2);
+            if (resultflags.HasInstructionSet(InstructionSet.X64_AVX10v1_V512))
+                resultflags.AddInstructionSet(InstructionSet.X64_AVX10v2_V512);
             if (resultflags.HasInstructionSet(InstructionSet.X64_SSE))
                 resultflags.AddInstructionSet(InstructionSet.X64_Vector128);
             if (resultflags.HasInstructionSet(InstructionSet.X64_AVX))
@@ -1224,6 +1264,10 @@ private static InstructionSetFlags ExpandInstructionSetByReverseImplicationHelpe
                 resultflags.AddInstructionSet(InstructionSet.X86_AVX10v1_V512);
             if (resultflags.HasInstructionSet(InstructionSet.X86_AVX512VBMI_VL))
                 resultflags.AddInstructionSet(InstructionSet.X86_AVX10v1_V512);
+            if (resultflags.HasInstructionSet(InstructionSet.X86_AVX10v1))
+                resultflags.AddInstructionSet(InstructionSet.X86_AVX10v2);
+            if (resultflags.HasInstructionSet(InstructionSet.X86_AVX10v1_V512))
+                resultflags.AddInstructionSet(InstructionSet.X86_AVX10v2_V512);
             if (resultflags.HasInstructionSet(InstructionSet.X86_SSE))
                 resultflags.AddInstructionSet(InstructionSet.X86_Vector128);
             if (resultflags.HasInstructionSet(InstructionSet.X86_AVX))
@@ -1353,6 +1397,8 @@ public static IEnumerable<InstructionSetInfo> ArchitectureToValidInstructionSets
                 yield return new InstructionSetInfo("vectort256", "VectorT256", InstructionSet.X64_VectorT256, true);
                 yield return new InstructionSetInfo("vectort512", "VectorT512", InstructionSet.X64_VectorT512, true);
                 yield return new InstructionSetInfo("apx", "Apx", InstructionSet.X64_APX, true);
+                yield return new InstructionSetInfo("avx10v2", "Avx10v2", InstructionSet.X64_AVX10v2, true);
+                yield return new InstructionSetInfo("avx10v2_v512", "Avx10v2_V512", InstructionSet.X64_AVX10v2_V512, true);
                 break;

             case TargetArchitecture.X86:
@@ -1397,6 +1443,8 @@ public static IEnumerable<InstructionSetInfo> ArchitectureToValidInstructionSets
                 yield return new InstructionSetInfo("vectort256", "VectorT256", InstructionSet.X86_VectorT256, true);
                 yield return new InstructionSetInfo("vectort512", "VectorT512", InstructionSet.X86_VectorT512, true);
                 yield return new InstructionSetInfo("apx", "Apx", InstructionSet.X86_APX, true);
+                yield return new InstructionSetInfo("avx10v2", "Avx10v2", InstructionSet.X86_AVX10v2, true);
+                yield return new InstructionSetInfo("avx10v2_v512", "Avx10v2_V512", InstructionSet.X86_AVX10v2_V512, true);
                 break;
             }
         }
@@ -1478,6 +1526,10 @@ public void Set64BitInstructionSetVariants(TargetArchitecture architecture)
                     AddInstructionSet(InstructionSet.X64_AVX10v1_X64);
                 if (HasInstructionSet(InstructionSet.X64_AVX10v1_V512))
                     AddInstructionSet(InstructionSet.X64_AVX10v1_V512_X64);
+                if (HasInstructionSet(InstructionSet.X64_AVX10v2))
+                    AddInstructionSet(InstructionSet.X64_AVX10v2_X64);
+                if (HasInstructionSet(InstructionSet.X64_AVX10v2_V512))
+                    AddInstructionSet(InstructionSet.X64_AVX10v2_V512_X64);
                 break;

             case TargetArchitecture.X86:
@@ -1528,6 +1580,8 @@ public void Set64BitInstructionSetVariantsUnconditionally(TargetArchitecture arc
                 AddInstructionSet(InstructionSet.X64_AVX512VBMI_X64);
                 AddInstructionSet(InstructionSet.X64_AVX10v1_X64);
                 AddInstructionSet(InstructionSet.X64_AVX10v1_V512_X64);
+                AddInstructionSet(InstructionSet.X64_AVX10v2_X64);
+                AddInstructionSet(InstructionSet.X64_AVX10v2_V512_X64);
                 break;

             case TargetArchitecture.X86:
@@ -1556,6 +1610,8 @@ public void Set64BitInstructionSetVariantsUnconditionally(TargetArchitecture arc
                 AddInstructionSet(InstructionSet.X86_AVX512VBMI_X64);
                 AddInstructionSet(InstructionSet.X86_AVX10v1_X64);
                 AddInstructionSet(InstructionSet.X86_AVX10v1_V512_X64);
+                AddInstructionSet(InstructionSet.X86_AVX10v2_X64);
+                AddInstructionSet(InstructionSet.X86_AVX10v2_V512_X64);
                 break;
             }
         }
@@ -1860,6 +1916,18 @@ public static InstructionSet LookupPlatformIntrinsicInstructionSet(TargetArchite
                     case "Apx":
                     { return InstructionSet.X64_APX; }

+                    case "Avx10v2":
+                        if (nestedTypeName == "X64")
+                        { return InstructionSet.X64_AVX10v2_X64; }
+                        else
+                        if (nestedTypeName == "V512_X64")
+                        { return InstructionSet.X64_AVX10v2_V512_X64; }
+                        else
+                        if (nestedTypeName == "V512")
+                        { return InstructionSet.X64_AVX10v2_V512; }
+                        else
+                        { return InstructionSet.X64_AVX10v2; }
+
                 }
                 break;
@@ -1981,6 +2049,12 @@ public static InstructionSet LookupPlatformIntrinsicInstructionSet(TargetArchite
                     case "Apx":
                     { return InstructionSet.X86_APX; }

+                    case "Avx10v2":
+                        if (nestedTypeName == "V512")
+                        { return InstructionSet.X86_AVX10v2_V512; }
+                        else
+                        { return InstructionSet.X86_AVX10v2; }
+
                 }
                 break;
diff --git a/src/coreclr/tools/Common/JitInterface/ThunkGenerator/InstructionSetDesc.txt b/src/coreclr/tools/Common/JitInterface/ThunkGenerator/InstructionSetDesc.txt
index 550ee6b2bd12d7..99a15a2cacfe1a 100644
--- a/src/coreclr/tools/Common/JitInterface/ThunkGenerator/InstructionSetDesc.txt
+++ b/src/coreclr/tools/Common/JitInterface/ThunkGenerator/InstructionSetDesc.txt
@@ -68,6 +68,8 @@ instructionset ,X86 ,VectorT128 , ,39 ,VectorT128
 instructionset ,X86 ,VectorT256 , ,40 ,VectorT256 ,vectort256
 instructionset ,X86 ,VectorT512 , ,41 ,VectorT512 ,vectort512
 instructionset ,X86 ,Apx , ,48 ,APX ,apx
+instructionset ,X86 ,Avx10v2 , ,51 ,AVX10v2 ,avx10v2
+instructionset ,X86 ,Avx10v2_V512 , ,52 ,AVX10v2_V512 ,avx10v2_v512

 instructionset64bit,X86 ,X86Base
 instructionset64bit,X86 ,SSE
@@ -94,6 +96,8 @@ instructionset64bit,X86 ,AVX512DQ
 instructionset64bit,X86 ,AVX512VBMI
 instructionset64bit,X86 ,AVX10v1
 instructionset64bit,X86 ,AVX10v1_V512
+instructionset64bit,X86 ,AVX10v2
+instructionset64bit,X86 ,AVX10v2_V512

 vectorinstructionset,X86 ,Vector128
 vectorinstructionset,X86 ,Vector256
@@ -164,6 +168,8 @@ implication ,X86 ,AVX10v1_V512 ,AVX512DQ
 implication ,X86 ,AVX10v1_V512 ,AVX512DQ_VL
 implication ,X86 ,AVX10v1_V512 ,AVX512VBMI
 implication ,X86 ,AVX10v1_V512 ,AVX512VBMI_VL
+implication ,X86 ,AVX10v2 ,AVX10v1
+implication ,X86 ,AVX10v2_V512 ,AVX10v1_V512

 ; These synthetic ISAs need to appear after the core ISAs
 ; as they depend on the other implications being correct first
diff --git a/src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/HardwareIntrinsicHelpers.Aot.cs b/src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/HardwareIntrinsicHelpers.Aot.cs
index f235483b67f8fb..bd4e0a83165571 100644
--- a/src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/HardwareIntrinsicHelpers.Aot.cs
+++ b/src/coreclr/tools/aot/ILCompiler.Compiler/Compiler/HardwareIntrinsicHelpers.Aot.cs
@@ -52,7 +52,9 @@ public static MethodIL EmitIsSupportedIL(MethodDesc method, FieldDesc isSupporte
             if (!uint.IsPow2((uint)flag))
             {
                 // These are the ISAs managed by multiple-bit flags.
-                // We need to emit different IL to handle the checks.
+                // We need to emit different IL to handle the checks.
+                // For now these are Avx10v1_V512 = (Avx10v1 | Avx512) and
+                // Avx10v2_V512 = (Avx10v2 | Avx512).

                 // (isSupportedField & flag) == flag
                 codeStream.Emit(ILOpcode.ldsfld, emit.NewToken(isSupportedField));
                 codeStream.EmitLdc(flag);
diff --git a/src/coreclr/vm/codeman.cpp b/src/coreclr/vm/codeman.cpp
index a09481acdf6c4d..2dab2b5edaf3a1 100644
--- a/src/coreclr/vm/codeman.cpp
+++ b/src/coreclr/vm/codeman.cpp
@@ -1422,6 +1422,19 @@ void EEJitManager::SetCpuInfo()
             }
         }
     }
+
+    if ((cpuFeatures & XArchIntrinsicConstants_Avx10v2) != 0)
+    {
+        if (CLRConfig::GetConfigValue(CLRConfig::EXTERNAL_EnableAVX10v2))
+        {
+            CPUCompileFlags.Set(InstructionSet_AVX10v2);
+            if ((cpuFeatures & XArchIntrinsicConstants_Avx512) != 0)
+            {
+                CPUCompileFlags.Set(InstructionSet_AVX10v2_V512);
+            }
+        }
+    }
+
 #if defined(TARGET_AMD64)
     if ((cpuFeatures & XArchIntrinsicConstants_Apx) != 0)
     {
diff --git a/src/native/minipal/cpufeatures.c b/src/native/minipal/cpufeatures.c
index 3cd30aee7779d3..e82cf1b59228b1 100644
--- a/src/native/minipal/cpufeatures.c
+++ b/src/native/minipal/cpufeatures.c
@@ -326,11 +326,15 @@ int minipal_getcpufeatures(void)
                 uint8_t avx10Version = (uint8_t)(cpuidInfo[CPUID_EBX] & 0xFF);

                 if((avx10Version >= 1) &&
-                   ((cpuidInfo[CPUID_EBX] & (1 << 16)) != 0) &&   // Avx10/V128
                    ((cpuidInfo[CPUID_EBX] & (1 << 17)) != 0))     // Avx10/V256
                 {
                     result |= XArchIntrinsicConstants_Evex;
-                    result |= XArchIntrinsicConstants_Avx10v1;
+                    result |= XArchIntrinsicConstants_Avx10v1;    // Avx10.1
+
+                    if (avx10Version >= 2)                        // Avx10.2
+                    {
+                        result |= XArchIntrinsicConstants_Avx10v2;
+                    }

                     // We assume that the Avx10/V512 support can be inferred from
                     // both Avx10v1 and Avx512 being present.
diff --git a/src/native/minipal/cpufeatures.h b/src/native/minipal/cpufeatures.h
index 101e7ab0a1b615..1a48dbb92dd60c 100644
--- a/src/native/minipal/cpufeatures.h
+++ b/src/native/minipal/cpufeatures.h
@@ -33,6 +33,7 @@ enum XArchIntrinsicConstants
     XArchIntrinsicConstants_Evex = 0x80000,
     XArchIntrinsicConstants_Apx = 0x100000,
     XArchIntrinsicConstants_Vpclmulqdq = 0x200000,
+    XArchIntrinsicConstants_Avx10v2 = 0x400000,
 };

 #endif // HOST_X86 || HOST_AMD64
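For completeness, the native probe in cpufeatures.c can be mirrored from managed code with X86Base.CpuId. The sketch below reads the same fields the diff uses (AVX10 version in EBX[7:0] of leaf 0x24, 256-bit support in EBX bit 17); gating on CPUID.(EAX=07H, ECX=01H):EDX[19] for AVX10 presence is an assumption based on Intel's AVX10 documentation, since that context line is not shown in the hunk. Treat this as a sketch of the detection scheme, not as how the runtime itself performs it.

using System;
using System.Runtime.Intrinsics.X86;

// Managed mirror of the leaf-0x24 probe in cpufeatures.c (sketch only).
class Avx10DetectionSketch
{
    static void Main()
    {
        if (!X86Base.IsSupported)
        {
            Console.WriteLine("CPUID not available");
            return;
        }

        // Assumed gate: CPUID.(EAX=07H, ECX=01H):EDX[19] indicates AVX10 support.
        (_, _, _, int edx7_1) = X86Base.CpuId(7, 1);
        if ((edx7_1 & (1 << 19)) == 0)
        {
            Console.WriteLine("AVX10 not reported");
            return;
        }

        // CPUID.(EAX=24H, ECX=0):EBX[7:0] = AVX10 version, EBX[17] = 256-bit vectors,
        // matching the avx10Version and bit-17 checks in the native code above.
        (_, int ebx24, _, _) = X86Base.CpuId(0x24, 0);
        int version = ebx24 & 0xFF;
        bool v256 = (ebx24 & (1 << 17)) != 0;

        Console.WriteLine($"AVX10 version {version}, 256-bit: {v256}");
        Console.WriteLine($"AVX10.2 detected: {version >= 2 && v256}");
    }
}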