riscv64: Add extension restrictions to assembler. am: a24ffcd413

Original change: https://android-review.googlesource.com/c/platform/art/+/2973813

Change-Id: I8303a57ffaab20090205492d1e93ea194e18fdd1
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index 698c7e5..7fdb015 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -1139,25 +1139,28 @@
   }
   EmitLoadReserved(assembler, type, ptr, old_value, load_aqrl);
   XRegister to_store = new_value;
-  if (mask != kNoXRegister) {
-    DCHECK_EQ(expected2, kNoXRegister);
-    DCHECK_NE(masked, kNoXRegister);
-    __ And(masked, old_value, mask);
-    __ Bne(masked, expected, cmp_failure);
-    // The `old_value` does not need to be preserved as the caller shall use `masked`
-    // to return the old value if needed.
-    to_store = old_value;
-    // TODO(riscv64): We could XOR the old and new value before the loop and use a single XOR here
-    // instead of the XOR+OR. (The `new_value` is either Zero or a temporary we can clobber.)
-    __ Xor(to_store, old_value, masked);
-    __ Or(to_store, to_store, new_value);
-  } else if (expected2 != kNoXRegister) {
-    Riscv64Label match2;
-    __ Beq(old_value, expected2, &match2, /*is_bare=*/ true);
-    __ Bne(old_value, expected, cmp_failure);
-    __ Bind(&match2);
-  } else {
-    __ Bne(old_value, expected, cmp_failure);
+  {
+    ScopedLrScExtensionsRestriction slser(assembler);
+    if (mask != kNoXRegister) {
+      DCHECK_EQ(expected2, kNoXRegister);
+      DCHECK_NE(masked, kNoXRegister);
+      __ And(masked, old_value, mask);
+      __ Bne(masked, expected, cmp_failure);
+      // The `old_value` does not need to be preserved as the caller shall use `masked`
+      // to return the old value if needed.
+      to_store = old_value;
+      // TODO(riscv64): We could XOR the old and new value before the loop and use a single XOR here
+      // instead of the XOR+OR. (The `new_value` is either Zero or a temporary we can clobber.)
+      __ Xor(to_store, old_value, masked);
+      __ Or(to_store, to_store, new_value);
+    } else if (expected2 != kNoXRegister) {
+      Riscv64Label match2;
+      __ Beq(old_value, expected2, &match2, /*is_bare=*/ true);
+      __ Bne(old_value, expected, cmp_failure);
+      __ Bind(&match2);
+    } else {
+      __ Bne(old_value, expected, cmp_failure);
+    }
   }
   EmitStoreConditional(assembler, type, ptr, store_result, to_store, store_aqrl);
   if (strong) {
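Reviewer note: the RISC-V spec only guarantees forward progress for a constrained LR/SC sequence when the code between LR and SC sticks to a limited subset of base-ISA instructions, so the ScopedLrScExtensionsRestriction guard introduced above wraps the loop body to catch emission of anything outside that subset in debug builds. A minimal sketch of what such an RAII guard could look like, assuming the assembler keeps an enabled-extensions bitmask; the accessor names and mask below are illustrative, not ART's actual implementation:

    // Illustrative sketch only: narrow the assembler's enabled-extension mask for
    // the duration of a scope, so AssertExtensionsEnabled() fires if anything
    // outside the LR/SC-safe subset is emitted between LR and SC.
    class ScopedLrScExtensionsRestriction {
     public:
      explicit ScopedLrScExtensionsRestriction(Riscv64Assembler* assembler)
          : assembler_(assembler),
            saved_extensions_(assembler->GetEnabledExtensions()) {  // hypothetical accessor
        assembler_->SetEnabledExtensions(saved_extensions_ & kLrScAllowedExtensionsMask);
      }
      ~ScopedLrScExtensionsRestriction() {
        assembler_->SetEnabledExtensions(saved_extensions_);  // restore on scope exit
      }
     private:
      Riscv64Assembler* const assembler_;
      const uint32_t saved_extensions_;
    };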
@@ -1826,8 +1829,11 @@
         Riscv64Label retry;
         __ Bind(&retry);
         __ LrW(old_value, ptr, load_aqrl);
-        __ And(temp, old_value, mask);
-        __ Or(temp, temp, arg);
+        {
+          ScopedLrScExtensionsRestriction slser(assembler);
+          __ And(temp, old_value, mask);
+          __ Or(temp, temp, arg);
+        }
         __ ScW(temp, temp, ptr, store_aqrl);
         __ Bnez(temp, &retry, /*is_bare=*/ true);  // Bare: `TMP` shall not be clobbered.
       }
@@ -1845,15 +1851,19 @@
         Riscv64Label retry;
         __ Bind(&retry);
         __ LrW(old_value, ptr, load_aqrl);
-        __ Add(temp, old_value, arg);
-        // We use `(A ^ B) ^ A == B` and with the masking `((A ^ B) & mask) ^ A`, the result
-        // contains bits from `B` for bits specified in `mask` and bits from `A` elsewhere.
-        // Note: These instructions directly depend on each other, so it's not necessarily the
-        // fastest approach but for `(A ^ ~mask) | (B & mask)` we would need an extra register for
-        // `~mask` because ANDN is not in the "I" instruction set as required for a LR/SC sequence.
-        __ Xor(temp, temp, old_value);
-        __ And(temp, temp, mask);
-        __ Xor(temp, temp, old_value);
+        {
+          ScopedLrScExtensionsRestriction slser(assembler);
+          __ Add(temp, old_value, arg);
+          // We use `(A ^ B) ^ A == B` and with the masking `((A ^ B) & mask) ^ A`, the result
+          // contains bits from `B` for bits specified in `mask` and bits from `A` elsewhere.
+          // Note: These instructions directly depend on each other, so it's not necessarily the
+          // fastest approach but for `(A ^ ~mask) | (B & mask)` we would need an extra register
+          // for `~mask` because ANDN is not in the "I" instruction set as required for a LR/SC
+          // sequence.
+          __ Xor(temp, temp, old_value);
+          __ And(temp, temp, mask);
+          __ Xor(temp, temp, old_value);
+        }
         __ ScW(temp, temp, ptr, store_aqrl);
         __ Bnez(temp, &retry, /*is_bare=*/ true);  // Bare: `TMP` shall not be clobbered.
       }
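Reviewer note: the bit manipulation in the hunks above relies on two XOR identities. For the masked CAS, `to_store = (old ^ (old & mask)) | new` keeps the bits of `old` outside `mask` and takes the new bits inside it (this assumes `new` only has bits inside `mask`, as the intrinsic requires). For the masked get-and-add, `((A ^ B) & mask) ^ A` selects bits of `B` inside `mask` and bits of `A` outside it, for arbitrary `B`. A small stand-alone check of both identities (illustrative only, not ART code):

    #include <cassert>
    #include <cstdint>

    int main() {
      constexpr uint32_t mask = 0x0000ff00u;  // the updated bit-field
      constexpr uint32_t A = 0x12345678u;     // old memory word
      constexpr uint32_t B = 0x0000ab00u;     // new value, already shifted into the field

      // Masked CAS store value: old bits outside `mask`, new bits inside it.
      uint32_t masked = A & mask;             // 0x00005600
      uint32_t to_store = (A ^ masked) | B;   // 0x1234ab78
      assert(to_store == ((A & ~mask) | (B & mask)));

      // Masked get-and-add combine: B need not be pre-masked for this identity.
      uint32_t combined = ((A ^ B) & mask) ^ A;
      assert(combined == ((A & ~mask) | (B & mask)));
      return 0;
    }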
diff --git a/compiler/utils/riscv64/assembler_riscv64.cc b/compiler/utils/riscv64/assembler_riscv64.cc
index eeb4537..09778ad 100644
--- a/compiler/utils/riscv64/assembler_riscv64.cc
+++ b/compiler/utils/riscv64/assembler_riscv64.cc
@@ -114,48 +114,59 @@
 // Load instructions (RV32I+RV64I): opcode = 0x03, funct3 from 0x0 ~ 0x6
 
 void Riscv64Assembler::Lb(XRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitI(offset, rs1, 0x0, rd, 0x03);
 }
 
 void Riscv64Assembler::Lh(XRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitI(offset, rs1, 0x1, rd, 0x03);
 }
 
 void Riscv64Assembler::Lw(XRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitI(offset, rs1, 0x2, rd, 0x03);
 }
 
 void Riscv64Assembler::Ld(XRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitI(offset, rs1, 0x3, rd, 0x03);
 }
 
 void Riscv64Assembler::Lbu(XRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitI(offset, rs1, 0x4, rd, 0x03);
 }
 
 void Riscv64Assembler::Lhu(XRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitI(offset, rs1, 0x5, rd, 0x03);
 }
 
 void Riscv64Assembler::Lwu(XRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitI(offset, rs1, 0x6, rd, 0x3);
 }
 
 // Store instructions (RV32I+RV64I): opcode = 0x23, funct3 from 0x0 ~ 0x3
 
 void Riscv64Assembler::Sb(XRegister rs2, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitS(offset, rs2, rs1, 0x0, 0x23);
 }
 
 void Riscv64Assembler::Sh(XRegister rs2, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitS(offset, rs2, rs1, 0x1, 0x23);
 }
 
 void Riscv64Assembler::Sw(XRegister rs2, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitS(offset, rs2, rs1, 0x2, 0x23);
 }
 
 void Riscv64Assembler::Sd(XRegister rs2, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
   EmitS(offset, rs2, rs1, 0x3, 0x23);
 }
 
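Reviewer note: each instruction now asserts the extensions it needs before encoding, so emitting an instruction that the configured target (or a restricted scope such as an LR/SC sequence) does not allow fails loudly in debug builds. A minimal stand-alone sketch of what such an assertion helper could look like, assuming extensions are tracked as a bitmask; the enum values and class here are illustrative, not ART's actual definitions:

    #include <cassert>
    #include <cstdint>

    // Illustrative sketch (not ART's actual definitions).
    enum class Riscv64Extension : uint32_t { kLoadStore, kZifencei, kM, kA, kZicsr, kF, kD /* ... */ };

    class ExtensionChecker {
     public:
      explicit ExtensionChecker(uint32_t enabled) : enabled_(enabled) {}

      bool IsEnabled(Riscv64Extension ext) const {
        return (enabled_ & (1u << static_cast<uint32_t>(ext))) != 0u;
      }

      // Variadic form mirroring the AssertExtensionsEnabled(...) calls in the diff;
      // ART would use a DCHECK instead of a plain assert.
      template <typename... Exts>
      void AssertExtensionsEnabled(Riscv64Extension ext, Exts... rest) const {
        assert(IsEnabled(ext) && "instruction requires an extension that is not enabled");
        if constexpr (sizeof...(rest) != 0u) {
          AssertExtensionsEnabled(rest...);
        }
      }

     private:
      const uint32_t enabled_;
    };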
@@ -313,7 +324,10 @@
 /////////////////////////// RV64 "Zifencei" Instructions  START ////////////////////////////
 
 // "Zifencei" Standard Extension, opcode = 0xf, funct3 = 1
-void Riscv64Assembler::FenceI() { EmitI(0x0, 0x0, 0x1, 0x0, 0xf); }
+void Riscv64Assembler::FenceI() {
+  AssertExtensionsEnabled(Riscv64Extension::kZifencei);
+  EmitI(0x0, 0x0, 0x1, 0x0, 0xf);
+}
 
 //////////////////////////// RV64 "Zifencei" Instructions  END /////////////////////////////
 
@@ -322,56 +336,69 @@
 // RV32M Standard Extension: opcode = 0x33, funct3 from 0x0 ~ 0x7
 
 void Riscv64Assembler::Mul(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x0, rd, 0x33);
 }
 
 void Riscv64Assembler::Mulh(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x1, rd, 0x33);
 }
 
 void Riscv64Assembler::Mulhsu(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x2, rd, 0x33);
 }
 
 void Riscv64Assembler::Mulhu(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x3, rd, 0x33);
 }
 
 void Riscv64Assembler::Div(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x4, rd, 0x33);
 }
 
 void Riscv64Assembler::Divu(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x5, rd, 0x33);
 }
 
 void Riscv64Assembler::Rem(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x6, rd, 0x33);
 }
 
 void Riscv64Assembler::Remu(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x7, rd, 0x33);
 }
 
 // RV64M Standard Extension: opcode = 0x3b, funct3 0x0 and from 0x4 ~ 0x7
 
 void Riscv64Assembler::Mulw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x0, rd, 0x3b);
 }
 
 void Riscv64Assembler::Divw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x4, rd, 0x3b);
 }
 
 void Riscv64Assembler::Divuw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x5, rd, 0x3b);
 }
 
 void Riscv64Assembler::Remw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x6, rd, 0x3b);
 }
 
 void Riscv64Assembler::Remuw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kM);
   EmitR(0x1, rs2, rs1, 0x7, rd, 0x3b);
 }
 
@@ -380,94 +407,116 @@
 /////////////////////////////// RV64 "A" Instructions  START ///////////////////////////////
 
 void Riscv64Assembler::LrW(XRegister rd, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   CHECK(aqrl != AqRl::kRelease);
   EmitR4(0x2, enum_cast<uint32_t>(aqrl), 0x0, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::LrD(XRegister rd, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   CHECK(aqrl != AqRl::kRelease);
   EmitR4(0x2, enum_cast<uint32_t>(aqrl), 0x0, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::ScW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   CHECK(aqrl != AqRl::kAcquire);
   EmitR4(0x3, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::ScD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   CHECK(aqrl != AqRl::kAcquire);
   EmitR4(0x3, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoSwapW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x1, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoSwapD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x1, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoAddW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x0, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoAddD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x0, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoXorW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x4, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoXorD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x4, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoAndW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0xc, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoAndD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0xc, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoOrW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x8, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoOrD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x8, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoMinW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x10, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoMinD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x10, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoMaxW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x14, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoMaxD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x14, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoMinuW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x18, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoMinuD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x18, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoMaxuW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x1c, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
 }
 
 void Riscv64Assembler::AmoMaxuD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
+  AssertExtensionsEnabled(Riscv64Extension::kA);
   EmitR4(0x1c, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
 }
 
@@ -478,26 +527,32 @@
 // "Zicsr" Standard Extension, opcode = 0x73, funct3 from 0x1 ~ 0x3 and 0x5 ~ 0x7
 
 void Riscv64Assembler::Csrrw(XRegister rd, uint32_t csr, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
   EmitI(ToInt12(csr), rs1, 0x1, rd, 0x73);
 }
 
 void Riscv64Assembler::Csrrs(XRegister rd, uint32_t csr, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
   EmitI(ToInt12(csr), rs1, 0x2, rd, 0x73);
 }
 
 void Riscv64Assembler::Csrrc(XRegister rd, uint32_t csr, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
   EmitI(ToInt12(csr), rs1, 0x3, rd, 0x73);
 }
 
 void Riscv64Assembler::Csrrwi(XRegister rd, uint32_t csr, uint32_t uimm5) {
+  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
   EmitI(ToInt12(csr), uimm5, 0x5, rd, 0x73);
 }
 
 void Riscv64Assembler::Csrrsi(XRegister rd, uint32_t csr, uint32_t uimm5) {
+  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
   EmitI(ToInt12(csr), uimm5, 0x6, rd, 0x73);
 }
 
 void Riscv64Assembler::Csrrci(XRegister rd, uint32_t csr, uint32_t uimm5) {
+  AssertExtensionsEnabled(Riscv64Extension::kZicsr);
   EmitI(ToInt12(csr), uimm5, 0x7, rd, 0x73);
 }
 
@@ -508,18 +563,22 @@
 // FP load/store instructions (RV32F+RV32D): opcode = 0x07, 0x27
 
 void Riscv64Assembler::FLw(FRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kF);
   EmitI(offset, rs1, 0x2, rd, 0x07);
 }
 
 void Riscv64Assembler::FLd(FRegister rd, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kD);
   EmitI(offset, rs1, 0x3, rd, 0x07);
 }
 
 void Riscv64Assembler::FSw(FRegister rs2, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kF);
   EmitS(offset, rs2, rs1, 0x2, 0x27);
 }
 
 void Riscv64Assembler::FSd(FRegister rs2, XRegister rs1, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kD);
   EmitS(offset, rs2, rs1, 0x3, 0x27);
 }
 
@@ -527,131 +586,161 @@
 
 void Riscv64Assembler::FMAddS(
     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x43);
 }
 
 void Riscv64Assembler::FMAddD(
     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x43);
 }
 
 void Riscv64Assembler::FMSubS(
     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x47);
 }
 
 void Riscv64Assembler::FMSubD(
     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x47);
 }
 
 void Riscv64Assembler::FNMSubS(
     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4b);
 }
 
 void Riscv64Assembler::FNMSubD(
     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4b);
 }
 
 void Riscv64Assembler::FNMAddS(
     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4f);
 }
 
 void Riscv64Assembler::FNMAddD(
     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4f);
 }
 
 // Simple FP instructions (RV32F+RV32D): opcode = 0x53, funct7 = 0b0XXXX0D
 
 void Riscv64Assembler::FAddS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FAddD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FSubS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x4, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FSubD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x5, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FMulS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x8, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FMulD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x9, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FDivS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0xc, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FDivD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0xd, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FSqrtS(FRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x2c, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FSqrtD(FRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x2d, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FSgnjS(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x10, rs2, rs1, 0x0, rd, 0x53);
 }
 
 void Riscv64Assembler::FSgnjD(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x11, rs2, rs1, 0x0, rd, 0x53);
 }
 
 void Riscv64Assembler::FSgnjnS(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x10, rs2, rs1, 0x1, rd, 0x53);
 }
 
 void Riscv64Assembler::FSgnjnD(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x11, rs2, rs1, 0x1, rd, 0x53);
 }
 
 void Riscv64Assembler::FSgnjxS(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x10, rs2, rs1, 0x2, rd, 0x53);
 }
 
 void Riscv64Assembler::FSgnjxD(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x11, rs2, rs1, 0x2, rd, 0x53);
 }
 
 void Riscv64Assembler::FMinS(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x14, rs2, rs1, 0x0, rd, 0x53);
 }
 
 void Riscv64Assembler::FMinD(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x15, rs2, rs1, 0x0, rd, 0x53);
 }
 
 void Riscv64Assembler::FMaxS(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x14, rs2, rs1, 0x1, rd, 0x53);
 }
 
 void Riscv64Assembler::FMaxD(FRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
  EmitR(0x15, rs2, rs1, 0x1, rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtSD(FRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF, Riscv64Extension::kD);
   EmitR(0x20, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtDS(FRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF, Riscv64Extension::kD);
   // Note: The `frm` is useless, the result can represent every value of the source exactly.
   EmitR(0x21, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
@@ -659,122 +748,150 @@
 // FP compare instructions (RV32F+RV32D): opcode = 0x53, funct7 = 0b101000D
 
 void Riscv64Assembler::FEqS(XRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x50, rs2, rs1, 0x2, rd, 0x53);
 }
 
 void Riscv64Assembler::FEqD(XRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x51, rs2, rs1, 0x2, rd, 0x53);
 }
 
 void Riscv64Assembler::FLtS(XRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x50, rs2, rs1, 0x1, rd, 0x53);
 }
 
 void Riscv64Assembler::FLtD(XRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x51, rs2, rs1, 0x1, rd, 0x53);
 }
 
 void Riscv64Assembler::FLeS(XRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x50, rs2, rs1, 0x0, rd, 0x53);
 }
 
 void Riscv64Assembler::FLeD(XRegister rd, FRegister rs1, FRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x51, rs2, rs1, 0x0, rd, 0x53);
 }
 
 // FP conversion instructions (RV32F+RV32D+RV64F+RV64D): opcode = 0x53, funct7 = 0b110X00D
 
 void Riscv64Assembler::FCvtWS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x60, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtWD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x61, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtWuS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x60, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtWuD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x61, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtLS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x60, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtLD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x61, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtLuS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x60, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtLuD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x61, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtSW(FRegister rd, XRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x68, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtDW(FRegister rd, XRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   // Note: The `frm` is useless, the result can represent every value of the source exactly.
   EmitR(0x69, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtSWu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x68, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtDWu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   // Note: The `frm` is useless, the result can represent every value of the source exactly.
   EmitR(0x69, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtSL(FRegister rd, XRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x68, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtDL(FRegister rd, XRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x69, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtSLu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x68, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 void Riscv64Assembler::FCvtDLu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x69, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
 }
 
 // FP move instructions (RV32F+RV32D): opcode = 0x53, funct3 = 0x0, funct7 = 0b111X00D
 
 void Riscv64Assembler::FMvXW(XRegister rd, FRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x70, 0x0, rs1, 0x0, rd, 0x53);
 }
 
 void Riscv64Assembler::FMvXD(XRegister rd, FRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x71, 0x0, rs1, 0x0, rd, 0x53);
 }
 
 void Riscv64Assembler::FMvWX(FRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x78, 0x0, rs1, 0x0, rd, 0x53);
 }
 
 void Riscv64Assembler::FMvDX(FRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x79, 0x0, rs1, 0x0, rd, 0x53);
 }
 
 // FP classify instructions (RV32F+RV32D): opcode = 0x53, funct3 = 0x1, funct7 = 0b111X00D
 
 void Riscv64Assembler::FClassS(XRegister rd, FRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kF);
   EmitR(0x70, 0x0, rs1, 0x1, rd, 0x53);
 }
 
 void Riscv64Assembler::FClassD(XRegister rd, FRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kD);
   EmitR(0x71, 0x0, rs1, 0x1, rd, 0x53);
 }
 
@@ -783,86 +900,101 @@
 /////////////////////////////// RV64 "C" Instructions  START /////////////////////////////
 
 void Riscv64Assembler::CLwsp(XRegister rd, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
   DCHECK_NE(rd, Zero);
-
   EmitCI(0b010u, rd, ExtractOffset52_76(offset), 0b10u);
 }
 
 void Riscv64Assembler::CLdsp(XRegister rd, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
   DCHECK_NE(rd, Zero);
-
   EmitCI(0b011u, rd, ExtractOffset53_86(offset), 0b10u);
 }
 
 void Riscv64Assembler::CFLdsp(FRegister rd, int32_t offset) {
+  AssertExtensionsEnabled(
+      Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
   EmitCI(0b001u, rd, ExtractOffset53_86(offset), 0b10u);
 }
 
 void Riscv64Assembler::CSwsp(XRegister rs2, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
   EmitCSS(0b110u, ExtractOffset52_76(offset), rs2, 0b10u);
 }
 
 void Riscv64Assembler::CSdsp(XRegister rs2, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
   EmitCSS(0b111u, ExtractOffset53_86(offset), rs2, 0b10u);
 }
 
 void Riscv64Assembler::CFSdsp(FRegister rs2, int32_t offset) {
+  AssertExtensionsEnabled(
+      Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
   EmitCSS(0b101u, ExtractOffset53_86(offset), rs2, 0b10u);
 }
 
 void Riscv64Assembler::CLw(XRegister rd_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
   EmitCM(0b010u, ExtractOffset52_6(offset), rs1_s, rd_s, 0b00u);
 }
 
 void Riscv64Assembler::CLd(XRegister rd_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
   EmitCM(0b011u, ExtractOffset53_76(offset), rs1_s, rd_s, 0b00u);
 }
 
 void Riscv64Assembler::CFLd(FRegister rd_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(
+      Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
   EmitCM(0b001u, ExtractOffset53_76(offset), rs1_s, rd_s, 0b00u);
 }
 
 void Riscv64Assembler::CSw(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
   EmitCM(0b110u, ExtractOffset52_6(offset), rs1_s, rs2_s, 0b00u);
 }
 
 void Riscv64Assembler::CSd(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
   EmitCM(0b111u, ExtractOffset53_76(offset), rs1_s, rs2_s, 0b00u);
 }
 
 void Riscv64Assembler::CFSd(FRegister rs2_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(
+      Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
   EmitCM(0b101u, ExtractOffset53_76(offset), rs1_s, rs2_s, 0b00u);
 }
 
 void Riscv64Assembler::CLi(XRegister rd, int32_t imm) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(rd, Zero);
   DCHECK(IsInt<6>(imm));
-
   EmitCI(0b010u, rd, EncodeInt6(imm), 0b01u);
 }
 
 void Riscv64Assembler::CLui(XRegister rd, uint32_t nzimm6) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(rd, Zero);
   DCHECK_NE(rd, SP);
   DCHECK(IsImmCLuiEncodable(nzimm6));
-
   EmitCI(0b011u, rd, nzimm6 & MaskLeastSignificant<uint32_t>(6), 0b01u);
 }
 
 void Riscv64Assembler::CAddi(XRegister rd, int32_t nzimm) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(rd, Zero);
   DCHECK_NE(nzimm, 0);
-
   EmitCI(0b000u, rd, EncodeInt6(nzimm), 0b01u);
 }
 
 void Riscv64Assembler::CAddiw(XRegister rd, int32_t imm) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(rd, Zero);
-
   EmitCI(0b001u, rd, EncodeInt6(imm), 0b01u);
 }
 
 void Riscv64Assembler::CAddi16Sp(int32_t nzimm) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(nzimm, 0);
   DCHECK(IsAligned<16>(nzimm));
 
@@ -880,6 +1012,7 @@
 }
 
 void Riscv64Assembler::CAddi4Spn(XRegister rd_s, uint32_t nzuimm) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(nzuimm, 0u);
   DCHECK(IsAligned<4>(nzuimm));
   DCHECK(IsUint<10>(nzuimm));
@@ -894,187 +1027,227 @@
 }
 
 void Riscv64Assembler::CSlli(XRegister rd, int32_t shamt) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(shamt, 0);
   DCHECK_NE(rd, Zero);
-
   EmitCI(0b000u, rd, shamt, 0b10u);
 }
 
 void Riscv64Assembler::CSrli(XRegister rd_s, int32_t shamt) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(shamt, 0);
   DCHECK(IsUint<6>(shamt));
-
   EmitCBArithmetic(0b100u, 0b00u, shamt, rd_s, 0b01u);
 }
 
 void Riscv64Assembler::CSrai(XRegister rd_s, int32_t shamt) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(shamt, 0);
   DCHECK(IsUint<6>(shamt));
-
   EmitCBArithmetic(0b100u, 0b01u, shamt, rd_s, 0b01u);
 }
 
 void Riscv64Assembler::CAndi(XRegister rd_s, int32_t imm) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK(IsInt<6>(imm));
-
   EmitCBArithmetic(0b100u, 0b10u, imm, rd_s, 0b01u);
 }
 
 void Riscv64Assembler::CMv(XRegister rd, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(rd, Zero);
   DCHECK_NE(rs2, Zero);
-
   EmitCR(0b1000u, rd, rs2, 0b10u);
 }
 
 void Riscv64Assembler::CAdd(XRegister rd, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(rd, Zero);
   DCHECK_NE(rs2, Zero);
-
   EmitCR(0b1001u, rd, rs2, 0b10u);
 }
 
 void Riscv64Assembler::CAnd(XRegister rd_s, XRegister rs2_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   EmitCAReg(0b100011u, rd_s, 0b11u, rs2_s, 0b01u);
 }
 
 void Riscv64Assembler::COr(XRegister rd_s, XRegister rs2_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   EmitCAReg(0b100011u, rd_s, 0b10u, rs2_s, 0b01u);
 }
 
 void Riscv64Assembler::CXor(XRegister rd_s, XRegister rs2_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   EmitCAReg(0b100011u, rd_s, 0b01u, rs2_s, 0b01u);
 }
 
 void Riscv64Assembler::CSub(XRegister rd_s, XRegister rs2_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   EmitCAReg(0b100011u, rd_s, 0b00u, rs2_s, 0b01u);
 }
 
 void Riscv64Assembler::CAddw(XRegister rd_s, XRegister rs2_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   EmitCAReg(0b100111u, rd_s, 0b01u, rs2_s, 0b01u);
 }
 
 void Riscv64Assembler::CSubw(XRegister rd_s, XRegister rs2_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   EmitCAReg(0b100111u, rd_s, 0b00u, rs2_s, 0b01u);
 }
 
 // "Zcb" Standard Extension, part of "C", opcode = 0b00, 0b01, funct3 = 0b100.
 
 void Riscv64Assembler::CLbu(XRegister rd_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
   EmitCAReg(0b100000u, rs1_s, EncodeOffset0_1(offset), rd_s, 0b00u);
 }
 
 void Riscv64Assembler::CLhu(XRegister rd_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
   DCHECK(IsUint<2>(offset));
   DCHECK_ALIGNED(offset, 2);
   EmitCAReg(0b100001u, rs1_s, BitFieldExtract<uint32_t>(offset, 1, 1), rd_s, 0b00u);
 }
 
 void Riscv64Assembler::CLh(XRegister rd_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
   DCHECK(IsUint<2>(offset));
   DCHECK_ALIGNED(offset, 2);
   EmitCAReg(0b100001u, rs1_s, 0b10 | BitFieldExtract<uint32_t>(offset, 1, 1), rd_s, 0b00u);
 }
 
 void Riscv64Assembler::CSb(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
   EmitCAReg(0b100010u, rs1_s, EncodeOffset0_1(offset), rs2_s, 0b00u);
 }
 
 void Riscv64Assembler::CSh(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
   DCHECK(IsUint<2>(offset));
   DCHECK_ALIGNED(offset, 2);
   EmitCAReg(0b100011u, rs1_s, BitFieldExtract<uint32_t>(offset, 1, 1), rs2_s, 0b00u);
 }
 
-void Riscv64Assembler::CZext_b(XRegister rd_rs1_s) {
+void Riscv64Assembler::CZextB(XRegister rd_rs1_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZcb);
   EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b000u, 0b01u);
 }
 
-void Riscv64Assembler::CSext_b(XRegister rd_rs1_s) {
+void Riscv64Assembler::CSextB(XRegister rd_rs1_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
   EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b001u, 0b01u);
 }
 
-void Riscv64Assembler::CZext_h(XRegister rd_rs1_s) {
+void Riscv64Assembler::CZextH(XRegister rd_rs1_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
   EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b010u, 0b01u);
 }
 
-void Riscv64Assembler::CSext_h(XRegister rd_rs1_s) {
+void Riscv64Assembler::CSextH(XRegister rd_rs1_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
   EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b011u, 0b01u);
 }
 
-void Riscv64Assembler::CZext_w(XRegister rd_rs1_s) {
+void Riscv64Assembler::CZextW(XRegister rd_rs1_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba, Riscv64Extension::kZcb);
   EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b100u, 0b01u);
 }
 
 void Riscv64Assembler::CNot(XRegister rd_rs1_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kZcb);
   EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b101u, 0b01u);
 }
 
 void Riscv64Assembler::CMul(XRegister rd_s, XRegister rs2_s) {
+  AssertExtensionsEnabled(Riscv64Extension::kM, Riscv64Extension::kZcb);
   EmitCAReg(0b100111u, rd_s, 0b10u, rs2_s, 0b01u);
 }
 
-void Riscv64Assembler::CJ(int32_t offset) { EmitCJ(0b101u, offset, 0b01u); }
+void Riscv64Assembler::CJ(int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
+  EmitCJ(0b101u, offset, 0b01u);
+}
 
 void Riscv64Assembler::CJr(XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(rs1, Zero);
-
   EmitCR(0b1000u, rs1, Zero, 0b10u);
 }
 
 void Riscv64Assembler::CJalr(XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   DCHECK_NE(rs1, Zero);
-
   EmitCR(0b1001u, rs1, Zero, 0b10u);
 }
 
 void Riscv64Assembler::CBeqz(XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   EmitCBBranch(0b110u, offset, rs1_s, 0b01u);
 }
 
 void Riscv64Assembler::CBnez(XRegister rs1_s, int32_t offset) {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
   EmitCBBranch(0b111u, offset, rs1_s, 0b01u);
 }
 
-void Riscv64Assembler::CEbreak() { EmitCR(0b1001u, Zero, Zero, 0b10u); }
+void Riscv64Assembler::CEbreak() {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
+  EmitCR(0b1001u, Zero, Zero, 0b10u);
+}
 
-void Riscv64Assembler::CNop() { EmitCI(0b000u, Zero, 0u, 0b01u); }
+void Riscv64Assembler::CNop() {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
+  EmitCI(0b000u, Zero, 0u, 0b01u);
+}
 
-void Riscv64Assembler::CUnimp() { Emit16(0x0u); }
+void Riscv64Assembler::CUnimp() {
+  AssertExtensionsEnabled(Riscv64Extension::kZca);
+  Emit16(0x0u);
+}
 
 /////////////////////////////// RV64 "C" Instructions  END ///////////////////////////////
 
 ////////////////////////////// RV64 "Zba" Instructions  START /////////////////////////////
 
 void Riscv64Assembler::AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba);
   EmitR(0x4, rs2, rs1, 0x0, rd, 0x3b);
 }
 
 void Riscv64Assembler::Sh1Add(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba);
   EmitR(0x10, rs2, rs1, 0x2, rd, 0x33);
 }
 
 void Riscv64Assembler::Sh1AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba);
   EmitR(0x10, rs2, rs1, 0x2, rd, 0x3b);
 }
 
 void Riscv64Assembler::Sh2Add(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba);
   EmitR(0x10, rs2, rs1, 0x4, rd, 0x33);
 }
 
 void Riscv64Assembler::Sh2AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba);
   EmitR(0x10, rs2, rs1, 0x4, rd, 0x3b);
 }
 
 void Riscv64Assembler::Sh3Add(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba);
   EmitR(0x10, rs2, rs1, 0x6, rd, 0x33);
 }
 
 void Riscv64Assembler::Sh3AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba);
   EmitR(0x10, rs2, rs1, 0x6, rd, 0x3b);
 }
 
 void Riscv64Assembler::SlliUw(XRegister rd, XRegister rs1, int32_t shamt) {
+  AssertExtensionsEnabled(Riscv64Extension::kZba);
   EmitI6(0x2, shamt, rs1, 0x1, rd, 0x1b);
 }
 
@@ -1083,100 +1256,124 @@
 ////////////////////////////// RV64 "Zbb" Instructions  START /////////////////////////////
 
 void Riscv64Assembler::Andn(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x20, rs2, rs1, 0x7, rd, 0x33);
 }
 
 void Riscv64Assembler::Orn(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x20, rs2, rs1, 0x6, rd, 0x33);
 }
 
 void Riscv64Assembler::Xnor(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x20, rs2, rs1, 0x4, rd, 0x33);
 }
 
 void Riscv64Assembler::Clz(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, 0x0, rs1, 0x1, rd, 0x13);
 }
 
 void Riscv64Assembler::Clzw(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, 0x0, rs1, 0x1, rd, 0x1b);
 }
 
 void Riscv64Assembler::Ctz(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, 0x1, rs1, 0x1, rd, 0x13);
 }
 
 void Riscv64Assembler::Ctzw(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, 0x1, rs1, 0x1, rd, 0x1b);
 }
 
 void Riscv64Assembler::Cpop(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, 0x2, rs1, 0x1, rd, 0x13);
 }
 
 void Riscv64Assembler::Cpopw(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, 0x2, rs1, 0x1, rd, 0x1b);
 }
 
 void Riscv64Assembler::Min(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x5, rs2, rs1, 0x4, rd, 0x33);
 }
 
 void Riscv64Assembler::Minu(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x5, rs2, rs1, 0x5, rd, 0x33);
 }
 
 void Riscv64Assembler::Max(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x5, rs2, rs1, 0x6, rd, 0x33);
 }
 
 void Riscv64Assembler::Maxu(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x5, rs2, rs1, 0x7, rd, 0x33);
 }
 
 void Riscv64Assembler::Rol(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, rs2, rs1, 0x1, rd, 0x33);
 }
 
 void Riscv64Assembler::Rolw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, rs2, rs1, 0x1, rd, 0x3b);
 }
 
 void Riscv64Assembler::Ror(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, rs2, rs1, 0x5, rd, 0x33);
 }
 
 void Riscv64Assembler::Rorw(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, rs2, rs1, 0x5, rd, 0x3b);
 }
 
 void Riscv64Assembler::Rori(XRegister rd, XRegister rs1, int32_t shamt) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   CHECK_LT(static_cast<uint32_t>(shamt), 64u);
   EmitI6(0x18, shamt, rs1, 0x5, rd, 0x13);
 }
 
 void Riscv64Assembler::Roriw(XRegister rd, XRegister rs1, int32_t shamt) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   CHECK_LT(static_cast<uint32_t>(shamt), 32u);
   EmitI6(0x18, shamt, rs1, 0x5, rd, 0x1b);
 }
 
 void Riscv64Assembler::OrcB(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x14, 0x7, rs1, 0x5, rd, 0x13);
 }
 
 void Riscv64Assembler::Rev8(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x35, 0x18, rs1, 0x5, rd, 0x13);
 }
 
 void Riscv64Assembler::ZbbSextB(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, 0x4, rs1, 0x1, rd, 0x13);
 }
 
 void Riscv64Assembler::ZbbSextH(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x30, 0x5, rs1, 0x1, rd, 0x13);
 }
 
 void Riscv64Assembler::ZbbZextH(XRegister rd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kZbb);
   EmitR(0x4, 0x0, rs1, 0x4, rd, 0x3b);
 }
 
@@ -1185,17 +1382,20 @@
 /////////////////////////////// RVV "VSet" Instructions  START ////////////////////////////
 
 void Riscv64Assembler::VSetvli(XRegister rd, XRegister rs1, uint32_t vtypei) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(IsUint<11>(vtypei));
   EmitI(vtypei, rs1, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
 }
 
 void Riscv64Assembler::VSetivli(XRegister rd, uint32_t uimm, uint32_t vtypei) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(IsUint<10>(vtypei));
   DCHECK(IsUint<5>(uimm));
   EmitI((~0U << 10 | vtypei), uimm, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
 }
 
 void Riscv64Assembler::VSetvl(XRegister rd, XRegister rs1, XRegister rs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   EmitR(0x40, rs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
 }
 
@@ -1204,1698 +1404,2004 @@
 /////////////////////////////// RVV Load/Store Instructions  START ////////////////////////////
 
 void Riscv64Assembler::VLe8(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLe16(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLe32(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLe64(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VSe8(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSe16(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSe32(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSe64(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VLm(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01011, rs1, enum_cast<uint32_t>(VectorWidth::kMask), vd, 0x7);
 }
 
 void Riscv64Assembler::VSm(VRegister vs3, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01011, rs1, enum_cast<uint32_t>(VectorWidth::kMask), vs3, 0x27);
 }
 
 void Riscv64Assembler::VLe8ff(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLe16ff(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLe32ff(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLe64ff(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLse8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLse16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLse32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLse64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VSse8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSse16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSse32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSse64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VLoxei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VSoxei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
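Note on the pattern above: every RVV memory-access emitter in this hunk now starts with
AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV), so emitting a
vector load or store aborts immediately if either general load/store emission or the V
extension is disabled for the compilation target. The vector loads additionally keep their
existing DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0), matching the RVV rule that a masked
instruction must not use the mask register v0 as its destination; stores have no destination
register, so they carry no such check. The helper itself is not part of this diff; the
following is a minimal, self-contained sketch of what such a variadic check could look like,
assuming the assembler tracks enabled extensions as a bitmask. Only AssertExtensionsEnabled,
Riscv64Extension, kLoadStore and kV appear in the diff; all other names below are illustrative.

  #include <cassert>
  #include <cstdint>

  enum class Riscv64Extension : uint32_t { kLoadStore = 0, kV = 1 };  // illustrative values

  class Riscv64AssemblerSketch {
   public:
    explicit Riscv64AssemblerSketch(uint32_t enabled_extensions_mask)
        : enabled_extensions_(enabled_extensions_mask) {}

    bool IsExtensionEnabled(Riscv64Extension ext) const {
      return (enabled_extensions_ & (1u << static_cast<uint32_t>(ext))) != 0u;
    }

    // Variadic form covers calls like AssertExtensionsEnabled(kLoadStore, kV) seen above.
    template <typename... Rest>
    void AssertExtensionsEnabled(Riscv64Extension ext, Rest... rest) const {
      assert(IsExtensionEnabled(ext));  // ART would use CHECK/DCHECK; assert keeps this standalone.
      if constexpr (sizeof...(rest) > 0) {
        AssertExtensionsEnabled(rest...);
      }
    }

   private:
    uint32_t enabled_extensions_;
  };

  int main() {
    // Enable only the load/store bit; leave V disabled.
    Riscv64AssemblerSketch assembler(1u << static_cast<uint32_t>(Riscv64Extension::kLoadStore));
    assembler.AssertExtensionsEnabled(Riscv64Extension::kLoadStore);  // passes
    // assembler.AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
    // would abort here, which is exactly what the added assertions are meant to catch.
    return 0;
  }
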
 void Riscv64Assembler::VLseg2e8(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg2e16(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg2e32(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg2e64(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg3e8(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg3e16(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg3e32(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg3e64(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg4e8(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg4e16(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg4e32(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg4e64(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg5e8(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg5e16(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg5e32(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg5e64(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg6e8(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg6e16(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg6e32(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg6e64(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg7e8(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg7e16(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg7e32(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg7e64(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg8e8(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg8e16(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg8e32(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg8e64(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
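The funct7 value computed by EncodeRVVMemF7 is not shown in this hunk, but the fields it
packs follow the RVV 1.0 load/store encoding: the top seven instruction bits are
nf[2:0] | mew | mop[1:0] | vm, where nf holds the number of segment fields minus one and
vm = 1 means unmasked. A small sketch of that packing, under the assumption that the enum
values used here (Nf, MemAddressMode, VM) map directly onto those raw field values:

  #include <cstdint>

  // Hypothetical helper mirroring the RVV 1.0 bit layout of funct7 for vector loads/stores.
  constexpr uint32_t EncodeRvvMemF7Sketch(uint32_t nf, uint32_t mew, uint32_t mop, uint32_t vm) {
    return (nf << 4) | (mew << 3) | (mop << 1) | vm;
  }

  // Unit-stride (mop = 0), single field (nf = 0), standard width (mew = 0), unmasked (vm = 1).
  static_assert(EncodeRvvMemF7Sketch(/*nf=*/0u, /*mew=*/0u, /*mop=*/0u, /*vm=*/1u) == 0b0000001u,
                "funct7 for a plain unmasked unit-stride access is 0b0000001");
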
 void Riscv64Assembler::VSseg2e8(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg2e16(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg2e32(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg2e64(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg3e8(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg3e16(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg3e32(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg3e64(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg4e8(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg4e16(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg4e32(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg4e64(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg5e8(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg5e16(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg5e32(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg5e64(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg6e8(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg6e16(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg6e32(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg6e64(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg7e8(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg7e16(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg7e32(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg7e64(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg8e8(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg8e16(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg8e32(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSseg8e64(VRegister vs3, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VLseg2e8ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg2e16ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg2e32ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg2e64ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg3e8ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg3e16ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg3e32ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg3e64ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg4e8ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg4e16ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg4e32ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg4e64ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg5e8ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg5e16ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg5e32ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg5e64ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg6e8ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg6e16ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg6e32ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg6e64ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg7e8ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg7e16ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg7e32ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg7e64ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg8e8ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg8e16ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg8e32ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLseg8e64ff(VRegister vd, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg2e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg2e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg2e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg2e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg3e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg3e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg3e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg3e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg4e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg4e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg4e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg4e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg5e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg5e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg5e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg5e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg6e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg6e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg6e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg6e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg7e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg7e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg7e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg7e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg8e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg8e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg8e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLsseg8e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VSsseg2e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg2e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg2e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg2e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg3e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg3e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg3e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg3e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg4e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg4e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg4e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg4e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg5e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg5e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg5e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg5e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg6e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg6e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg6e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg6e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg7e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg7e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg7e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg7e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg8e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg8e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg8e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSsseg8e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VLuxseg2ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg2ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg2ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg2ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg3ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg3ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg3ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg3ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg4ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg4ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg4ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg4ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg5ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg5ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg5ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg5ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg6ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg6ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg6ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg6ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg7ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg7ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg7ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg7ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg8ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg8ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg8ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLuxseg8ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VSuxseg2ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg2ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg2ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg2ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg3ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg3ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg3ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg3ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg4ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg4ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg4ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg4ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg5ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg5ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg5ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg5ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg6ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg6ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg6ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg6ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg7ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg7ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg7ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg7ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg8ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg8ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg8ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSuxseg8ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VLoxseg2ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg2ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg2ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg2ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg3ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg3ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg3ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg3ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg4ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg4ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg4ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg4ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg5ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg5ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg5ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg5ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg6ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg6ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg6ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg6ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg7ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg7ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg7ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg7ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg8ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg8ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg8ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VLoxseg8ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VSoxseg2ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg2ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg2ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg2ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg3ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg3ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg3ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg3ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg4ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg4ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg4ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg4ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg5ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg5ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg5ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg5ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg6ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg6ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg6ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg6ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg7ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg7ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg7ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg7ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg8ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg8ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg8ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
 }
 
 void Riscv64Assembler::VSoxseg8ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
 }
 
 void Riscv64Assembler::VL1re8(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VL1re16(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VL1re32(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VL1re64(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VL2re8(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VL2re16(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VL2re32(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VL2re64(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VL4re8(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VL4re16(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VL4re32(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VL4re64(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
 }
 
 void Riscv64Assembler::VL8re8(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
 }
 
 void Riscv64Assembler::VL8re16(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
 }
 
 void Riscv64Assembler::VL8re32(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
 }
 
 void Riscv64Assembler::VL8re64(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
@@ -2910,21 +3416,25 @@
 void Riscv64Assembler::VL8r(VRegister vd, XRegister rs1) { VL8re8(vd, rs1); }
 
 void Riscv64Assembler::VS1r(VRegister vs3, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
 }
 
 void Riscv64Assembler::VS2r(VRegister vs3, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
 }
 
 void Riscv64Assembler::VS4r(VRegister vs3, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
 }
 
 void Riscv64Assembler::VS8r(VRegister vs3, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
 }
@@ -2934,42 +3444,49 @@
 /////////////////////////////// RVV Arithmetic Instructions  START ////////////////////////////
 
 void Riscv64Assembler::VAdd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VAdd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VAdd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VSub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VRsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VRsub_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
@@ -2978,101 +3495,118 @@
 void Riscv64Assembler::VNeg_v(VRegister vd, VRegister vs2) { VRsub_vx(vd, vs2, Zero); }
 
 void Riscv64Assembler::VMinu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMinu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMin_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMin_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMaxu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMaxu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMax_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMax_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VAnd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VAnd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VAnd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VOr_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VOr_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VOr_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VXor_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VXor_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VXor_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
@@ -3081,6 +3615,7 @@
 void Riscv64Assembler::VNot_v(VRegister vd, VRegister vs2, VM vm) { VXor_vi(vd, vs2, -1, vm); }
 
 void Riscv64Assembler::VRgather_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -3089,6 +3624,7 @@
 }
 
 void Riscv64Assembler::VRgather_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b001100, vm);
@@ -3096,6 +3632,7 @@
 }
 
 void Riscv64Assembler::VRgather_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b001100, vm);
@@ -3103,6 +3640,7 @@
 }
 
 void Riscv64Assembler::VSlideup_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
@@ -3110,6 +3648,7 @@
 }
 
 void Riscv64Assembler::VSlideup_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
@@ -3117,6 +3656,7 @@
 }
 
 void Riscv64Assembler::VRgatherei16_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -3125,6 +3665,7 @@
 }
 
 void Riscv64Assembler::VSlidedown_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
@@ -3132,183 +3673,216 @@
 }
 
 void Riscv64Assembler::VSlidedown_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VAdc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VAdc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VAdc_vim(VRegister vd, VRegister vs2, int32_t imm5) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VMadc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMadc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMadc_vim(VRegister vd, VRegister vs2, int32_t imm5) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VMadc_vv(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMadc_vx(VRegister vd, VRegister vs2, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMadc_vi(VRegister vd, VRegister vs2, int32_t imm5) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VSbc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, VM::kV0_t);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSbc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, VM::kV0_t);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsbc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kV0_t);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsbc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kV0_t);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsbc_vv(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsbc_vx(VRegister vd, VRegister vs2, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kUnmasked);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMerge_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMerge_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMerge_vim(VRegister vd, VRegister vs2, int32_t imm5) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VMv_vv(VRegister vd, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
   EmitR(funct7, V0, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMv_vx(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
   EmitR(funct7, V0, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMv_vi(VRegister vd, int32_t imm5) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
   EmitR(funct7, V0, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VMseq_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMseq_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMseq_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsne_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsne_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsne_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsltu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsltu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011010, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsgtu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   VMsltu_vv(vd, vs1, vs2, vm);
 }
 
 void Riscv64Assembler::VMslt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMslt_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
@@ -3319,18 +3893,21 @@
 }
 
 void Riscv64Assembler::VMsleu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsleu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsleu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
@@ -3346,18 +3923,21 @@
 }
 
 void Riscv64Assembler::VMsle_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsle_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsle_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
@@ -3372,29 +3952,34 @@
 }
 
 void Riscv64Assembler::VMsgtu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011110, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsgtu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011110, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsgeu_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
  CHECK(IsUint<4>(aimm5 - 1)) << "Should be between [1, 16]: " << aimm5;
   VMsgtu_vi(vd, vs2, aimm5 - 1, vm);
 }
 
 void Riscv64Assembler::VMsgt_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsgt_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
@@ -3405,102 +3990,119 @@
 }
 
 void Riscv64Assembler::VSaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSaddu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VSadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSadd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSll_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSll_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSll_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VSmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSmul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::Vmv1r_v(VRegister vd, VRegister vs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
   EmitR(
       funct7, vs2, enum_cast<uint32_t>(Nf::k1), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::Vmv2r_v(VRegister vd, VRegister vs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_EQ(enum_cast<uint32_t>(vd) % 2, 0u);
   DCHECK_EQ(enum_cast<uint32_t>(vs2) % 2, 0u);
   const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
@@ -3509,6 +4111,7 @@
 }
 
 void Riscv64Assembler::Vmv4r_v(VRegister vd, VRegister vs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_EQ(enum_cast<uint32_t>(vd) % 4, 0u);
   DCHECK_EQ(enum_cast<uint32_t>(vs2) % 4, 0u);
   const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
@@ -3517,6 +4120,7 @@
 }
 
 void Riscv64Assembler::Vmv8r_v(VRegister vd, VRegister vs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_EQ(enum_cast<uint32_t>(vd) % 8, 0u);
   DCHECK_EQ(enum_cast<uint32_t>(vs2) % 8, 0u);
   const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
@@ -3525,252 +4129,296 @@
 }
 
 void Riscv64Assembler::VSrl_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSrl_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSrl_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VSra_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSra_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSra_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsrl_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsrl_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsrl_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsra_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsra_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSsra_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VNsrl_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VNsrl_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VNsrl_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VNcvt_x_x_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   VNsrl_wx(vd, vs2, Zero, vm);
 }
 
 void Riscv64Assembler::VNsra_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VNsra_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VNsra_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VNclipu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VNclipu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VNclipu_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VNclip_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VNclip_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VNclip_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
 }
 
 void Riscv64Assembler::VWredsumu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VWredsum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRedsum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRedand_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRedor_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRedxor_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRedminu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRedmin_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRedmaxu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRedmax_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VAaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VAaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VAadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VAadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VAsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VAsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VAsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VAsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VSlide1up_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
@@ -3778,12 +4426,14 @@
 }
 
 void Riscv64Assembler::VSlide1down_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VCompress_vm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
@@ -3791,11 +4441,13 @@
 }
 
 void Riscv64Assembler::VMandn_mm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b011000, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMand_mm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b011001, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
@@ -3803,11 +4455,13 @@
 void Riscv64Assembler::VMmv_m(VRegister vd, VRegister vs2) { VMand_mm(vd, vs2, vs2); }
 
 void Riscv64Assembler::VMor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b011010, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMxor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b011011, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
@@ -3815,11 +4469,13 @@
 void Riscv64Assembler::VMclr_m(VRegister vd) { VMxor_mm(vd, vd, vd); }
 
 void Riscv64Assembler::VMorn_mm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b011100, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMnand_mm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b011101, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
@@ -3827,11 +4483,13 @@
 void Riscv64Assembler::VMnot_m(VRegister vd, VRegister vs2) { VMnand_mm(vd, vs2, vs2); }
 
 void Riscv64Assembler::VMnor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b011110, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMxnor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b011111, VM::kUnmasked);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
@@ -3839,138 +4497,161 @@
 void Riscv64Assembler::VMset_m(VRegister vd) { VMxnor_mm(vd, vd, vd); }
 
 void Riscv64Assembler::VDivu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VDivu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VDiv_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VDiv_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VRemu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRemu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VRem_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VRem_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMulhu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMulhu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMulhsu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100110, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMulhsu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100110, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMulh_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMulh_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMadd_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VNmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VNmsub_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMacc_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VNmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -3979,12 +4660,14 @@
 }
 
 void Riscv64Assembler::VNmsac_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VWaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -3993,6 +4676,7 @@
 }
 
 void Riscv64Assembler::VWaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
@@ -4004,6 +4688,7 @@
 }
 
 void Riscv64Assembler::VWadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4012,6 +4697,7 @@
 }
 
 void Riscv64Assembler::VWadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
@@ -4023,6 +4709,7 @@
 }
 
 void Riscv64Assembler::VWsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4031,6 +4718,7 @@
 }
 
 void Riscv64Assembler::VWsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
@@ -4038,6 +4726,7 @@
 }
 
 void Riscv64Assembler::VWsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4046,6 +4735,7 @@
 }
 
 void Riscv64Assembler::VWsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b110011, vm);
@@ -4053,6 +4743,7 @@
 }
 
 void Riscv64Assembler::VWaddu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
@@ -4060,12 +4751,14 @@
 }
 
 void Riscv64Assembler::VWaddu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VWadd_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   const uint32_t funct7 = EncodeRVVF7(0b110101, vm);
@@ -4073,12 +4766,14 @@
 }
 
 void Riscv64Assembler::VWadd_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b110101, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VWsubu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
@@ -4086,12 +4781,14 @@
 }
 
 void Riscv64Assembler::VWsubu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VWsub_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   const uint32_t funct7 = EncodeRVVF7(0b110111, vm);
@@ -4099,12 +4796,14 @@
 }
 
 void Riscv64Assembler::VWsub_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b110111, vm);
   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VWmulu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4113,6 +4812,7 @@
 }
 
 void Riscv64Assembler::VWmulu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
@@ -4120,6 +4820,7 @@
 }
 
 void Riscv64Assembler::VWmulsu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4128,6 +4829,7 @@
 }
 
 void Riscv64Assembler::VWmulsu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111010, vm);
@@ -4135,6 +4837,7 @@
 }
 
 void Riscv64Assembler::VWmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4143,6 +4846,7 @@
 }
 
 void Riscv64Assembler::VWmul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111011, vm);
@@ -4150,6 +4854,7 @@
 }
 
 void Riscv64Assembler::VWmaccu_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4158,6 +4863,7 @@
 }
 
 void Riscv64Assembler::VWmaccu_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
@@ -4165,6 +4871,7 @@
 }
 
 void Riscv64Assembler::VWmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4173,6 +4880,7 @@
 }
 
 void Riscv64Assembler::VWmacc_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
@@ -4180,6 +4888,7 @@
 }
 
 void Riscv64Assembler::VWmaccus_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111110, vm);
@@ -4187,6 +4896,7 @@
 }
 
 void Riscv64Assembler::VWmaccsu_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4195,6 +4905,7 @@
 }
 
 void Riscv64Assembler::VWmaccsu_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
@@ -4202,92 +4913,108 @@
 }
 
 void Riscv64Assembler::VFadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFadd_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFredusum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFredosum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmin_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmin_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFredmin_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmax_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmax_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFredmax_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFsgnj_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFsgnj_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFsgnjn_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFsgnjn_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
@@ -4296,12 +5023,14 @@
 void Riscv64Assembler::VFneg_v(VRegister vd, VRegister vs) { VFsgnjn_vv(vd, vs, vs); }
 
 void Riscv64Assembler::VFsgnjx_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFsgnjx_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
@@ -4310,6 +5039,7 @@
 void Riscv64Assembler::VFabs_v(VRegister vd, VRegister vs) { VFsgnjx_vv(vd, vs, vs); }
 
 void Riscv64Assembler::VFslide1up_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
@@ -4317,41 +5047,48 @@
 }
 
 void Riscv64Assembler::VFslide1down_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmerge_vfm(VRegister vd, VRegister vs2, FRegister fs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK(vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmv_v_f(VRegister vd, FRegister fs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
   EmitR(funct7, V0, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VMfeq_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMfeq_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VMfle_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMfle_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
@@ -4362,12 +5099,14 @@
 }
 
 void Riscv64Assembler::VMflt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMflt_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
@@ -4378,161 +5117,188 @@
 }
 
 void Riscv64Assembler::VMfne_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMfne_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VMfgt_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VMfge_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFdiv_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFdiv_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFrdiv_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmul_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFrsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmadd_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFnmadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFnmadd_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmsub_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFnmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFnmsub_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFnmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFnmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFnmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFnmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFwadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4541,6 +5307,7 @@
 }
 
 void Riscv64Assembler::VFwadd_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
@@ -4548,12 +5315,14 @@
 }
 
 void Riscv64Assembler::VFwredusum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFwsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4562,6 +5331,7 @@
 }
 
 void Riscv64Assembler::VFwsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
@@ -4569,11 +5339,13 @@
 }
 
 void Riscv64Assembler::VFwredosum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b110011, vm);
   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFwadd_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
@@ -4581,12 +5353,14 @@
 }
 
 void Riscv64Assembler::VFwadd_wf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFwsub_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
@@ -4594,12 +5368,14 @@
 }
 
 void Riscv64Assembler::VFwsub_wf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
   EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFwmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4608,6 +5384,7 @@
 }
 
 void Riscv64Assembler::VFwmul_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
@@ -4615,6 +5392,7 @@
 }
 
 void Riscv64Assembler::VFwmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4623,6 +5401,7 @@
 }
 
 void Riscv64Assembler::VFwmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
@@ -4630,6 +5409,7 @@
 }
 
 void Riscv64Assembler::VFwnmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4638,6 +5418,7 @@
 }
 
 void Riscv64Assembler::VFwnmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
@@ -4645,6 +5426,7 @@
 }
 
 void Riscv64Assembler::VFwmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4653,6 +5435,7 @@
 }
 
 void Riscv64Assembler::VFwmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111110, vm);
@@ -4660,6 +5443,7 @@
 }
 
 void Riscv64Assembler::VFwnmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs1);
   DCHECK(vd != vs2);
@@ -4668,6 +5452,7 @@
 }
 
 void Riscv64Assembler::VFwnmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
@@ -4675,108 +5460,127 @@
 }
 
 void Riscv64Assembler::VMv_s_x(VRegister vd, XRegister rs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
   EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
 }
 
 void Riscv64Assembler::VMv_x_s(XRegister rd, VRegister vs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
   EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
 }
 
 void Riscv64Assembler::VCpop_m(XRegister rd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010000, vm);
   EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
 }
 
 void Riscv64Assembler::VFirst_m(XRegister rd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010000, vm);
   EmitR(funct7, vs2, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
 }
 
 void Riscv64Assembler::VZext_vf8(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00010, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSext_vf8(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00011, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VZext_vf4(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00100, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSext_vf4(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00101, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VZext_vf2(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00110, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VSext_vf2(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00111, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmv_s_f(VRegister vd, FRegister fs1) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
   EmitR(funct7, 0b00000, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
 }
 
 void Riscv64Assembler::VFmv_f_s(FRegister fd, VRegister vs2) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
   EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), fd, 0x57);
 }
 
 void Riscv64Assembler::VFcvt_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFcvt_x_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00001, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFcvt_f_xu_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00010, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFcvt_f_x_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00011, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFcvt_rtz_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00110, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFcvt_rtz_x_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b00111, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFwcvt_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
@@ -4784,6 +5588,7 @@
 }
 
 void Riscv64Assembler::VFwcvt_x_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
@@ -4791,6 +5596,7 @@
 }
 
 void Riscv64Assembler::VFwcvt_f_xu_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
@@ -4798,6 +5604,7 @@
 }
 
 void Riscv64Assembler::VFwcvt_f_x_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
@@ -4805,6 +5612,7 @@
 }
 
 void Riscv64Assembler::VFwcvt_f_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
@@ -4812,6 +5620,7 @@
 }
 
 void Riscv64Assembler::VFwcvt_rtz_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
@@ -4819,6 +5628,7 @@
 }
 
 void Riscv64Assembler::VFwcvt_rtz_x_f_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
@@ -4826,78 +5636,91 @@
 }
 
 void Riscv64Assembler::VFncvt_xu_f_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFncvt_x_f_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFncvt_f_xu_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b10010, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFncvt_f_x_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b10011, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFncvt_f_f_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b10100, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFncvt_rod_f_f_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b10101, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFncvt_rtz_xu_f_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b10110, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFncvt_rtz_x_f_w(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
   EmitR(funct7, vs2, 0b10111, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFsqrt_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
   EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFrsqrt7_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
   EmitR(funct7, vs2, 0b00100, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFrec7_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
   EmitR(funct7, vs2, 0b00101, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VFclass_v(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
   EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
 }
 
 void Riscv64Assembler::VMsbf_m(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
@@ -4905,6 +5728,7 @@
 }
 
 void Riscv64Assembler::VMsof_m(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
@@ -4912,6 +5736,7 @@
 }
 
 void Riscv64Assembler::VMsif_m(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
@@ -4919,6 +5744,7 @@
 }
 
 void Riscv64Assembler::VIota_m(VRegister vd, VRegister vs2, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   DCHECK(vd != vs2);
   const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
@@ -4926,6 +5752,7 @@
 }
 
 void Riscv64Assembler::VId_v(VRegister vd, VM vm) {
+  AssertExtensionsEnabled(Riscv64Extension::kV);
   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
   const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
   EmitR(funct7, V0, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
@@ -4952,28 +5779,75 @@
 void Riscv64Assembler::NegW(XRegister rd, XRegister rs) { Subw(rd, Zero, rs); }
 
 void Riscv64Assembler::SextB(XRegister rd, XRegister rs) {
-  Slli(rd, rs, kXlen - 8u);
-  Srai(rd, rd, kXlen - 8u);
+  if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
+    if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
+      CSextB(rd);
+    } else {
+      ZbbSextB(rd, rs);
+    }
+  } else {
+    Slli(rd, rs, kXlen - 8u);
+    Srai(rd, rd, kXlen - 8u);
+  }
 }
 
 void Riscv64Assembler::SextH(XRegister rd, XRegister rs) {
-  Slli(rd, rs, kXlen - 16u);
-  Srai(rd, rd, kXlen - 16u);
+  if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
+    if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
+      CSextH(rd);
+    } else {
+      ZbbSextH(rd, rs);
+    }
+  } else {
+    Slli(rd, rs, kXlen - 16u);
+    Srai(rd, rd, kXlen - 16u);
+  }
 }
 
-void Riscv64Assembler::SextW(XRegister rd, XRegister rs) { Addiw(rd, rs, 0); }
+void Riscv64Assembler::SextW(XRegister rd, XRegister rs) {
+  if (IsExtensionEnabled(Riscv64Extension::kZca) && rd != Zero && (rd == rs || rs == Zero)) {
+    if (rd == rs) {
+      CAddiw(rd, 0);
+    } else {
+      CLi(rd, 0);
+    }
+  } else {
+    Addiw(rd, rs, 0);
+  }
+}
 
-void Riscv64Assembler::ZextB(XRegister rd, XRegister rs) { Andi(rd, rs, 0xff); }
+void Riscv64Assembler::ZextB(XRegister rd, XRegister rs) {
+  if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
+    CZextB(rd);
+  } else {
+    Andi(rd, rs, 0xff);
+  }
+}
 
 void Riscv64Assembler::ZextH(XRegister rd, XRegister rs) {
-  Slli(rd, rs, kXlen - 16u);
-  Srli(rd, rd, kXlen - 16u);
+  if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
+    if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
+      CZextH(rd);
+    } else {
+      ZbbZextH(rd, rs);
+    }
+  } else {
+    Slli(rd, rs, kXlen - 16u);
+    Srli(rd, rd, kXlen - 16u);
+  }
 }
 
 void Riscv64Assembler::ZextW(XRegister rd, XRegister rs) {
-  // TODO(riscv64): Use the ZEXT.W alias for ADD.UW from the Zba extension.
-  Slli(rd, rs, kXlen - 32u);
-  Srli(rd, rd, kXlen - 32u);
+  if (IsExtensionEnabled(Riscv64Extension::kZba)) {
+    if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
+      CZextW(rd);
+    } else {
+      AddUw(rd, rs, Zero);
+    }
+  } else {
+    Slli(rd, rs, kXlen - 32u);
+    Srli(rd, rd, kXlen - 32u);
+  }
 }
 
 void Riscv64Assembler::Seqz(XRegister rd, XRegister rs) { Sltiu(rd, rs, 1); }
diff --git a/compiler/utils/riscv64/assembler_riscv64.h b/compiler/utils/riscv64/assembler_riscv64.h
index 1696251..40c6381 100644
--- a/compiler/utils/riscv64/assembler_riscv64.h
+++ b/compiler/utils/riscv64/assembler_riscv64.h
@@ -41,6 +41,45 @@
 static constexpr size_t kRiscv64DoublewordSize = 8;
 static constexpr size_t kRiscv64FloatRegSizeInBytes = 8;
 
+// The `Riscv64Extension` enumeration is used for restricting the instructions that the assembler
+// can use. Some restrictions are checked only in debug mode (for example, load and store
+// instructions check `kLoadStore`), while other restrictions are checked at run time and affect
+// the emitted code (for example, the `SextB()` pseudo-instruction selects between an
+// implementation from "Zcb", "Zbb" and a two-instruction sequence from the basic instruction set).
+enum class Riscv64Extension : uint32_t {
+  kLoadStore,  // Pseudo-extension encompassing all loads and stores. Used to check that
+               // we do not have loads and stores in the middle of a LR/SC sequence.
+  kZifencei,
+  kM,
+  kA,
+  kZicsr,
+  kF,
+  kD,
+  kZba,
+  kZbb,
+  kZbs,  // TODO(riscv64): Implement "Zbs" instructions.
+  kV,
+  kZca,  // "C" extension instructions except floating point loads/stores.
+  kZcd,  // "C" extension double loads/stores.
+         // Note: RV64 cannot implement Zcf ("C" extension float loads/stores).
+  kZcb,  // Simple 16-bit operations not present in the original "C" extension.
+
+  kLast = kZcb
+};
+
+using Riscv64ExtensionMask = uint32_t;
+
+constexpr Riscv64ExtensionMask Riscv64ExtensionBit(Riscv64Extension ext) {
+  return 1u << enum_cast<>(ext);
+}
+
+constexpr Riscv64ExtensionMask kRiscv64AllExtensionsMask =
+    MaxInt<Riscv64ExtensionMask>(enum_cast<>(Riscv64Extension::kLast) + 1);
+
+// Extensions allowed in a LR/SC sequence (between the LR and SC).
+constexpr Riscv64ExtensionMask kRiscv64LrScSequenceExtensionsMask =
+    Riscv64ExtensionBit(Riscv64Extension::kZca);
+
 enum class FPRoundingMode : uint32_t {
   kRNE = 0x0,  // Round to Nearest, ties to Even
   kRTZ = 0x1,  // Round towards Zero
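
The extension mask and the LR/SC restriction added in the hunk above are easiest to see from the
caller's side. A minimal sketch, not part of the patch, using the declarations introduced elsewhere
in this change (`EmitWithRestrictedIsa` and the `allocator` parameter are made up for illustration):

  void EmitWithRestrictedIsa(ArenaAllocator* allocator) {
    // Enable only the load/store pseudo-extension, "A" and "Zca" for this assembler instance.
    Riscv64ExtensionMask mask = Riscv64ExtensionBit(Riscv64Extension::kLoadStore) |
                                Riscv64ExtensionBit(Riscv64Extension::kA) |
                                Riscv64ExtensionBit(Riscv64Extension::kZca);
    Riscv64Assembler assembler(allocator, mask);
    {
      // Between LR and SC only "Zca" remains enabled; emitting a load, store or any other
      // restricted instruction here would trip the debug-mode extension check.
      ScopedLrScExtensionsRestriction no_loads_stores(&assembler);
      DCHECK(assembler.IsExtensionEnabled(Riscv64Extension::kZca));
      DCHECK(!assembler.IsExtensionEnabled(Riscv64Extension::kLoadStore));
    }
    // The original mask is restored when the scope ends.
  }
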
@@ -175,6 +214,12 @@
  public:
   explicit Riscv64Assembler(ArenaAllocator* allocator,
                             const Riscv64InstructionSetFeatures* instruction_set_features = nullptr)
+      : Riscv64Assembler(allocator,
+                         instruction_set_features != nullptr
+                             ? ConvertExtensions(instruction_set_features)
+                             : kRiscv64AllExtensionsMask) {}
+
+  Riscv64Assembler(ArenaAllocator* allocator, Riscv64ExtensionMask enabled_extensions)
       : Assembler(allocator),
         branches_(allocator->Adapter(kArenaAllocAssembler)),
         finalized_(false),
@@ -186,9 +231,9 @@
         last_position_adjustment_(0),
         last_old_position_(0),
         last_branch_id_(0),
+        enabled_extensions_(enabled_extensions),
         available_scratch_core_registers_((1u << TMP) | (1u << TMP2)),
         available_scratch_fp_registers_(1u << FTMP) {
-    UNUSED(instruction_set_features);
     cfi().DelayEmittingAdvancePCs();
   }
 
@@ -201,6 +246,10 @@
   size_t CodeSize() const override { return Assembler::CodeSize(); }
   DebugFrameOpCodeWriterForAssembler& cfi() { return Assembler::cfi(); }
 
+  bool IsExtensionEnabled(Riscv64Extension ext) {
+    return (enabled_extensions_ & Riscv64ExtensionBit(ext)) != 0u;
+  }
+
   // According to "The RISC-V Instruction Set Manual"
 
   // LUI/AUIPC (RV32I, with sign-extension on RV64I), opcode = 0x17, 0x37
@@ -530,13 +579,15 @@
   void CLh(XRegister rd_s, XRegister rs1_s, int32_t offset);
   void CSb(XRegister rd_s, XRegister rs1_s, int32_t offset);
   void CSh(XRegister rd_s, XRegister rs1_s, int32_t offset);
-  void CZext_b(XRegister rd_rs1_s);
-  void CSext_b(XRegister rd_rs1_s);
-  void CZext_h(XRegister rd_rs1_s);
-  void CSext_h(XRegister rd_rs1_s);
-  void CZext_w(XRegister rd_rs1_s);
+  void CZextB(XRegister rd_rs1_s);
+  void CSextB(XRegister rd_rs1_s);
+  void CZextH(XRegister rd_rs1_s);
+  void CSextH(XRegister rd_rs1_s);
+  void CZextW(XRegister rd_rs1_s);
   void CNot(XRegister rd_rs1_s);
   void CMul(XRegister rd_s, XRegister rs2_s);
+  // "Zcb" Standard Extension End; resume "C" Standard Extension.
+  // TODO(riscv64): Reorder "Zcb" after remaining "C" instructions.
 
   void CJ(int32_t offset);
   void CJr(XRegister rs1);
@@ -1841,6 +1892,38 @@
   uint32_t GetAdjustedPosition(uint32_t old_position);
 
  private:
+  static uint32_t ConvertExtensions(
+      const Riscv64InstructionSetFeatures* instruction_set_features) {
+    // The `Riscv64InstructionSetFeatures` currently does not support "Zcb",
+    // only the original "C" extension. For riscv64 that means "Zca" and "Zcd".
+    constexpr Riscv64ExtensionMask kCompressedExtensionsMask =
+        Riscv64ExtensionBit(Riscv64Extension::kZca) | Riscv64ExtensionBit(Riscv64Extension::kZcd);
+    return
+        (Riscv64ExtensionBit(Riscv64Extension::kLoadStore)) |
+        (Riscv64ExtensionBit(Riscv64Extension::kZifencei)) |
+        (Riscv64ExtensionBit(Riscv64Extension::kM)) |
+        (Riscv64ExtensionBit(Riscv64Extension::kA)) |
+        (Riscv64ExtensionBit(Riscv64Extension::kZicsr)) |
+        (Riscv64ExtensionBit(Riscv64Extension::kF)) |
+        (Riscv64ExtensionBit(Riscv64Extension::kD)) |
+        (instruction_set_features->HasZba() ? Riscv64ExtensionBit(Riscv64Extension::kZba) : 0u) |
+        (instruction_set_features->HasZbb() ? Riscv64ExtensionBit(Riscv64Extension::kZbb) : 0u) |
+        (instruction_set_features->HasZbs() ? Riscv64ExtensionBit(Riscv64Extension::kZbs) : 0u) |
+        (instruction_set_features->HasVector() ? Riscv64ExtensionBit(Riscv64Extension::kV) : 0u) |
+        (instruction_set_features->HasCompressed() ? kCompressedExtensionsMask : 0u);
+  }
+
+  void AssertExtensionsEnabled(Riscv64Extension ext) {
+    DCHECK(IsExtensionEnabled(ext))
+        << "ext=" << enum_cast<>(ext) << " enabled=0x" << std::hex << enabled_extensions_;
+  }
+
+  template <typename... OtherExt>
+  void AssertExtensionsEnabled(Riscv64Extension ext, OtherExt... other_ext) {
+    AssertExtensionsEnabled(ext);
+    AssertExtensionsEnabled(other_ext...);
+  }
+
   enum BranchCondition : uint8_t {
     kCondEQ,
     kCondNE,
@@ -2610,16 +2693,53 @@
   uint32_t last_old_position_;
   uint32_t last_branch_id_;
 
+  Riscv64ExtensionMask enabled_extensions_;
   uint32_t available_scratch_core_registers_;
   uint32_t available_scratch_fp_registers_;
 
   static constexpr uint32_t kXlen = 64;
 
+  friend class ScopedExtensionsOverride;
   friend class ScratchRegisterScope;
 
   DISALLOW_COPY_AND_ASSIGN(Riscv64Assembler);
 };
 
+class ScopedExtensionsOverride {
+ public:
+  ScopedExtensionsOverride(Riscv64Assembler* assembler, Riscv64ExtensionMask enabled_extensions)
+      : assembler_(assembler),
+        old_enabled_extensions_(assembler->enabled_extensions_) {
+    assembler->enabled_extensions_ = enabled_extensions;
+  }
+
+  ~ScopedExtensionsOverride() {
+    assembler_->enabled_extensions_ = old_enabled_extensions_;
+  }
+
+ protected:
+  static Riscv64ExtensionMask GetEnabledExtensions(Riscv64Assembler* assembler) {
+    return assembler->enabled_extensions_;
+  }
+
+ private:
+  Riscv64Assembler* const assembler_;
+  const Riscv64ExtensionMask old_enabled_extensions_;
+};
+
+template <Riscv64ExtensionMask kMask>
+class ScopedExtensionsRestriction : public ScopedExtensionsOverride {
+ public:
+  explicit ScopedExtensionsRestriction(Riscv64Assembler* assembler)
+      : ScopedExtensionsOverride(assembler, GetEnabledExtensions(assembler) & kMask) {}
+};
+
+template <Riscv64ExtensionMask kMask>
+using ScopedExtensionsExclusion = ScopedExtensionsRestriction<~kMask>;
+
+using ScopedLrScExtensionsRestriction =
+    ScopedExtensionsRestriction<kRiscv64LrScSequenceExtensionsMask>;
+
 class ScratchRegisterScope {
  public:
   explicit ScratchRegisterScope(Riscv64Assembler* assembler)
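
Since `ScopedExtensionsExclusion<kMask>` is just `ScopedExtensionsRestriction<~kMask>`, excluding a
set of extensions leaves everything outside that set untouched. A hedged usage sketch, not code
from the patch (the surrounding `assembler` object is assumed to exist with "C" enabled):

  {
    constexpr Riscv64ExtensionMask kNoCompressed =
        Riscv64ExtensionBit(Riscv64Extension::kZca) | Riscv64ExtensionBit(Riscv64Extension::kZcd);
    // Temporarily forbid 16-bit encodings; pseudo-instructions such as SextW() then fall back
    // to the full-size ADDIW while unrelated extensions (e.g. "Zbb") stay enabled.
    ScopedExtensionsExclusion<kNoCompressed> no_compressed(&assembler);
    DCHECK(!assembler.IsExtensionEnabled(Riscv64Extension::kZca));
  }
  // The previous mask is restored by ~ScopedExtensionsOverride() on scope exit.

The assembler tests below use the same pattern through `ScopedCSuppression` and the other
suppression helpers.
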
diff --git a/compiler/utils/riscv64/assembler_riscv64_test.cc b/compiler/utils/riscv64/assembler_riscv64_test.cc
index 87c7641..a327987 100644
--- a/compiler/utils/riscv64/assembler_riscv64_test.cc
+++ b/compiler/utils/riscv64/assembler_riscv64_test.cc
@@ -28,6 +28,11 @@
 namespace art HIDDEN {
 namespace riscv64 {
 
+constexpr Riscv64ExtensionMask kRiscv64CompressedExtensionsMask =
+    Riscv64ExtensionBit(Riscv64Extension::kZca) |
+    Riscv64ExtensionBit(Riscv64Extension::kZcd) |
+    Riscv64ExtensionBit(Riscv64Extension::kZcb);
+
 struct RISCV64CpuRegisterCompare {
   bool operator()(const XRegister& a, const XRegister& b) const { return a < b; }
 };
@@ -42,12 +47,11 @@
   using Base =
       AssemblerTest<Riscv64Assembler, Riscv64Label, XRegister, FRegister, int32_t, VRegister>;
 
-  AssemblerRISCV64Test()
-      : instruction_set_features_(Riscv64InstructionSetFeatures::FromVariant("generic", nullptr)) {}
+  AssemblerRISCV64Test() {}
 
  protected:
   Riscv64Assembler* CreateAssembler(ArenaAllocator* allocator) override {
-    return new (allocator) Riscv64Assembler(allocator, instruction_set_features_.get());
+    return new (allocator) Riscv64Assembler(allocator, kRiscv64AllExtensionsMask);
   }
 
   InstructionSet GetIsa() override { return InstructionSet::kRiscv64; }
@@ -71,10 +75,40 @@
   class ScopedCSuppression {
    public:
     explicit ScopedCSuppression(AssemblerRISCV64Test* test)
-        : smo_(test, "-march=rv64imafdv_zba_zbb") {}
+        : smo_(test, "-march=rv64imafdv_zba_zbb"),
+          exclusion_(test->GetAssembler()) {}
 
    private:
     ScopedMarchOverride smo_;
+    ScopedExtensionsExclusion<kRiscv64CompressedExtensionsMask> exclusion_;
+  };
+
+  class ScopedZbaAndCSuppression {
+   public:
+    explicit ScopedZbaAndCSuppression(AssemblerRISCV64Test* test)
+        : smo_(test, "-march=rv64imafdv_zbb"),
+          exclusion_(test->GetAssembler()) {}
+
+   private:
+    static constexpr Riscv64ExtensionMask kExcludedExtensions =
+        Riscv64ExtensionBit(Riscv64Extension::kZba) | kRiscv64CompressedExtensionsMask;
+
+    ScopedMarchOverride smo_;
+    ScopedExtensionsExclusion<kExcludedExtensions> exclusion_;
+  };
+
+  class ScopedZbbAndCSuppression {
+   public:
+    explicit ScopedZbbAndCSuppression(AssemblerRISCV64Test* test)
+        : smo_(test, "-march=rv64imafdv_zba"),
+          exclusion_(test->GetAssembler()) {}
+
+   private:
+    static constexpr Riscv64ExtensionMask kExcludedExtensions =
+        Riscv64ExtensionBit(Riscv64Extension::kZbb) | kRiscv64CompressedExtensionsMask;
+
+    ScopedMarchOverride smo_;
+    ScopedExtensionsExclusion<kExcludedExtensions> exclusion_;
   };
 
   std::vector<std::string> GetAssemblerCommand() override {
@@ -2186,7 +2220,6 @@
 
   std::map<XRegister, std::string, RISCV64CpuRegisterCompare> secondary_register_names_;
 
-  std::unique_ptr<const Riscv64InstructionSetFeatures> instruction_set_features_;
   std::optional<std::string> march_override_;
 };
 
@@ -3419,24 +3452,24 @@
             "CSh");
 }
 
-TEST_F(AssemblerRISCV64Test, CZext_b) {
-  DriverStr(RepeatCRShort(&Riscv64Assembler::CZext_b, "c.zext.b {reg}"), "CZext_b");
+TEST_F(AssemblerRISCV64Test, CZextB) {
+  DriverStr(RepeatCRShort(&Riscv64Assembler::CZextB, "c.zext.b {reg}"), "CZextB");
 }
 
-TEST_F(AssemblerRISCV64Test, CSext_b) {
-  DriverStr(RepeatCRShort(&Riscv64Assembler::CSext_b, "c.sext.b {reg}"), "CSext_b");
+TEST_F(AssemblerRISCV64Test, CSextB) {
+  DriverStr(RepeatCRShort(&Riscv64Assembler::CSextB, "c.sext.b {reg}"), "CSextB");
 }
 
-TEST_F(AssemblerRISCV64Test, CZext_h) {
-  DriverStr(RepeatCRShort(&Riscv64Assembler::CZext_h, "c.zext.h {reg}"), "CZext_h");
+TEST_F(AssemblerRISCV64Test, CZextH) {
+  DriverStr(RepeatCRShort(&Riscv64Assembler::CZextH, "c.zext.h {reg}"), "CZextH");
 }
 
-TEST_F(AssemblerRISCV64Test, CSext_h) {
-  DriverStr(RepeatCRShort(&Riscv64Assembler::CSext_h, "c.sext.h {reg}"), "CSext_h");
+TEST_F(AssemblerRISCV64Test, CSextH) {
+  DriverStr(RepeatCRShort(&Riscv64Assembler::CSextH, "c.sext.h {reg}"), "CSextH");
 }
 
-TEST_F(AssemblerRISCV64Test, CZext_w) {
-  DriverStr(RepeatCRShort(&Riscv64Assembler::CZext_w, "c.zext.w {reg}"), "CZext_w");
+TEST_F(AssemblerRISCV64Test, CZextW) {
+  DriverStr(RepeatCRShort(&Riscv64Assembler::CZextW, "c.zext.w {reg}"), "CZextW");
 }
 
 TEST_F(AssemblerRISCV64Test, CNot) {
@@ -7875,48 +7908,81 @@
 }
 
 TEST_F(AssemblerRISCV64Test, SextB) {
+  DriverStr(RepeatRR(&Riscv64Assembler::SextB, "sext.b {reg1}, {reg2}\n"), "SextB");
+}
+
+TEST_F(AssemblerRISCV64Test, SextB_WithoutC) {
   ScopedCSuppression scs(this);
-  // Note: SEXT.B from the Zbb extension is not supported.
-  DriverStr(RepeatRR(&Riscv64Assembler::SextB,
-                     "slli {reg1}, {reg2}, 56\n"
-                     "srai {reg1}, {reg1}, 56"),
-            "SextB");
+  DriverStr(RepeatRR(&Riscv64Assembler::SextB, "sext.b {reg1}, {reg2}\n"), "SextB_WithoutC");
+}
+
+// TODO: Add test `SextB_WithoutZbb` when `Slli()` and `Srai()` auto-forward to 16-bit functions.
+TEST_F(AssemblerRISCV64Test, SextB_WithoutZbbAndC) {
+  ScopedZbbAndCSuppression scs(this);
+  DriverStr(RepeatRR(&Riscv64Assembler::SextB, "sext.b {reg1}, {reg2}\n"), "SextB_WithoutZbbAndC");
 }
 
 TEST_F(AssemblerRISCV64Test, SextH) {
+  DriverStr(RepeatRR(&Riscv64Assembler::SextH, "sext.h {reg1}, {reg2}\n"), "SextH");
+}
+
+TEST_F(AssemblerRISCV64Test, SextH_WithoutC) {
   ScopedCSuppression scs(this);
-  // Note: SEXT.H from the Zbb extension is not supported.
-  DriverStr(RepeatRR(&Riscv64Assembler::SextH,
-                     "slli {reg1}, {reg2}, 48\n"
-                     "srai {reg1}, {reg1}, 48"),
-            "SextH");
+  DriverStr(RepeatRR(&Riscv64Assembler::SextH, "sext.h {reg1}, {reg2}\n"), "SextH_WithoutC");
+}
+
+// TODO: Add test `SextH_WithoutZbb` when `Slli()` and `Srai()` auto-forward to 16-bit functions.
+TEST_F(AssemblerRISCV64Test, SextH_WithoutZbbAndC) {
+  ScopedZbbAndCSuppression scs(this);
+  DriverStr(RepeatRR(&Riscv64Assembler::SextH, "sext.h {reg1}, {reg2}\n"), "SextH_WithoutZbbAndC");
 }
 
 TEST_F(AssemblerRISCV64Test, SextW) {
+  DriverStr(RepeatRR(&Riscv64Assembler::SextW, "sext.w {reg1}, {reg2}\n"), "SextW");
+}
+
+TEST_F(AssemblerRISCV64Test, SextW_WithoutC) {
   ScopedCSuppression scs(this);
-  DriverStr(RepeatRR(&Riscv64Assembler::SextW, "addiw {reg1}, {reg2}, 0\n"), "SextW");
+  DriverStr(RepeatRR(&Riscv64Assembler::SextW, "sext.w {reg1}, {reg2}\n"), "SextW_WithoutC");
 }
 
 TEST_F(AssemblerRISCV64Test, ZextB) {
+  DriverStr(RepeatRR(&Riscv64Assembler::ZextB, "zext.b {reg1}, {reg2}"), "ZextB");
+}
+
+TEST_F(AssemblerRISCV64Test, ZextB_WithoutC) {
   ScopedCSuppression scs(this);
-  DriverStr(RepeatRR(&Riscv64Assembler::ZextB, "andi {reg1}, {reg2}, 255"), "ZextB");
+  DriverStr(RepeatRR(&Riscv64Assembler::ZextB, "zext.b {reg1}, {reg2}"), "ZextB_WithoutC");
 }
 
 TEST_F(AssemblerRISCV64Test, ZextH) {
+  DriverStr(RepeatRR(&Riscv64Assembler::ZextH, "zext.h {reg1}, {reg2}\n"), "ZextH");
+}
+
+TEST_F(AssemblerRISCV64Test, ZextH_WithoutC) {
   ScopedCSuppression scs(this);
-  // Note: ZEXT.H from the Zbb extension is not supported.
-  DriverStr(RepeatRR(&Riscv64Assembler::ZextH,
-                     "slli {reg1}, {reg2}, 48\n"
-                     "srli {reg1}, {reg1}, 48"),
-            "SextH");
+  DriverStr(RepeatRR(&Riscv64Assembler::ZextH, "zext.h {reg1}, {reg2}\n"), "ZextH_WithoutC");
+}
+
+// TODO: Add test `ZextH_WithoutZbb` when `Slli()` and `Srli()` auto-forward to 16-bit functions.
+TEST_F(AssemblerRISCV64Test, ZextH_WithoutZbbAndC) {
+  ScopedZbbAndCSuppression scs(this);
+  DriverStr(RepeatRR(&Riscv64Assembler::ZextH, "zext.h {reg1}, {reg2}\n"), "ZextH_WithoutZbbAndC");
 }
 
 TEST_F(AssemblerRISCV64Test, ZextW) {
+  DriverStr(RepeatRR(&Riscv64Assembler::ZextW, "zext.w {reg1}, {reg2}\n"), "ZextW");
+}
+
+TEST_F(AssemblerRISCV64Test, ZextW_WithoutC) {
   ScopedCSuppression scs(this);
-  DriverStr(RepeatRR(&Riscv64Assembler::ZextW,
-                     "slli {reg1}, {reg2}, 32\n"
-                     "srli {reg1}, {reg1}, 32"),
-            "ZextW");
+  DriverStr(RepeatRR(&Riscv64Assembler::ZextW, "zext.w {reg1}, {reg2}\n"), "ZextW_WithoutC");
+}
+
+// TODO: Add test `ZextW_WithoutZba` when `Slli()` and `Srli()` auto-forward to 16-bit functions.
+TEST_F(AssemblerRISCV64Test, ZextW_WithoutZbaAndC) {
+  ScopedZbaAndCSuppression scs(this);
+  DriverStr(RepeatRR(&Riscv64Assembler::ZextW, "zext.w {reg1}, {reg2}\n"), "ZextW_WithoutZbaAndC");
 }
 
 TEST_F(AssemblerRISCV64Test, Seqz) {
diff --git a/compiler/utils/riscv64/jni_macro_assembler_riscv64.cc b/compiler/utils/riscv64/jni_macro_assembler_riscv64.cc
index 00e1f54..e2ef9c4 100644
--- a/compiler/utils/riscv64/jni_macro_assembler_riscv64.cc
+++ b/compiler/utils/riscv64/jni_macro_assembler_riscv64.cc
@@ -472,10 +472,13 @@
   __ Bind(&retry);
   static_assert(thread_flags_offset.Int32Value() == 0);  // LR/SC require exact address.
   __ LrW(scratch, TR, AqRl::kNone);
-  __ Li(scratch2, kNativeStateValue);
-  // If any flags are set, go to the slow path.
-  static_assert(kRunnableStateValue == 0u);
-  __ Bnez(scratch, Riscv64JNIMacroLabel::Cast(label)->AsRiscv64());
+  {
+    ScopedLrScExtensionsRestriction slser(&asm_);
+    __ Li(scratch2, kNativeStateValue);
+    // If any flags are set, go to the slow path.
+    static_assert(kRunnableStateValue == 0u);
+    __ Bnez(scratch, Riscv64JNIMacroLabel::Cast(label)->AsRiscv64());
+  }
   __ ScW(scratch, scratch2, TR, AqRl::kRelease);
   __ Bnez(scratch, &retry);
 
@@ -506,11 +509,14 @@
   __ Bind(&retry);
   static_assert(thread_flags_offset.Int32Value() == 0);  // LR/SC require exact address.
   __ LrW(scratch, TR, AqRl::kAcquire);
-  __ Li(scratch2, kNativeStateValue);
-  // If any flags are set, or the state is not Native, go to the slow path.
-  // (While the thread can theoretically transition between different Suspended states,
-  // it would be very unexpected to see a state other than Native at this point.)
-  __ Bne(scratch, scratch2, Riscv64JNIMacroLabel::Cast(label)->AsRiscv64());
+  {
+    ScopedLrScExtensionsRestriction slser(&asm_);
+    __ Li(scratch2, kNativeStateValue);
+    // If any flags are set, or the state is not Native, go to the slow path.
+    // (While the thread can theoretically transition between different Suspended states,
+    // it would be very unexpected to see a state other than Native at this point.)
+    __ Bne(scratch, scratch2, Riscv64JNIMacroLabel::Cast(label)->AsRiscv64());
+  }
   static_assert(kRunnableStateValue == 0u);
   __ ScW(scratch, Zero, TR, AqRl::kNone);
   __ Bnez(scratch, &retry);