diff --git a/src/base/macros.h b/src/base/macros.h index a55970c55e95..2e7a7e9482e4 100644 --- a/src/base/macros.h +++ b/src/base/macros.h @@ -11,7 +11,9 @@ #include "src/base/compiler-specific.h" #include "src/base/logging.h" #include "src/base/platform/wrappers.h" +#if defined(__CHERI_PURE_CAPABILITY__) #include "src/common/cheri.h" +#endif // defined(__CHERI_PURE_CAPABILITY__) // No-op macro which is used to work around MSVC's funky VA_ARGS support. #define EXPAND(x) x diff --git a/src/builtins/arm64/builtins-arm64.cc b/src/builtins/arm64/builtins-arm64.cc index 4bb3a5493a36..0d915e8bc567 100644 --- a/src/builtins/arm64/builtins-arm64.cc +++ b/src/builtins/arm64/builtins-arm64.cc @@ -2469,9 +2469,9 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { #else // defined(__CHERI_PURE_CAPABILITY__) __ Ldr(x1, MemOperand(fp, StandardFrameConstants::kFunctionOffset)); __ LoadTaggedPointerField( - x1, FieldMemOperand(c1, JSFunction::kSharedFunctionInfoOffset)); + x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); __ LoadTaggedPointerField( - x1, FieldMemOperand(c1, SharedFunctionInfo::kFunctionDataOffset)); + x1, FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset)); __ CompareObjectType(x1, kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister, #endif // defined(__CHERI_PURE_CAPABILITY__) @@ -2828,7 +2828,7 @@ void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source, #if defined(__CHERI_PURE_CAPABILITY__) c1, FieldMemOperand(x1, FixedArray::OffsetOfElementAt( #else // defined(__CHERI_PURE_CAPABILITY__) - c1, FieldMemOperand(x1, FixedArray::OffsetOfElementAt( + x1, FieldMemOperand(x1, FixedArray::OffsetOfElementAt( #endif // defined(__CHERI_PURE_CAPABILITY__) DeoptimizationData::kOsrPcOffsetIndex))); diff --git a/src/codegen/arm64/instructions-arm64.cc b/src/codegen/arm64/instructions-arm64.cc index 95a5e5950117..d426221ef779 100644 --- a/src/codegen/arm64/instructions-arm64.cc +++ 
b/src/codegen/arm64/instructions-arm64.cc @@ -54,7 +54,11 @@ bool Instruction::IsLoad() const { bool Instruction::IsStore() const { if (Mask(LoadStoreAnyFMask) == LoadStoreAnyFixed) { +#if defined(__CHERI_PURE_CAPABILITY__) LoadStoreOp op = static_cast<LoadStoreOp>(Mask(LoadStoreCapMask)); +#else + LoadStoreOp op = static_cast<LoadStoreOp>(Mask(LoadStoreMask)); +#endif // __CHERI_PURE_CAPABILITY__ switch (op) { case STRB_w: case STRH_w: diff --git a/src/codegen/arm64/macro-assembler-arm64-inl.h b/src/codegen/arm64/macro-assembler-arm64-inl.h index 7347ebf83858..14781e90440e 100644 --- a/src/codegen/arm64/macro-assembler-arm64-inl.h +++ b/src/codegen/arm64/macro-assembler-arm64-inl.h @@ -226,9 +226,17 @@ void TurboAssembler::Add(const Register& rd, const Register& rn, DCHECK(allow_macro_instructions()); if (operand.IsImmediate() && (operand.ImmediateValue() < 0) && IsImmAddSub(-operand.ImmediateValue())) { +#if defined(__CHERI_PURE_CAPABILITY__) AddSubMacro(rd, rn, -operand.ImmediateValue(), LeaveFlags, SubOpFor(rd)); +#else + AddSubMacro(rd, rn, -operand.ImmediateValue(), LeaveFlags, SUB); +#endif // defined(__CHERI_PURE_CAPABILITY__) } else { +#if defined(__CHERI_PURE_CAPABILITY__) AddSubMacro(rd, rn, operand, LeaveFlags, AddOpFor(rd)); +#else + AddSubMacro(rd, rn, operand, LeaveFlags, ADD); +#endif // defined(__CHERI_PURE_CAPABILITY__) } } @@ -290,9 +298,17 @@ void TurboAssembler::Sub(const Register& rd, const Register& rn, DCHECK(allow_macro_instructions()); if (operand.IsImmediate() && (operand.ImmediateValue() < 0) && IsImmAddSub(-operand.ImmediateValue())) { +#if defined(__CHERI_PURE_CAPABILITY__) AddSubMacro(rd, rn, -operand.ImmediateValue(), LeaveFlags, AddOpFor(rd)); +#else + AddSubMacro(rd, rn, -operand.ImmediateValue(), LeaveFlags, ADD); +#endif // __CHERI_PURE_CAPABILITY__ } else { +#if defined(__CHERI_PURE_CAPABILITY__) AddSubMacro(rd, rn, operand, LeaveFlags, SubOpFor(rd)); +#else + AddSubMacro(rd, rn, operand, LeaveFlags, SUB); +#endif // __CHERI_PURE_CAPABILITY__ } } diff 
--git a/src/common/globals.h b/src/common/globals.h index 3dd503897bd5..9dc896746844 100644 --- a/src/common/globals.h +++ b/src/common/globals.h @@ -306,7 +306,7 @@ constexpr size_t kMaxWasmCodeMemory = kMaxWasmCodeMB * MB; #if defined(V8_HOST_CHERI_PURE_CAPABILITY) constexpr int kSystemPointerSizeLog2 = 4; #else -constexpr int kPtrAddrSizeLog2 = 3; +constexpr int kSystemPointerSizeLog2 = 3; #endif constexpr int kSystemPointerAddrSizeLog2 = 3; constexpr int kPtrAddrSizeLog2 = 3; diff --git a/src/compiler/machine-operator.h b/src/compiler/machine-operator.h index 8a5a9811906b..90b05437e248 100644 --- a/src/compiler/machine-operator.h +++ b/src/compiler/machine-operator.h @@ -141,7 +141,11 @@ class StoreRepresentation final { public: StoreRepresentation(MachineRepresentation representation, WriteBarrierKind write_barrier_kind) +#if defined(__CHERI_PURE_CAPABILITY__) : machine_type_(MachineType(representation, MachineSemantic::kCapability)), +#else + : machine_type_(MachineType(representation, MachineSemantic::kAny)), +#endif // defined(__CHERI_PURE_CAPABILITY__) write_barrier_kind_(write_barrier_kind) {} StoreRepresentation(MachineType machine_type, diff --git a/src/sandbox/external-pointer-table.h b/src/sandbox/external-pointer-table.h index 556f04a2bd8c..8e893ee07444 100644 --- a/src/sandbox/external-pointer-table.h +++ b/src/sandbox/external-pointer-table.h @@ -154,13 +154,21 @@ class V8_EXPORT_PRIVATE ExternalPointerTable { // Atomically loads the value at the given index. inline Address load_atomic(uint32_t index) const { +#if defined(__CHERI_PURE_CAPABILITY__) auto addr = reinterpret_cast<base::AtomicIntPtr*>(entry_address(index)); +#else + auto addr = reinterpret_cast<base::Atomic64*>(entry_address(index)); +#endif // defined(__CHERI_PURE_CAPABILITY__) return base::Relaxed_Load(addr); } // Atomically stores the provided value at the given index. 
inline void store_atomic(uint32_t index, Address value) { +#if defined(__CHERI_PURE_CAPABILITY__) auto addr = reinterpret_cast<base::AtomicIntPtr*>(entry_address(index)); +#else + auto addr = reinterpret_cast<base::Atomic64*>(entry_address(index)); +#endif // defined(__CHERI_PURE_CAPABILITY__) base::Relaxed_Store(addr, value); } diff --git a/src/tracing/trace-event.h b/src/tracing/trace-event.h index 7169036d0181..3b3be3049cd3 100644 --- a/src/tracing/trace-event.h +++ b/src/tracing/trace-event.h @@ -130,7 +130,11 @@ enum CategoryGroupEnabledFlags { // Defines atomic operations used internally by the tracing system. // Acquire/release barriers are important here: crbug.com/1330114#c8. +#if defined(__CHERI_PURE_CAPABILITY__) #define TRACE_EVENT_API_ATOMIC_WORD v8::base::AtomicIntPtr +#else +#define TRACE_EVENT_API_ATOMIC_WORD v8::base::AtomicWord +#endif // defined(__CHERI_PURE_CAPABILITY__) #define TRACE_EVENT_API_ATOMIC_LOAD(var) v8::base::Acquire_Load(&(var)) #define TRACE_EVENT_API_ATOMIC_STORE(var, value) \ v8::base::Release_Store(&(var), (value))