Title: [240832] trunk/Source/JavaScriptCore
Revision: 240832
Author: mark....@apple.com
Date: 2019-01-31 17:37:36 -0800 (Thu, 31 Jan 2019)

Log Message

Remove poisoning from CodeBlock and LLInt code.
https://bugs.webkit.org/show_bug.cgi?id=194113

Reviewed by Yusuke Suzuki.

* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::CodeBlock):
(JSC::CodeBlock::~CodeBlock):
(JSC::CodeBlock::setConstantRegisters):
(JSC::CodeBlock::propagateTransitions):
(JSC::CodeBlock::finalizeLLIntInlineCaches):
(JSC::CodeBlock::jettison):
(JSC::CodeBlock::predictedMachineCodeSize):
* bytecode/CodeBlock.h:
(JSC::CodeBlock::vm const):
(JSC::CodeBlock::addConstant):
(JSC::CodeBlock::heap const):
(JSC::CodeBlock::replaceConstant):
* llint/LLIntOfflineAsmConfig.h:
* llint/LLIntSlowPaths.cpp:
(JSC::LLInt::handleHostCall):
(JSC::LLInt::setUpCall):
* llint/LowLevelInterpreter.asm:
* llint/LowLevelInterpreter32_64.asm:
* llint/LowLevelInterpreter64.asm:
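
For background, the poisoning being removed here stores sensitive pointers XORed with a per-type key, so a stray or attacker-controlled load of the raw field does not yield a usable pointer; the unpoison macro deleted from LowLevelInterpreter.asm below undoes that XOR before the pointer is used. A minimal sketch of the idea, in the spirit of the removed code (the class name, key name, and key value are illustrative, not the actual WTF::Poisoned API):

    #include <cstdint>

    // Illustrative per-type key; a real implementation would pick an
    // unpredictable value at runtime rather than a compile-time constant.
    static const uintptr_t g_exampleCodeBlockPoison = 0x1badbeefcafeULL;

    template<typename T>
    class ExamplePoisonedPtr {
    public:
        explicit ExamplePoisonedPtr(T* ptr)
            : m_bits(reinterpret_cast<uintptr_t>(ptr) ^ g_exampleCodeBlockPoison)
        {
        }

        // Reverse the XOR to recover the raw pointer, analogous to what the
        // removed unpoison() asm macro does with "xorp scratch, fieldReg".
        T* unpoisoned() const
        {
            return reinterpret_cast<T*>(m_bits ^ g_exampleCodeBlockPoison);
        }
        T* operator->() const { return unpoisoned(); }

    private:
        uintptr_t m_bits; // the pointer, stored XORed with the key
    };

With this change, fields such as CodeBlock::m_poisonedVM drop the wrapper and become plain pointers (m_vm), so both the C++ accessors and the LLInt assembly load them directly.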

Modified Paths

    trunk/Source/JavaScriptCore/ChangeLog
    trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp
    trunk/Source/JavaScriptCore/bytecode/CodeBlock.h
    trunk/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h
    trunk/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp
    trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
    trunk/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm
    trunk/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm

Diff

Modified: trunk/Source/JavaScriptCore/ChangeLog (240831 => 240832)


--- trunk/Source/JavaScriptCore/ChangeLog	2019-02-01 01:18:49 UTC (rev 240831)
+++ trunk/Source/JavaScriptCore/ChangeLog	2019-02-01 01:37:36 UTC (rev 240832)
@@ -1,3 +1,31 @@
+2019-01-31  Mark Lam  <mark....@apple.com>
+
+        Remove poisoning from CodeBlock and LLInt code.
+        https://bugs.webkit.org/show_bug.cgi?id=194113
+
+        Reviewed by Yusuke Suzuki.
+
+        * bytecode/CodeBlock.cpp:
+        (JSC::CodeBlock::CodeBlock):
+        (JSC::CodeBlock::~CodeBlock):
+        (JSC::CodeBlock::setConstantRegisters):
+        (JSC::CodeBlock::propagateTransitions):
+        (JSC::CodeBlock::finalizeLLIntInlineCaches):
+        (JSC::CodeBlock::jettison):
+        (JSC::CodeBlock::predictedMachineCodeSize):
+        * bytecode/CodeBlock.h:
+        (JSC::CodeBlock::vm const):
+        (JSC::CodeBlock::addConstant):
+        (JSC::CodeBlock::heap const):
+        (JSC::CodeBlock::replaceConstant):
+        * llint/LLIntOfflineAsmConfig.h:
+        * llint/LLIntSlowPaths.cpp:
+        (JSC::LLInt::handleHostCall):
+        (JSC::LLInt::setUpCall):
+        * llint/LowLevelInterpreter.asm:
+        * llint/LowLevelInterpreter32_64.asm:
+        * llint/LowLevelInterpreter64.asm:
+
 2019-01-31  Yusuke Suzuki  <ysuz...@apple.com>
 
         [JSC] Remove finalizer in AsyncFromSyncIteratorPrototype

Modified: trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp (240831 => 240832)


--- trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp	2019-02-01 01:18:49 UTC (rev 240831)
+++ trunk/Source/JavaScriptCore/bytecode/CodeBlock.cpp	2019-02-01 01:37:36 UTC (rev 240832)
@@ -305,7 +305,7 @@
     , m_numBreakpoints(0)
     , m_unlinkedCode(*other.vm(), this, other.m_unlinkedCode.get())
     , m_ownerExecutable(*other.vm(), this, other.m_ownerExecutable.get())
-    , m_poisonedVM(other.m_poisonedVM)
+    , m_vm(other.m_vm)
     , m_instructions(other.m_instructions)
     , m_instructionsRawPointer(other.m_instructionsRawPointer)
     , m_instructionCount(other.m_instructionCount)
@@ -371,7 +371,7 @@
     , m_numBreakpoints(0)
     , m_unlinkedCode(*vm, this, unlinkedCodeBlock)
     , m_ownerExecutable(*vm, this, ownerExecutable)
-    , m_poisonedVM(vm)
+    , m_vm(vm)
     , m_instructions(&unlinkedCodeBlock->instructions())
     , m_instructionsRawPointer(m_instructions->rawPointer())
     , m_thisRegister(unlinkedCodeBlock->thisRegister())
@@ -834,7 +834,7 @@
 
 CodeBlock::~CodeBlock()
 {
-    VM& vm = *m_poisonedVM;
+    VM& vm = *m_vm;
 
     vm.heap.codeBlockSet().remove(this);
     
@@ -894,7 +894,7 @@
 
 void CodeBlock::setConstantRegisters(const Vector<WriteBarrier<Unknown>>& constants, const Vector<SourceCodeRepresentation>& constantsSourceCodeRepresentation)
 {
-    VM& vm = *m_poisonedVM;
+    VM& vm = *m_vm;
     auto scope = DECLARE_THROW_SCOPE(vm);
     JSGlobalObject* globalObject = m_globalObject.get();
     ExecState* exec = globalObject->globalExec();
@@ -1087,7 +1087,7 @@
 {
     UNUSED_PARAM(visitor);
 
-    VM& vm = *m_poisonedVM;
+    VM& vm = *m_vm;
 
     if (jitType() == JITCode::InterpreterThunk) {
         const Vector<InstructionStream::Offset>& propertyAccessInstructions = m_unlinkedCode->propertyAccessInstructions();
@@ -1203,7 +1203,7 @@
 
 void CodeBlock::finalizeLLIntInlineCaches()
 {
-    VM& vm = *m_poisonedVM;
+    VM& vm = *m_vm;
     const Vector<InstructionStream::Offset>& propertyAccessInstructions = m_unlinkedCode->propertyAccessInstructions();
 
     auto handleGetPutFromScope = [](auto& metadata) {
@@ -1921,7 +1921,7 @@
     }
 #endif // ENABLE(DFG_JIT)
 
-    VM& vm = *m_poisonedVM;
+    VM& vm = *m_vm;
     DeferGCForAWhile deferGC(*heap());
     
     // We want to accomplish two things here:
@@ -2762,8 +2762,8 @@
 
 size_t CodeBlock::predictedMachineCodeSize()
 {
-    VM* vm = m_poisonedVM.unpoisoned();
-    // This will be called from CodeBlock::CodeBlock before either m_poisonedVM or the
+    VM* vm = m_vm;
+    // This will be called from CodeBlock::CodeBlock before either m_vm or the
     // instructions have been initialized. It's OK to return 0 because what will really
     // matter is the recomputation of this value when the slow path is triggered.
     if (!vm)

Modified: trunk/Source/JavaScriptCore/bytecode/CodeBlock.h (240831 => 240832)


--- trunk/Source/JavaScriptCore/bytecode/CodeBlock.h	2019-02-01 01:18:49 UTC (rev 240831)
+++ trunk/Source/JavaScriptCore/bytecode/CodeBlock.h	2019-02-01 01:37:36 UTC (rev 240832)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
  * Copyright (C) 2008 Cameron Zwarich <cwzwar...@uwaterloo.ca>
  *
  * Redistribution and use in source and binary forms, with or without
@@ -51,7 +51,6 @@
 #include "JITCode.h"
 #include "JITCodeMap.h"
 #include "JITMathICForwards.h"
-#include "JSCPoison.h"
 #include "JSCast.h"
 #include "JSGlobalObject.h"
 #include "JumpTable.h"
@@ -379,7 +378,7 @@
     
     ExecutableToCodeBlockEdge* ownerEdge() const { return m_ownerEdge.get(); }
 
-    VM* vm() const { return m_poisonedVM.unpoisoned(); }
+    VM* vm() const { return m_vm; }
 
     void setThisRegister(VirtualRegister thisRegister) { m_thisRegister = thisRegister; }
     VirtualRegister thisRegister() const { return m_thisRegister; }
@@ -521,7 +520,7 @@
     {
         unsigned result = m_constantRegisters.size();
         m_constantRegisters.append(WriteBarrier<Unknown>());
-        m_constantRegisters.last().set(*m_poisonedVM, this, v);
+        m_constantRegisters.last().set(*m_vm, this, v);
         m_constantsSourceCodeRepresentation.append(SourceCodeRepresentation::Other);
         return result;
     }
@@ -547,7 +546,7 @@
     const Vector<BitVector>& bitVectors() const { return m_unlinkedCode->bitVectors(); }
     const BitVector& bitVector(size_t i) { return m_unlinkedCode->bitVector(i); }
 
-    Heap* heap() const { return &m_poisonedVM->heap; }
+    Heap* heap() const { return &m_vm->heap; }
     JSGlobalObject* globalObject() { return m_globalObject.get(); }
 
     JSGlobalObject* globalObjectFor(CodeOrigin);
@@ -897,7 +896,7 @@
     void replaceConstant(int index, JSValue value)
     {
         ASSERT(isConstantRegisterIndex(index) && static_cast<size_t>(index - FirstConstantRegisterIndex) < m_constantRegisters.size());
-        m_constantRegisters[index - FirstConstantRegisterIndex].set(*m_poisonedVM, this, value);
+        m_constantRegisters[index - FirstConstantRegisterIndex].set(*m_vm, this, value);
     }
 
     bool shouldVisitStrongly(const ConcurrentJSLocker&);
@@ -945,7 +944,7 @@
     WriteBarrier<UnlinkedCodeBlock> m_unlinkedCode;
     WriteBarrier<ExecutableBase> m_ownerExecutable;
     WriteBarrier<ExecutableToCodeBlockEdge> m_ownerEdge;
-    Poisoned<CodeBlockPoison, VM*> m_poisonedVM;
+    VM* m_vm;
 
     const InstructionStream* m_instructions;
     const void* m_instructionsRawPointer { nullptr };
@@ -954,24 +953,24 @@
     VirtualRegister m_scopeRegister;
     mutable CodeBlockHash m_hash;
 
-    PoisonedRefPtr<CodeBlockPoison, SourceProvider> m_source;
+    RefPtr<SourceProvider> m_source;
     unsigned m_sourceOffset;
     unsigned m_firstLineColumnOffset;
 
     SentinelLinkedList<LLIntCallLinkInfo, BasicRawSentinelNode<LLIntCallLinkInfo>> m_incomingLLIntCalls;
     StructureWatchpointMap m_llintGetByIdWatchpointMap;
-    PoisonedRefPtr<CodeBlockPoison, JITCode> m_jitCode;
+    RefPtr<JITCode> m_jitCode;
 #if !ENABLE(C_LOOP)
     std::unique_ptr<RegisterAtOffsetList> m_calleeSaveRegisters;
 #endif
 #if ENABLE(JIT)
-    PoisonedBag<CodeBlockPoison, StructureStubInfo> m_stubInfos;
-    PoisonedBag<CodeBlockPoison, JITAddIC> m_addICs;
-    PoisonedBag<CodeBlockPoison, JITMulIC> m_mulICs;
-    PoisonedBag<CodeBlockPoison, JITNegIC> m_negICs;
-    PoisonedBag<CodeBlockPoison, JITSubIC> m_subICs;
-    PoisonedBag<CodeBlockPoison, ByValInfo> m_byValInfos;
-    PoisonedBag<CodeBlockPoison, CallLinkInfo> m_callLinkInfos;
+    Bag<StructureStubInfo> m_stubInfos;
+    Bag<JITAddIC> m_addICs;
+    Bag<JITMulIC> m_mulICs;
+    Bag<JITNegIC> m_negICs;
+    Bag<JITSubIC> m_subICs;
+    Bag<ByValInfo> m_byValInfos;
+    Bag<CallLinkInfo> m_callLinkInfos;
     SentinelLinkedList<CallLinkInfo, BasicRawSentinelNode<CallLinkInfo>> m_incomingCalls;
     SentinelLinkedList<PolymorphicCallNode, BasicRawSentinelNode<PolymorphicCallNode>> m_incomingPolymorphicCalls;
     std::unique_ptr<PCToCodeOriginMap> m_pcToCodeOriginMap;
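
Condensed from the CodeBlock.h hunks above, the shape of the change on the C++ side is a plain pointer field plus direct accessors (a sketch, not the complete class):

    // Before this patch:
    //     Poisoned<CodeBlockPoison, VM*> m_poisonedVM;
    //     VM* vm() const { return m_poisonedVM.unpoisoned(); }
    //
    // After this patch:
    //     VM* m_vm;
    //     VM* vm() const { return m_vm; }
    //
    // Callers that can run before the field is initialized still null-check,
    // as in predictedMachineCodeSize() above:
    //     VM* vm = m_vm;
    //     if (!vm)
    //         return 0;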

Modified: trunk/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h (240831 => 240832)


--- trunk/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h	2019-02-01 01:18:49 UTC (rev 240831)
+++ trunk/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h	2019-02-01 01:37:36 UTC (rev 240832)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2019 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -28,7 +28,6 @@
 #include "LLIntCommon.h"
 #include <wtf/Assertions.h>
 #include <wtf/Gigacage.h>
-#include <wtf/Poisoned.h>
 
 #if ENABLE(C_LOOP)
 #define OFFLINE_ASM_C_LOOP 1
@@ -136,12 +135,6 @@
 #define OFFLINE_ASM_ADDRESS64 0
 #endif
 
-#if ENABLE(POISON)
-#define OFFLINE_ASM_POISON 1
-#else
-#define OFFLINE_ASM_POISON 0
-#endif
-
 #if !ASSERT_DISABLED
 #define OFFLINE_ASM_ASSERT_ENABLED 1
 #else

Modified: trunk/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp (240831 => 240832)


--- trunk/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp	2019-02-01 01:18:49 UTC (rev 240831)
+++ trunk/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp	2019-02-01 01:37:36 UTC (rev 240832)
@@ -1448,8 +1448,6 @@
             NativeCallFrameTracer tracer(&vm, execCallee);
             execCallee->setCallee(asObject(callee));
             vm.hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
-            
-            PoisonedMasmPtr::assertIsNotPoisoned(LLInt::getCodePtr(getHostCallReturnValue));
             LLINT_CALL_RETURN(execCallee, execCallee, LLInt::getCodePtr(getHostCallReturnValue), CFunctionPtrTag);
         }
         
@@ -1470,8 +1468,6 @@
         NativeCallFrameTracer tracer(&vm, execCallee);
         execCallee->setCallee(asObject(callee));
         vm.hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
-
-        PoisonedMasmPtr::assertIsNotPoisoned(LLInt::getCodePtr(getHostCallReturnValue));
         LLINT_CALL_RETURN(execCallee, execCallee, LLInt::getCodePtr(getHostCallReturnValue), CFunctionPtrTag);
     }
     
@@ -1508,7 +1504,6 @@
             }
 
             assertIsTaggedWith(codePtr.executableAddress(), JSEntryPtrTag);
-            PoisonedMasmPtr::assertIsNotPoisoned(codePtr.executableAddress());
             LLINT_CALL_RETURN(exec, execCallee, codePtr.executableAddress(), JSEntryPtrTag);
         }
         RELEASE_AND_RETURN(throwScope, handleHostCall(execCallee, calleeAsValue, kind));
@@ -1559,7 +1554,6 @@
     }
 
     assertIsTaggedWith(codePtr.executableAddress(), JSEntryPtrTag);
-    PoisonedMasmPtr::assertIsNotPoisoned(codePtr.executableAddress());
     LLINT_CALL_RETURN(exec, execCallee, codePtr.executableAddress(), JSEntryPtrTag);
 }
 

Modified: trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm (240831 => 240832)


--- trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm	2019-02-01 01:18:49 UTC (rev 240831)
+++ trunk/Source/JavaScriptCore/llint/LowLevelInterpreter.asm	2019-02-01 01:37:36 UTC (rev 240832)
@@ -841,13 +841,6 @@
     end
 end
 
-macro unpoison(poison, fieldReg, scratch)
-    if POISON
-        loadp poison, scratch
-        xorp scratch, fieldReg
-    end
-end
-
 macro functionPrologue()
     tagReturnAddress sp
     if X86 or X86_WIN or X86_64 or X86_64_WIN
@@ -1027,7 +1020,7 @@
     end)
 end
 
-macro functionForCallCodeBlockGetter(targetRegister, scratch)
+macro functionForCallCodeBlockGetter(targetRegister)
     if JSVALUE64
         loadp Callee[cfr], targetRegister
     else
@@ -1034,12 +1027,11 @@
         loadp Callee + PayloadOffset[cfr], targetRegister
     end
     loadp JSFunction::m_executable[targetRegister], targetRegister
-    unpoison(_g_JSFunctionPoison, targetRegister, scratch)
     loadp FunctionExecutable::m_codeBlockForCall[targetRegister], targetRegister
     loadp ExecutableToCodeBlockEdge::m_codeBlock[targetRegister], targetRegister
 end
 
-macro functionForConstructCodeBlockGetter(targetRegister, scratch)
+macro functionForConstructCodeBlockGetter(targetRegister)
     if JSVALUE64
         loadp Callee[cfr], targetRegister
     else
@@ -1046,12 +1038,11 @@
         loadp Callee + PayloadOffset[cfr], targetRegister
     end
     loadp JSFunction::m_executable[targetRegister], targetRegister
-    unpoison(_g_JSFunctionPoison, targetRegister, scratch)
     loadp FunctionExecutable::m_codeBlockForConstruct[targetRegister], targetRegister
     loadp ExecutableToCodeBlockEdge::m_codeBlock[targetRegister], targetRegister
 end
 
-macro notFunctionCodeBlockGetter(targetRegister, ignored)
+macro notFunctionCodeBlockGetter(targetRegister)
     loadp CodeBlock[cfr], targetRegister
 end
 
@@ -1075,7 +1066,7 @@
         callSlowPath(traceSlowPath)
         addp maxFrameExtentForSlowPathCall, sp
     end
-    codeBlockGetter(t1, t2)
+    codeBlockGetter(t1)
     if not C_LOOP
         baddis 5, CodeBlock::m_llintExecuteCounter + BaselineExecutionCounter::m_counter[t1], .continue
         if JSVALUE64
@@ -1105,7 +1096,7 @@
         end
         jmp r0, JSEntryPtrTag
     .recover:
-        codeBlockGetter(t1, t2)
+        codeBlockGetter(t1)
     .continue:
     end
 
@@ -1116,7 +1107,6 @@
     # Set up the PC.
     if JSVALUE64
         loadp CodeBlock::m_instructionsRawPointer[t1], PB
-        unpoison(_g_CodeBlockPoison, PB, t3)
         move 0, PC
     else
         loadp CodeBlock::m_instructionsRawPointer[t1], PC
@@ -1126,8 +1116,7 @@
     getFrameRegisterSizeForCodeBlock(t1, t0)
     subp cfr, t0, t0
     bpa t0, cfr, .needStackCheck
-    loadp CodeBlock::m_poisonedVM[t1], t2
-    unpoison(_g_CodeBlockPoison, t2, t3)
+    loadp CodeBlock::m_vm[t1], t2
     if C_LOOP
         bpbeq VM::m_cloopStackLimit[t2], t0, .stackHeightOK
     else
@@ -1152,7 +1141,7 @@
 .stackHeightOKGetCodeBlock:
     # Stack check slow path returned that the stack was ok.
     # Since they were clobbered, need to get CodeBlock and new sp
-    codeBlockGetter(t1, t2)
+    codeBlockGetter(t1)
     getFrameRegisterSizeForCodeBlock(t1, t0)
     subp cfr, t0, t0
 
@@ -1640,8 +1629,7 @@
 
 llintOp(op_check_traps, OpCheckTraps, macro (unused, unused, dispatch)
     loadp CodeBlock[cfr], t1
-    loadp CodeBlock::m_poisonedVM[t1], t1
-    unpoison(_g_CodeBlockPoison, t1, t2)
+    loadp CodeBlock::m_vm[t1], t1
     loadb VM::m_traps+VMTraps::m_needTrapHandling[t1], t0
     btpnz t0, .handleTraps
 .afterHandlingTraps:
@@ -1657,8 +1645,7 @@
 # Returns the packet pointer in t0.
 macro acquireShadowChickenPacket(slow)
     loadp CodeBlock[cfr], t1
-    loadp CodeBlock::m_poisonedVM[t1], t1
-    unpoison(_g_CodeBlockPoison, t1, t2)
+    loadp CodeBlock::m_vm[t1], t1
     loadp VM::m_shadowChicken[t1], t2
     loadp ShadowChicken::m_logCursor[t2], t0
     bpaeq t0, ShadowChicken::m_logEnd[t2], slow

Modified: trunk/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm (240831 => 240832)


--- trunk/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm	2019-02-01 01:18:49 UTC (rev 240831)
+++ trunk/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm	2019-02-01 01:37:36 UTC (rev 240832)
@@ -2411,8 +2411,7 @@
 
 llintOpWithMetadata(op_profile_type, OpProfileType, macro (size, get, dispatch, metadata, return)
     loadp CodeBlock[cfr], t1
-    loadp CodeBlock::m_poisonedVM[t1], t1
-    unpoison(_g_CodeBlockPoison, t1, t2)
+    loadp CodeBlock::m_vm[t1], t1
     # t1 is holding the pointer to the typeProfilerLog.
     loadp VM::m_typeProfilerLog[t1], t1
 

Modified: trunk/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm (240831 => 240832)


--- trunk/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm	2019-02-01 01:18:49 UTC (rev 240831)
+++ trunk/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm	2019-02-01 01:37:36 UTC (rev 240832)
@@ -80,7 +80,6 @@
     loadi ArgumentCount + TagOffset[cfr], PC
     loadp CodeBlock[cfr], PB
     loadp CodeBlock::m_instructionsRawPointer[PB], PB
-    unpoison(_g_CodeBlockPoison, PB, t1)
     get(size, opcodeStruct, m_dst, t1)
     storeq r0, [cfr, t1, 8]
     metadata(size, opcodeStruct, t2, t1)
@@ -531,17 +530,16 @@
         end)
 end
 
-macro structureIDToStructureWithScratch(structureIDThenStructure, scratch, scratch2)
+macro structureIDToStructureWithScratch(structureIDThenStructure, scratch)
     loadp CodeBlock[cfr], scratch
-    loadp CodeBlock::m_poisonedVM[scratch], scratch
-    unpoison(_g_CodeBlockPoison, scratch, scratch2)
+    loadp CodeBlock::m_vm[scratch], scratch
     loadp VM::heap + Heap::m_structureIDTable + StructureIDTable::m_table[scratch], scratch
     loadp [scratch, structureIDThenStructure, PtrSize], structureIDThenStructure
 end
 
-macro loadStructureWithScratch(cell, structure, scratch, scratch2)
+macro loadStructureWithScratch(cell, structure, scratch)
     loadi JSCell::m_structureID[cell], structure
-    structureIDToStructureWithScratch(structure, scratch, scratch2)
+    structureIDToStructureWithScratch(structure, scratch)
 end
 
 # Entrypoints into the interpreter.
@@ -629,7 +627,6 @@
     # Reload CodeBlock and reset PC, since the slow_path clobbered them.
     loadp CodeBlock[cfr], t1
     loadp CodeBlock::m_instructionsRawPointer[t1], PB
-    unpoison(_g_CodeBlockPoison, PB, t2)
     move 0, PC
     jmp doneLabel
 end
@@ -697,7 +694,7 @@
     loadq [cfr, t0, 8], t0
     btqnz t0, tagMask, .opToThisSlow
     bbneq JSCell::m_type[t0], FinalObjectType, .opToThisSlow
-    loadStructureWithScratch(t0, t1, t2, t3)
+    loadStructureWithScratch(t0, t1, t2)
     metadata(t2, t3)
     loadp OpToThis::Metadata::m_cachedStructure[t2], t2
     bpneq t1, t2, .opToThisSlow
@@ -767,7 +764,7 @@
         move 0, t0
         jmp .done
     .masqueradesAsUndefined:
-        loadStructureWithScratch(t0, t2, t1, t3)
+        loadStructureWithScratch(t0, t2, t1)
         loadp CodeBlock[cfr], t0
         loadp CodeBlock::m_globalObject[t0], t0
         cpeq Structure::m_globalObject[t2], t0, t0
@@ -1185,7 +1182,7 @@
     move ValueFalse, t1
     return(t1)
 .masqueradesAsUndefined:
-    loadStructureWithScratch(t0, t3, t1, t5)
+    loadStructureWithScratch(t0, t3, t1)
     loadp CodeBlock[cfr], t1
     loadp CodeBlock::m_globalObject[t1], t1
     cpeq Structure::m_globalObject[t3], t1, t0
@@ -1356,7 +1353,7 @@
     loadp OpPutById::Metadata::m_structureChain[t5], t3
     btpz t3, .opPutByIdTransitionDirect
 
-    structureIDToStructureWithScratch(t2, t1, t3)
+    structureIDToStructureWithScratch(t2, t1)
 
     # reload the StructureChain since we used t3 as a scratch above
     loadp OpPutById::Metadata::m_structureChain[t5], t3
@@ -1694,7 +1691,7 @@
         assertNotConstant(size, t0)
         loadq [cfr, t0, 8], t0
         btqnz t0, tagMask, .immediate
-        loadStructureWithScratch(t0, t2, t1, t3)
+        loadStructureWithScratch(t0, t2, t1)
         cellHandler(t2, JSCell::m_flags[t0], .target)
         dispatch()
 
@@ -1934,15 +1931,8 @@
         storei PC, ArgumentCount + TagOffset[cfr]
         storei t2, ArgumentCount + PayloadOffset[t3]
         move t3, sp
-        if POISON
-            loadp _g_JITCodePoison, t2
-            xorp %opcodeStruct%::Metadata::m_callLinkInfo.machineCodeTarget[t5], t2
-            prepareCall(t2, t1, t3, t4, JSEntryPtrTag)
-            callTargetFunction(size, opcodeStruct, dispatch, t2, JSEntryPtrTag)
-        else
-            prepareCall(%opcodeStruct%::Metadata::m_callLinkInfo.machineCodeTarget[t5], t2, t3, t4, JSEntryPtrTag)
-            callTargetFunction(size, opcodeStruct, dispatch, %opcodeStruct%::Metadata::m_callLinkInfo.machineCodeTarget[t5], JSEntryPtrTag)
-        end
+        prepareCall(%opcodeStruct%::Metadata::m_callLinkInfo.machineCodeTarget[t5], t2, t3, t4, JSEntryPtrTag)
+        callTargetFunction(size, opcodeStruct, dispatch, %opcodeStruct%::Metadata::m_callLinkInfo.machineCodeTarget[t5], JSEntryPtrTag)
 
     .opCallSlow:
         slowPathForCall(size, opcodeStruct, dispatch, slowPath, prepareCall)
@@ -1988,7 +1978,6 @@
     loadp CodeBlock[cfr], PB
     loadp CodeBlock::m_metadata[PB], metadataTable
     loadp CodeBlock::m_instructionsRawPointer[PB], PB
-    unpoison(_g_CodeBlockPoison, PB, t2)
     loadp VM::targetInterpreterPCForThrow[t3], PC
     subp PB, PC
 
@@ -2065,12 +2054,9 @@
     move cfr, a0
     loadp Callee[cfr], t1
     loadp JSFunction::m_executable[t1], t1
-    unpoison(_g_JSFunctionPoison, t1, t2)
     checkStackPointerAlignment(t3, 0xdead0001)
     if C_LOOP
-        loadp _g_NativeCodePoison, t2
-        xorp executableOffsetToFunction[t1], t2
-        cloopCallNative t2
+        cloopCallNative executableOffsetToFunction[t1]
     else
         if X86_64_WIN
             subp 32, sp
@@ -2077,9 +2063,7 @@
             call executableOffsetToFunction[t1], JSEntryPtrTag
             addp 32, sp
         else
-            loadp _g_NativeCodePoison, t2
-            xorp executableOffsetToFunction[t1], t2
-            call t2, JSEntryPtrTag
+            call executableOffsetToFunction[t1], JSEntryPtrTag
         end
     end
 
@@ -2111,9 +2095,7 @@
     loadp Callee[cfr], t1
     checkStackPointerAlignment(t3, 0xdead0001)
     if C_LOOP
-        loadp _g_NativeCodePoison, t2
-        xorp offsetOfFunction[t1], t2
-        cloopCallNative t2
+        cloopCallNative offsetOfFunction[t1]
     else
         if X86_64_WIN
             subp 32, sp
@@ -2120,9 +2102,7 @@
             call offsetOfFunction[t1], JSEntryPtrTag
             addp 32, sp
         else
-            loadp _g_NativeCodePoison, t2
-            xorp offsetOfFunction[t1], t2
-            call t2, JSEntryPtrTag
+            call offsetOfFunction[t1], JSEntryPtrTag
         end
     end
 
@@ -2234,7 +2214,7 @@
 macro loadWithStructureCheck(opcodeStruct, get, slowPath)
     get(m_scope, t0)
     loadq [cfr, t0, 8], t0
-    loadStructureWithScratch(t0, t2, t1, t3)
+    loadStructureWithScratch(t0, t2, t1)
     loadp %opcodeStruct%::Metadata::m_structure[t5], t1
     bpneq t2, t1, slowPath
 end
@@ -2471,8 +2451,7 @@
 
 llintOpWithMetadata(op_profile_type, OpProfileType, macro (size, get, dispatch, metadata, return)
     loadp CodeBlock[cfr], t1
-    loadp CodeBlock::m_poisonedVM[t1], t1
-    unpoison(_g_CodeBlockPoison, t1, t3)
+    loadp CodeBlock::m_vm[t1], t1
     # t1 is holding the pointer to the typeProfilerLog.
     loadp VM::m_typeProfilerLog[t1], t1
     # t2 is holding the pointer to the current log entry.