Templatize CodePtr/Refs/FunctionPtrs with PtrTags.
https://bugs.webkit.org/show_bug.cgi?id=184702
<rdar://problem/35391681>

Reviewed by Filip Pizlo and Saam Barati.

Source/JavaScriptCore:

1. Templatized MacroAssemblerCodePtr/Ref, FunctionPtr, and CodeLocation variants
   to take a PtrTag template argument (see the sketch below).
2. Replaced some uses of raw pointers with the equivalent CodePtr / FunctionPtr.
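
For illustration, here is a minimal sketch of the shape of this change. It is
not the actual JSC code: the real classes carry additional machinery (tagging
and untagging of the stored pointer, hashing, assertions), and the tag names
ExamplePtrTag / AnotherPtrTag below are invented for this example. The point
it shows is that the PtrTag becomes part of the pointer's type, and converting
between tags requires an explicit retagged<>() call.

    // Sketch only; simplified stand-in for the templatized CodePtr classes.
    enum PtrTag : unsigned { NoPtrTag, ExamplePtrTag, AnotherPtrTag };

    template<PtrTag tag>
    class MacroAssemblerCodePtr {
    public:
        MacroAssemblerCodePtr() = default;
        explicit MacroAssemblerCodePtr(void* value)
            : m_value(value)
        { }

        // Converting between tags is explicit, so a pointer tagged for one
        // purpose cannot silently be used for another.
        template<PtrTag newTag>
        MacroAssemblerCodePtr<newTag> retagged() const
        {
            return MacroAssemblerCodePtr<newTag>(m_value);
        }

        void* executableAddress() const { return m_value; }

    private:
        void* m_value { nullptr };
    };

    // Call sites name the tag in the type instead of passing raw pointers.
    int main()
    {
        int dummy = 0;
        MacroAssemblerCodePtr<ExamplePtrTag> ptr(&dummy);
        MacroAssemblerCodePtr<AnotherPtrTag> other = ptr.retagged<AnotherPtrTag>();
        return other.executableAddress() == ptr.executableAddress() ? 0 : 1;
    }

With the tag carried in the type, handing a code pointer tagged for one
purpose to code expecting a different tag becomes a compile-time error rather
than a latent runtime hazard.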

* assembler/AbstractMacroAssembler.h:
(JSC::AbstractMacroAssembler::differenceBetweenCodePtr):
(JSC::AbstractMacroAssembler::linkJump):
(JSC::AbstractMacroAssembler::linkPointer):
(JSC::AbstractMacroAssembler::getLinkerAddress):
(JSC::AbstractMacroAssembler::repatchJump):
(JSC::AbstractMacroAssembler::repatchJumpToNop):
(JSC::AbstractMacroAssembler::repatchNearCall):
(JSC::AbstractMacroAssembler::repatchCompact):
(JSC::AbstractMacroAssembler::repatchInt32):
(JSC::AbstractMacroAssembler::repatchPointer):
(JSC::AbstractMacroAssembler::readPointer):
(JSC::AbstractMacroAssembler::replaceWithLoad):
(JSC::AbstractMacroAssembler::replaceWithAddressComputation):
* assembler/CodeLocation.h:
(JSC::CodeLocationCommon:: const):
(JSC::CodeLocationCommon::CodeLocationCommon):
(JSC::CodeLocationInstruction::CodeLocationInstruction):
(JSC::CodeLocationLabel::CodeLocationLabel):
(JSC::CodeLocationLabel::retagged):
(JSC::CodeLocationLabel:: const):
(JSC::CodeLocationJump::CodeLocationJump):
(JSC::CodeLocationJump::retagged):
(JSC::CodeLocationCall::CodeLocationCall):
(JSC::CodeLocationCall::retagged):
(JSC::CodeLocationNearCall::CodeLocationNearCall):
(JSC::CodeLocationDataLabel32::CodeLocationDataLabel32):
(JSC::CodeLocationDataLabelCompact::CodeLocationDataLabelCompact):
(JSC::CodeLocationDataLabelPtr::CodeLocationDataLabelPtr):
(JSC::CodeLocationConvertibleLoad::CodeLocationConvertibleLoad):
(JSC::CodeLocationCommon<tag>::instructionAtOffset):
(JSC::CodeLocationCommon<tag>::labelAtOffset):
(JSC::CodeLocationCommon<tag>::jumpAtOffset):
(JSC::CodeLocationCommon<tag>::callAtOffset):
(JSC::CodeLocationCommon<tag>::nearCallAtOffset):
(JSC::CodeLocationCommon<tag>::dataLabelPtrAtOffset):
(JSC::CodeLocationCommon<tag>::dataLabel32AtOffset):
(JSC::CodeLocationCommon<tag>::dataLabelCompactAtOffset):
(JSC::CodeLocationCommon<tag>::convertibleLoadAtOffset):
(JSC::CodeLocationCommon::instructionAtOffset): Deleted.
(JSC::CodeLocationCommon::labelAtOffset): Deleted.
(JSC::CodeLocationCommon::jumpAtOffset): Deleted.
(JSC::CodeLocationCommon::callAtOffset): Deleted.
(JSC::CodeLocationCommon::nearCallAtOffset): Deleted.
(JSC::CodeLocationCommon::dataLabelPtrAtOffset): Deleted.
(JSC::CodeLocationCommon::dataLabel32AtOffset): Deleted.
(JSC::CodeLocationCommon::dataLabelCompactAtOffset): Deleted.
(JSC::CodeLocationCommon::convertibleLoadAtOffset): Deleted.
* assembler/LinkBuffer.cpp:
(JSC::LinkBuffer::finalizeCodeWithoutDisassemblyImpl):
(JSC::LinkBuffer::finalizeCodeWithDisassemblyImpl):
(JSC::LinkBuffer::finalizeCodeWithoutDisassembly): Deleted.
(JSC::LinkBuffer::finalizeCodeWithDisassembly): Deleted.
* assembler/LinkBuffer.h:
(JSC::LinkBuffer::link):
(JSC::LinkBuffer::patch):
(JSC::LinkBuffer::entrypoint):
(JSC::LinkBuffer::locationOf):
(JSC::LinkBuffer::locationOfNearCall):
(JSC::LinkBuffer::finalizeCodeWithoutDisassembly):
(JSC::LinkBuffer::finalizeCodeWithDisassembly):
(JSC::LinkBuffer::trampolineAt):
* assembler/MacroAssemblerARM.h:
(JSC::MacroAssemblerARM::readCallTarget):
(JSC::MacroAssemblerARM::replaceWithJump):
(JSC::MacroAssemblerARM::startOfPatchableBranch32WithPatchOnAddress):
(JSC::MacroAssemblerARM::startOfPatchableBranchPtrWithPatchOnAddress):
(JSC::MacroAssemblerARM::startOfBranchPtrWithPatchOnRegister):
(JSC::MacroAssemblerARM::revertJumpReplacementToBranchPtrWithPatch):
(JSC::MacroAssemblerARM::revertJumpReplacementToPatchableBranch32WithPatch):
(JSC::MacroAssemblerARM::revertJumpReplacementToPatchableBranchPtrWithPatch):
(JSC::MacroAssemblerARM::repatchCall):
(JSC::MacroAssemblerARM::linkCall):
* assembler/MacroAssemblerARM64.h:
(JSC::MacroAssemblerARM64::readCallTarget):
(JSC::MacroAssemblerARM64::replaceWithVMHalt):
(JSC::MacroAssemblerARM64::replaceWithJump):
(JSC::MacroAssemblerARM64::startOfBranchPtrWithPatchOnRegister):
(JSC::MacroAssemblerARM64::startOfPatchableBranchPtrWithPatchOnAddress):
(JSC::MacroAssemblerARM64::startOfPatchableBranch32WithPatchOnAddress):
(JSC::MacroAssemblerARM64::revertJumpReplacementToBranchPtrWithPatch):
(JSC::MacroAssemblerARM64::revertJumpReplacementToPatchableBranchPtrWithPatch):
(JSC::MacroAssemblerARM64::revertJumpReplacementToPatchableBranch32WithPatch):
(JSC::MacroAssemblerARM64::repatchCall):
(JSC::MacroAssemblerARM64::linkCall):
* assembler/MacroAssemblerARMv7.h:
(JSC::MacroAssemblerARMv7::replaceWithJump):
(JSC::MacroAssemblerARMv7::readCallTarget):
(JSC::MacroAssemblerARMv7::startOfBranchPtrWithPatchOnRegister):
(JSC::MacroAssemblerARMv7::revertJumpReplacementToBranchPtrWithPatch):
(JSC::MacroAssemblerARMv7::startOfPatchableBranchPtrWithPatchOnAddress):
(JSC::MacroAssemblerARMv7::startOfPatchableBranch32WithPatchOnAddress):
(JSC::MacroAssemblerARMv7::revertJumpReplacementToPatchableBranchPtrWithPatch):
(JSC::MacroAssemblerARMv7::revertJumpReplacementToPatchableBranch32WithPatch):
(JSC::MacroAssemblerARMv7::repatchCall):
(JSC::MacroAssemblerARMv7::linkCall):
* assembler/MacroAssemblerCodeRef.cpp:
(JSC::MacroAssemblerCodePtrBase::dumpWithName):
(JSC::MacroAssemblerCodeRefBase::tryToDisassemble):
(JSC::MacroAssemblerCodeRefBase::disassembly):
(JSC::MacroAssemblerCodePtr::createLLIntCodePtr): Deleted.
(JSC::MacroAssemblerCodePtr::dumpWithName const): Deleted.
(JSC::MacroAssemblerCodePtr::dump const): Deleted.
(JSC::MacroAssemblerCodeRef::createLLIntCodeRef): Deleted.
(JSC::MacroAssemblerCodeRef::tryToDisassemble const): Deleted.
(JSC::MacroAssemblerCodeRef::disassembly const): Deleted.
(JSC::MacroAssemblerCodeRef::dump const): Deleted.
* assembler/MacroAssemblerCodeRef.h:
(JSC::FunctionPtr::FunctionPtr):
(JSC::FunctionPtr::retagged const):
(JSC::FunctionPtr::retaggedExecutableAddress const):
(JSC::FunctionPtr::operator== const):
(JSC::FunctionPtr::operator!= const):
(JSC::ReturnAddressPtr::ReturnAddressPtr):
(JSC::MacroAssemblerCodePtr::MacroAssemblerCodePtr):
(JSC::MacroAssemblerCodePtr::createFromExecutableAddress):
(JSC::MacroAssemblerCodePtr::retagged const):
(JSC::MacroAssemblerCodePtr:: const):
(JSC::MacroAssemblerCodePtr::dumpWithName const):
(JSC::MacroAssemblerCodePtr::dump const):
(JSC::MacroAssemblerCodePtrHash::hash):
(JSC::MacroAssemblerCodePtrHash::equal):
(JSC::MacroAssemblerCodeRef::MacroAssemblerCodeRef):
(JSC::MacroAssemblerCodeRef::createSelfManagedCodeRef):
(JSC::MacroAssemblerCodeRef::code const):
(JSC::MacroAssemblerCodeRef::retaggedCode const):
(JSC::MacroAssemblerCodeRef::retagged const):
(JSC::MacroAssemblerCodeRef::tryToDisassemble const):
(JSC::MacroAssemblerCodeRef::disassembly const):
(JSC::MacroAssemblerCodeRef::dump const):
(JSC::FunctionPtr<tag>::FunctionPtr):
* assembler/MacroAssemblerMIPS.h:
(JSC::MacroAssemblerMIPS::readCallTarget):
(JSC::MacroAssemblerMIPS::replaceWithJump):
(JSC::MacroAssemblerMIPS::startOfPatchableBranch32WithPatchOnAddress):
(JSC::MacroAssemblerMIPS::startOfBranchPtrWithPatchOnRegister):
(JSC::MacroAssemblerMIPS::revertJumpReplacementToBranchPtrWithPatch):
(JSC::MacroAssemblerMIPS::startOfPatchableBranchPtrWithPatchOnAddress):
(JSC::MacroAssemblerMIPS::revertJumpReplacementToPatchableBranch32WithPatch):
(JSC::MacroAssemblerMIPS::revertJumpReplacementToPatchableBranchPtrWithPatch):
(JSC::MacroAssemblerMIPS::repatchCall):
(JSC::MacroAssemblerMIPS::linkCall):
* assembler/MacroAssemblerX86.h:
(JSC::MacroAssemblerX86::readCallTarget):
(JSC::MacroAssemblerX86::startOfBranchPtrWithPatchOnRegister):
(JSC::MacroAssemblerX86::startOfPatchableBranchPtrWithPatchOnAddress):
(JSC::MacroAssemblerX86::startOfPatchableBranch32WithPatchOnAddress):
(JSC::MacroAssemblerX86::revertJumpReplacementToBranchPtrWithPatch):
(JSC::MacroAssemblerX86::revertJumpReplacementToPatchableBranchPtrWithPatch):
(JSC::MacroAssemblerX86::revertJumpReplacementToPatchableBranch32WithPatch):
(JSC::MacroAssemblerX86::repatchCall):
(JSC::MacroAssemblerX86::linkCall):
* assembler/MacroAssemblerX86Common.h:
(JSC::MacroAssemblerX86Common::repatchCompact):
(JSC::MacroAssemblerX86Common::replaceWithVMHalt):
(JSC::MacroAssemblerX86Common::replaceWithJump):
* assembler/MacroAssemblerX86_64.h:
(JSC::MacroAssemblerX86_64::readCallTarget):
(JSC::MacroAssemblerX86_64::startOfBranchPtrWithPatchOnRegister):
(JSC::MacroAssemblerX86_64::startOfBranch32WithPatchOnRegister):
(JSC::MacroAssemblerX86_64::startOfPatchableBranchPtrWithPatchOnAddress):
(JSC::MacroAssemblerX86_64::startOfPatchableBranch32WithPatchOnAddress):
(JSC::MacroAssemblerX86_64::revertJumpReplacementToPatchableBranchPtrWithPatch):
(JSC::MacroAssemblerX86_64::revertJumpReplacementToPatchableBranch32WithPatch):
(JSC::MacroAssemblerX86_64::revertJumpReplacementToBranchPtrWithPatch):
(JSC::MacroAssemblerX86_64::repatchCall):
(JSC::MacroAssemblerX86_64::linkCall):
* assembler/testmasm.cpp:
(JSC::compile):
(JSC::invoke):
(JSC::testProbeModifiesProgramCounter):
* b3/B3Compilation.cpp:
(JSC::B3::Compilation::Compilation):
* b3/B3Compilation.h:
(JSC::B3::Compilation::code const):
(JSC::B3::Compilation::codeRef const):
* b3/B3Compile.cpp:
(JSC::B3::compile):
* b3/B3LowerMacros.cpp:
* b3/air/AirDisassembler.cpp:
(JSC::B3::Air::Disassembler::dump):
* b3/air/testair.cpp:
* b3/testb3.cpp:
(JSC::B3::invoke):
(JSC::B3::testInterpreter):
(JSC::B3::testEntrySwitchSimple):
(JSC::B3::testEntrySwitchNoEntrySwitch):
(JSC::B3::testEntrySwitchWithCommonPaths):
(JSC::B3::testEntrySwitchWithCommonPathsAndNonTrivialEntrypoint):
(JSC::B3::testEntrySwitchLoop):
* bytecode/AccessCase.cpp:
(JSC::AccessCase::generateImpl):
* bytecode/AccessCaseSnippetParams.cpp:
(JSC::SlowPathCallGeneratorWithArguments::generateImpl):
* bytecode/ByValInfo.h:
(JSC::ByValInfo::ByValInfo):
* bytecode/CallLinkInfo.cpp:
(JSC::CallLinkInfo::callReturnLocation):
(JSC::CallLinkInfo::patchableJump):
(JSC::CallLinkInfo::hotPathBegin):
(JSC::CallLinkInfo::slowPathStart):
* bytecode/CallLinkInfo.h:
(JSC::CallLinkInfo::setCallLocations):
(JSC::CallLinkInfo::hotPathOther):
* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::finishCreation):
* bytecode/GetByIdStatus.cpp:
(JSC::GetByIdStatus::computeForStubInfoWithoutExitSiteFeedback):
* bytecode/GetByIdVariant.cpp:
(JSC::GetByIdVariant::GetByIdVariant):
(JSC::GetByIdVariant::dumpInContext const):
* bytecode/GetByIdVariant.h:
(JSC::GetByIdVariant::customAccessorGetter const):
* bytecode/GetterSetterAccessCase.cpp:
(JSC::GetterSetterAccessCase::create):
(JSC::GetterSetterAccessCase::GetterSetterAccessCase):
(JSC::GetterSetterAccessCase::dumpImpl const):
* bytecode/GetterSetterAccessCase.h:
(JSC::GetterSetterAccessCase::customAccessor const):
(): Deleted.
* bytecode/HandlerInfo.h:
(JSC::HandlerInfo::initialize):
* bytecode/InlineAccess.cpp:
(JSC::linkCodeInline):
(JSC::InlineAccess::rewireStubAsJump):
* bytecode/InlineAccess.h:
* bytecode/JumpTable.h:
(JSC::StringJumpTable::ctiForValue):
(JSC::SimpleJumpTable::ctiForValue):
* bytecode/LLIntCallLinkInfo.h:
(JSC::LLIntCallLinkInfo::unlink):
* bytecode/PolymorphicAccess.cpp:
(JSC::AccessGenerationState::emitExplicitExceptionHandler):
(JSC::PolymorphicAccess::regenerate):
* bytecode/PolymorphicAccess.h:
(JSC::AccessGenerationResult::AccessGenerationResult):
(JSC::AccessGenerationResult::code const):
* bytecode/StructureStubInfo.h:
(JSC::StructureStubInfo::slowPathCallLocation):
(JSC::StructureStubInfo::doneLocation):
(JSC::StructureStubInfo::slowPathStartLocation):
(JSC::StructureStubInfo::patchableJumpForIn):
* dfg/DFGCommonData.h:
(JSC::DFG::CommonData::appendCatchEntrypoint):
* dfg/DFGDisassembler.cpp:
(JSC::DFG::Disassembler::dumpDisassembly):
* dfg/DFGDriver.h:
* dfg/DFGJITCompiler.cpp:
(JSC::DFG::JITCompiler::linkOSRExits):
(JSC::DFG::JITCompiler::compileExceptionHandlers):
(JSC::DFG::JITCompiler::link):
(JSC::DFG::JITCompiler::compileFunction):
(JSC::DFG::JITCompiler::noticeCatchEntrypoint):
* dfg/DFGJITCompiler.h:
(JSC::DFG::CallLinkRecord::CallLinkRecord):
(JSC::DFG::JITCompiler::appendCall):
(JSC::DFG::JITCompiler::JSCallRecord::JSCallRecord):
(JSC::DFG::JITCompiler::JSDirectCallRecord::JSDirectCallRecord):
(JSC::DFG::JITCompiler::JSDirectTailCallRecord::JSDirectTailCallRecord):
* dfg/DFGJITFinalizer.cpp:
(JSC::DFG::JITFinalizer::JITFinalizer):
(JSC::DFG::JITFinalizer::finalize):
(JSC::DFG::JITFinalizer::finalizeFunction):
* dfg/DFGJITFinalizer.h:
* dfg/DFGJumpReplacement.h:
(JSC::DFG::JumpReplacement::JumpReplacement):
* dfg/DFGNode.h:
* dfg/DFGOSREntry.cpp:
(JSC::DFG::prepareOSREntry):
(JSC::DFG::prepareCatchOSREntry):
* dfg/DFGOSREntry.h:
(JSC::DFG::prepareOSREntry):
* dfg/DFGOSRExit.cpp:
(JSC::DFG::OSRExit::executeOSRExit):
(JSC::DFG::reifyInlinedCallFrames):
(JSC::DFG::adjustAndJumpToTarget):
(JSC::DFG::OSRExit::codeLocationForRepatch const):
(JSC::DFG::OSRExit::emitRestoreArguments):
(JSC::DFG::OSRExit::compileOSRExit):
* dfg/DFGOSRExit.h:
* dfg/DFGOSRExitCompilerCommon.cpp:
(JSC::DFG::handleExitCounts):
(JSC::DFG::reifyInlinedCallFrames):
(JSC::DFG::osrWriteBarrier):
(JSC::DFG::adjustAndJumpToTarget):
* dfg/DFGOperations.cpp:
* dfg/DFGSlowPathGenerator.h:
(JSC::DFG::CallResultAndArgumentsSlowPathGenerator::CallResultAndArgumentsSlowPathGenerator):
(JSC::DFG::CallResultAndArgumentsSlowPathGenerator::unpackAndGenerate):
(JSC::DFG::slowPathCall):
* dfg/DFGSpeculativeJIT.cpp:
(JSC::DFG::SpeculativeJIT::compileMathIC):
(JSC::DFG::SpeculativeJIT::compileCallDOM):
(JSC::DFG::SpeculativeJIT::compileCallDOMGetter):
(JSC::DFG::SpeculativeJIT::emitSwitchIntJump):
(JSC::DFG::SpeculativeJIT::emitSwitchImm):
(JSC::DFG::SpeculativeJIT::emitSwitchStringOnString):
(JSC::DFG::SpeculativeJIT::compileHasIndexedProperty):
(JSC::DFG::SpeculativeJIT::compileGetDirectPname):
(JSC::DFG::SpeculativeJIT::cachedPutById):
* dfg/DFGSpeculativeJIT.h:
(JSC::DFG::SpeculativeJIT::callOperation):
(JSC::DFG::SpeculativeJIT::appendCall):
(JSC::DFG::SpeculativeJIT::appendCallWithCallFrameRollbackOnException):
(JSC::DFG::SpeculativeJIT::appendCallWithCallFrameRollbackOnExceptionSetResult):
(JSC::DFG::SpeculativeJIT::appendCallSetResult):
* dfg/DFGSpeculativeJIT64.cpp:
(JSC::DFG::SpeculativeJIT::cachedGetById):
(JSC::DFG::SpeculativeJIT::cachedGetByIdWithThis):
(JSC::DFG::SpeculativeJIT::compile):
* dfg/DFGThunks.cpp:
(JSC::DFG::osrExitThunkGenerator):
(JSC::DFG::osrExitGenerationThunkGenerator):
(JSC::DFG::osrEntryThunkGenerator):
* dfg/DFGThunks.h:
* disassembler/ARM64Disassembler.cpp:
(JSC::tryToDisassemble):
* disassembler/ARMv7Disassembler.cpp:
(JSC::tryToDisassemble):
* disassembler/Disassembler.cpp:
(JSC::disassemble):
(JSC::disassembleAsynchronously):
* disassembler/Disassembler.h:
(JSC::tryToDisassemble):
* disassembler/UDis86Disassembler.cpp:
(JSC::tryToDisassembleWithUDis86):
* disassembler/UDis86Disassembler.h:
(JSC::tryToDisassembleWithUDis86):
* disassembler/X86Disassembler.cpp:
(JSC::tryToDisassemble):
* ftl/FTLCompile.cpp:
(JSC::FTL::compile):
* ftl/FTLExceptionTarget.cpp:
(JSC::FTL::ExceptionTarget::label):
(JSC::FTL::ExceptionTarget::jumps):
* ftl/FTLExceptionTarget.h:
* ftl/FTLGeneratedFunction.h:
* ftl/FTLJITCode.cpp:
(JSC::FTL::JITCode::initializeB3Code):
(JSC::FTL::JITCode::initializeAddressForCall):
(JSC::FTL::JITCode::initializeArityCheckEntrypoint):
(JSC::FTL::JITCode::addressForCall):
(JSC::FTL::JITCode::executableAddressAtOffset):
* ftl/FTLJITCode.h:
(JSC::FTL::JITCode::b3Code const):
* ftl/FTLJITFinalizer.cpp:
(JSC::FTL::JITFinalizer::finalizeCommon):
* ftl/FTLLazySlowPath.cpp:
(JSC::FTL::LazySlowPath::initialize):
(JSC::FTL::LazySlowPath::generate):
* ftl/FTLLazySlowPath.h:
(JSC::FTL::LazySlowPath::patchableJump const):
(JSC::FTL::LazySlowPath::done const):
(JSC::FTL::LazySlowPath::stub const):
* ftl/FTLLazySlowPathCall.h:
(JSC::FTL::createLazyCallGenerator):
* ftl/FTLLink.cpp:
(JSC::FTL::link):
* ftl/FTLLowerDFGToB3.cpp:
(JSC::FTL::DFG::LowerDFGToB3::lower):
(JSC::FTL::DFG::LowerDFGToB3::compileCallOrConstruct):
(JSC::FTL::DFG::LowerDFGToB3::compileDirectCallOrConstruct):
(JSC::FTL::DFG::LowerDFGToB3::compileTailCall):
(JSC::FTL::DFG::LowerDFGToB3::compileCallOrConstructVarargsSpread):
(JSC::FTL::DFG::LowerDFGToB3::compileCallOrConstructVarargs):
(JSC::FTL::DFG::LowerDFGToB3::compileCallEval):
(JSC::FTL::DFG::LowerDFGToB3::compileInvalidationPoint):
(JSC::FTL::DFG::LowerDFGToB3::compileIn):
(JSC::FTL::DFG::LowerDFGToB3::compileCheckSubClass):
(JSC::FTL::DFG::LowerDFGToB3::compileCallDOM):
(JSC::FTL::DFG::LowerDFGToB3::compileCallDOMGetter):
(JSC::FTL::DFG::LowerDFGToB3::lazySlowPath):
* ftl/FTLOSRExit.cpp:
(JSC::FTL::OSRExit::codeLocationForRepatch const):
* ftl/FTLOSRExit.h:
* ftl/FTLOSRExitCompiler.cpp:
(JSC::FTL::compileStub):
(JSC::FTL::compileFTLOSRExit):
* ftl/FTLOSRExitHandle.cpp:
(JSC::FTL::OSRExitHandle::emitExitThunk):
* ftl/FTLOperations.cpp:
(JSC::FTL::compileFTLLazySlowPath):
* ftl/FTLPatchpointExceptionHandle.cpp:
(JSC::FTL::PatchpointExceptionHandle::scheduleExitCreationForUnwind):
* ftl/FTLSlowPathCall.cpp:
(JSC::FTL::SlowPathCallContext::keyWithTarget const):
(JSC::FTL::SlowPathCallContext::makeCall):
* ftl/FTLSlowPathCall.h:
(JSC::FTL::callOperation):
* ftl/FTLSlowPathCallKey.cpp:
(JSC::FTL::SlowPathCallKey::dump const):
* ftl/FTLSlowPathCallKey.h:
(JSC::FTL::SlowPathCallKey::SlowPathCallKey):
(JSC::FTL::SlowPathCallKey::callTarget const):
(JSC::FTL::SlowPathCallKey::withCallTarget):
(JSC::FTL::SlowPathCallKey::hash const):
(JSC::FTL::SlowPathCallKey::callPtrTag const): Deleted.
* ftl/FTLState.cpp:
(JSC::FTL::State::State):
* ftl/FTLThunks.cpp:
(JSC::FTL::genericGenerationThunkGenerator):
(JSC::FTL::osrExitGenerationThunkGenerator):
(JSC::FTL::lazySlowPathGenerationThunkGenerator):
(JSC::FTL::slowPathCallThunkGenerator):
* ftl/FTLThunks.h:
(JSC::FTL::generateIfNecessary):
(JSC::FTL::keyForThunk):
(JSC::FTL::Thunks::getSlowPathCallThunk):
(JSC::FTL::Thunks::keyForSlowPathCallThunk):
* interpreter/InterpreterInlines.h:
(JSC::Interpreter::getOpcodeID):
* jit/AssemblyHelpers.cpp:
(JSC::AssemblyHelpers::callExceptionFuzz):
(JSC::AssemblyHelpers::emitDumbVirtualCall):
(JSC::AssemblyHelpers::debugCall):
* jit/CCallHelpers.cpp:
(JSC::CCallHelpers::ensureShadowChickenPacket):
* jit/ExecutableAllocator.cpp:
(JSC::FixedVMPoolExecutableAllocator::initializeSeparatedWXHeaps):
(JSC::FixedVMPoolExecutableAllocator::jitWriteThunkGenerator):
* jit/ExecutableAllocator.h:
(JSC::performJITMemcpy):
* jit/GCAwareJITStubRoutine.cpp:
(JSC::GCAwareJITStubRoutine::GCAwareJITStubRoutine):
(JSC::MarkingGCAwareJITStubRoutine::MarkingGCAwareJITStubRoutine):
(JSC::GCAwareJITStubRoutineWithExceptionHandler::GCAwareJITStubRoutineWithExceptionHandler):
(JSC::createJITStubRoutine):
* jit/GCAwareJITStubRoutine.h:
(JSC::createJITStubRoutine):
* jit/JIT.cpp:
(JSC::ctiPatchCallByReturnAddress):
(JSC::JIT::compileWithoutLinking):
(JSC::JIT::link):
(JSC::JIT::privateCompileExceptionHandlers):
* jit/JIT.h:
(JSC::CallRecord::CallRecord):
* jit/JITArithmetic.cpp:
(JSC::JIT::emitMathICFast):
(JSC::JIT::emitMathICSlow):
* jit/JITCall.cpp:
(JSC::JIT::compileOpCallSlowCase):
* jit/JITCall32_64.cpp:
(JSC::JIT::compileOpCallSlowCase):
* jit/JITCode.cpp:
(JSC::JITCodeWithCodeRef::JITCodeWithCodeRef):
(JSC::JITCodeWithCodeRef::executableAddressAtOffset):
(JSC::DirectJITCode::DirectJITCode):
(JSC::DirectJITCode::initializeCodeRef):
(JSC::DirectJITCode::addressForCall):
(JSC::NativeJITCode::NativeJITCode):
(JSC::NativeJITCode::initializeCodeRef):
(JSC::NativeJITCode::addressForCall):
* jit/JITCode.h:
* jit/JITCodeMap.h:
(JSC::JITCodeMap::Entry::Entry):
(JSC::JITCodeMap::Entry::codeLocation):
(JSC::JITCodeMap::append):
(JSC::JITCodeMap::find const):
* jit/JITDisassembler.cpp:
(JSC::JITDisassembler::dumpDisassembly):
* jit/JITExceptions.cpp:
(JSC::genericUnwind):
* jit/JITInlineCacheGenerator.cpp:
(JSC::JITByIdGenerator::finalize):
* jit/JITInlines.h:
(JSC::JIT::emitNakedCall):
(JSC::JIT::emitNakedTailCall):
(JSC::JIT::appendCallWithExceptionCheck):
(JSC::JIT::appendCallWithExceptionCheckAndSlowPathReturnType):
(JSC::JIT::appendCallWithCallFrameRollbackOnException):
(JSC::JIT::appendCallWithExceptionCheckSetJSValueResult):
(JSC::JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile):
* jit/JITMathIC.h:
(JSC::isProfileEmpty):
* jit/JITOpcodes.cpp:
(JSC::JIT::emit_op_catch):
(JSC::JIT::emit_op_switch_imm):
(JSC::JIT::emit_op_switch_char):
(JSC::JIT::emit_op_switch_string):
(JSC::JIT::privateCompileHasIndexedProperty):
(JSC::JIT::emitSlow_op_has_indexed_property):
* jit/JITOpcodes32_64.cpp:
(JSC::JIT::privateCompileHasIndexedProperty):
* jit/JITOperations.cpp:
(JSC::getByVal):
* jit/JITPropertyAccess.cpp:
(JSC::JIT::stringGetByValStubGenerator):
(JSC::JIT::emitGetByValWithCachedId):
(JSC::JIT::emitSlow_op_get_by_val):
(JSC::JIT::emitPutByValWithCachedId):
(JSC::JIT::emitSlow_op_put_by_val):
(JSC::JIT::emitSlow_op_try_get_by_id):
(JSC::JIT::emitSlow_op_get_by_id_direct):
(JSC::JIT::emitSlow_op_get_by_id):
(JSC::JIT::emitSlow_op_get_by_id_with_this):
(JSC::JIT::emitSlow_op_put_by_id):
(JSC::JIT::privateCompileGetByVal):
(JSC::JIT::privateCompileGetByValWithCachedId):
(JSC::JIT::privateCompilePutByVal):
(JSC::JIT::privateCompilePutByValWithCachedId):
* jit/JITPropertyAccess32_64.cpp:
(JSC::JIT::stringGetByValStubGenerator):
(JSC::JIT::emitSlow_op_get_by_val):
(JSC::JIT::emitSlow_op_put_by_val):
* jit/JITStubRoutine.h:
(JSC::JITStubRoutine::JITStubRoutine):
(JSC::JITStubRoutine::createSelfManagedRoutine):
(JSC::JITStubRoutine::code const):
(JSC::JITStubRoutine::asCodePtr):
* jit/JITThunks.cpp:
(JSC::JITThunks::ctiNativeCall):
(JSC::JITThunks::ctiNativeConstruct):
(JSC::JITThunks::ctiNativeTailCall):
(JSC::JITThunks::ctiNativeTailCallWithoutSavedTags):
(JSC::JITThunks::ctiInternalFunctionCall):
(JSC::JITThunks::ctiInternalFunctionConstruct):
(JSC::JITThunks::ctiStub):
(JSC::JITThunks::existingCTIStub):
(JSC::JITThunks::hostFunctionStub):
* jit/JITThunks.h:
* jit/PCToCodeOriginMap.cpp:
(JSC::PCToCodeOriginMap::PCToCodeOriginMap):
* jit/PCToCodeOriginMap.h:
* jit/PolymorphicCallStubRoutine.cpp:
(JSC::PolymorphicCallStubRoutine::PolymorphicCallStubRoutine):
* jit/PolymorphicCallStubRoutine.h:
* jit/Repatch.cpp:
(JSC::readPutICCallTarget):
(JSC::ftlThunkAwareRepatchCall):
(JSC::appropriateOptimizingGetByIdFunction):
(JSC::appropriateGetByIdFunction):
(JSC::tryCacheGetByID):
(JSC::repatchGetByID):
(JSC::tryCachePutByID):
(JSC::repatchPutByID):
(JSC::tryCacheIn):
(JSC::repatchIn):
(JSC::linkSlowFor):
(JSC::linkFor):
(JSC::linkDirectFor):
(JSC::revertCall):
(JSC::unlinkFor):
(JSC::linkVirtualFor):
(JSC::linkPolymorphicCall):
(JSC::resetGetByID):
(JSC::resetPutByID):
* jit/Repatch.h:
* jit/SlowPathCall.h:
(JSC::JITSlowPathCall::call):
* jit/SpecializedThunkJIT.h:
(JSC::SpecializedThunkJIT::finalize):
(JSC::SpecializedThunkJIT::callDoubleToDouble):
(JSC::SpecializedThunkJIT::callDoubleToDoublePreservingReturn):
* jit/ThunkGenerator.h:
* jit/ThunkGenerators.cpp:
(JSC::throwExceptionFromCallSlowPathGenerator):
(JSC::slowPathFor):
(JSC::linkCallThunkGenerator):
(JSC::linkPolymorphicCallThunkGenerator):
(JSC::virtualThunkFor):
(JSC::nativeForGenerator):
(JSC::nativeCallGenerator):
(JSC::nativeTailCallGenerator):
(JSC::nativeTailCallWithoutSavedTagsGenerator):
(JSC::nativeConstructGenerator):
(JSC::internalFunctionCallGenerator):
(JSC::internalFunctionConstructGenerator):
(JSC::arityFixupGenerator):
(JSC::unreachableGenerator):
(JSC::charCodeAtThunkGenerator):
(JSC::charAtThunkGenerator):
(JSC::fromCharCodeThunkGenerator):
(JSC::clz32ThunkGenerator):
(JSC::sqrtThunkGenerator):
(JSC::floorThunkGenerator):
(JSC::ceilThunkGenerator):
(JSC::truncThunkGenerator):
(JSC::roundThunkGenerator):
(JSC::expThunkGenerator):
(JSC::logThunkGenerator):
(JSC::absThunkGenerator):
(JSC::imulThunkGenerator):
(JSC::randomThunkGenerator):
(JSC::boundThisNoArgsFunctionCallGenerator):
* jit/ThunkGenerators.h:
* llint/LLIntData.cpp:
(JSC::LLInt::initialize):
* llint/LLIntData.h:
(JSC::LLInt::getExecutableAddress):
(JSC::LLInt::getCodePtr):
(JSC::LLInt::getCodeRef):
(JSC::LLInt::getCodeFunctionPtr):
* llint/LLIntEntrypoint.cpp:
(JSC::LLInt::setFunctionEntrypoint):
(JSC::LLInt::setEvalEntrypoint):
(JSC::LLInt::setProgramEntrypoint):
(JSC::LLInt::setModuleProgramEntrypoint):
* llint/LLIntExceptions.cpp:
(JSC::LLInt::callToThrow):
* llint/LLIntSlowPaths.cpp:
(JSC::LLInt::LLINT_SLOW_PATH_DECL):
(JSC::LLInt::setUpCall):
* llint/LLIntThunks.cpp:
(JSC::vmEntryToWasm):
(JSC::LLInt::generateThunkWithJumpTo):
(JSC::LLInt::functionForCallEntryThunkGenerator):
(JSC::LLInt::functionForConstructEntryThunkGenerator):
(JSC::LLInt::functionForCallArityCheckThunkGenerator):
(JSC::LLInt::functionForConstructArityCheckThunkGenerator):
(JSC::LLInt::evalEntryThunkGenerator):
(JSC::LLInt::programEntryThunkGenerator):
(JSC::LLInt::moduleProgramEntryThunkGenerator):
* llint/LLIntThunks.h:
* llint/LowLevelInterpreter.asm:
* llint/LowLevelInterpreter32_64.asm:
* llint/LowLevelInterpreter64.asm:
* profiler/ProfilerCompilation.cpp:
(JSC::Profiler::Compilation::addOSRExitSite):
* profiler/ProfilerCompilation.h:
* profiler/ProfilerOSRExitSite.cpp:
(JSC::Profiler::OSRExitSite::toJS const):
* profiler/ProfilerOSRExitSite.h:
(JSC::Profiler::OSRExitSite::OSRExitSite):
(JSC::Profiler::OSRExitSite::codeAddress const):
(JSC::Profiler::OSRExitSite:: const): Deleted.
* runtime/ExecutableBase.cpp:
(JSC::ExecutableBase::clearCode):
* runtime/ExecutableBase.h:
(JSC::ExecutableBase::entrypointFor):
* runtime/NativeExecutable.cpp:
(JSC::NativeExecutable::finishCreation):
* runtime/NativeFunction.h:
(JSC::TaggedNativeFunction::TaggedNativeFunction):
(JSC::TaggedNativeFunction::operator NativeFunction):
* runtime/PtrTag.h:
(JSC::tagCodePtr):
(JSC::untagCodePtr):
(JSC::retagCodePtr):
(JSC::tagCFunctionPtr):
(JSC::untagCFunctionPtr):
(JSC::nextPtrTagID): Deleted.
* runtime/PutPropertySlot.h:
(JSC::PutPropertySlot::PutPropertySlot):
(JSC::PutPropertySlot::setCustomValue):
(JSC::PutPropertySlot::setCustomAccessor):
(JSC::PutPropertySlot::customSetter const):
* runtime/ScriptExecutable.cpp:
(JSC::ScriptExecutable::installCode):
* runtime/VM.cpp:
(JSC::VM::getHostFunction):
(JSC::VM::getCTIInternalFunctionTrampolineFor):
* runtime/VM.h:
(JSC::VM::getCTIStub):
* wasm/WasmB3IRGenerator.cpp:
(JSC::Wasm::B3IRGenerator::B3IRGenerator):
(JSC::Wasm::B3IRGenerator::emitExceptionCheck):
(JSC::Wasm::B3IRGenerator::emitTierUpCheck):
(JSC::Wasm::B3IRGenerator::addCall):
(JSC::Wasm::B3IRGenerator::addCallIndirect):
* wasm/WasmBBQPlan.cpp:
(JSC::Wasm::BBQPlan::prepare):
(JSC::Wasm::BBQPlan::complete):
* wasm/WasmBBQPlan.h:
* wasm/WasmBinding.cpp:
(JSC::Wasm::wasmToWasm):
* wasm/WasmBinding.h:
* wasm/WasmCallee.h:
(JSC::Wasm::Callee::entrypoint const):
* wasm/WasmCallingConvention.h:
(JSC::Wasm::CallingConvention::setupFrameInPrologue const):
* wasm/WasmCodeBlock.h:
(JSC::Wasm::CodeBlock::entrypointLoadLocationFromFunctionIndexSpace):
* wasm/WasmFaultSignalHandler.cpp:
(JSC::Wasm::trapHandler):
* wasm/WasmFormat.h:
* wasm/WasmInstance.h:
* wasm/WasmOMGPlan.cpp:
(JSC::Wasm::OMGPlan::work):
* wasm/WasmThunks.cpp:
(JSC::Wasm::throwExceptionFromWasmThunkGenerator):
(JSC::Wasm::throwStackOverflowFromWasmThunkGenerator):
(JSC::Wasm::triggerOMGTierUpThunkGenerator):
(JSC::Wasm::Thunks::stub):
(JSC::Wasm::Thunks::existingStub):
* wasm/WasmThunks.h:
* wasm/js/JSToWasm.cpp:
(JSC::Wasm::createJSToWasmWrapper):
* wasm/js/JSWebAssemblyCodeBlock.h:
* wasm/js/WasmToJS.cpp:
(JSC::Wasm::handleBadI64Use):
(JSC::Wasm::wasmToJS):
* wasm/js/WasmToJS.h:
* wasm/js/WebAssemblyFunction.h:
* yarr/YarrJIT.cpp:
(JSC::Yarr::YarrGenerator::loadFromFrameAndJump):
(JSC::Yarr::YarrGenerator::BacktrackingState::linkDataLabels):
(JSC::Yarr::YarrGenerator::compile):
* yarr/YarrJIT.h:
(JSC::Yarr::YarrCodeBlock::set8BitCode):
(JSC::Yarr::YarrCodeBlock::set16BitCode):
(JSC::Yarr::YarrCodeBlock::set8BitCodeMatchOnly):
(JSC::Yarr::YarrCodeBlock::set16BitCodeMatchOnly):
(JSC::Yarr::YarrCodeBlock::execute):
(JSC::Yarr::YarrCodeBlock::clear):

Source/WebCore:

No new tests. This is covered by existing tests.

* WebCore.xcodeproj/project.pbxproj:
* css/ElementRuleCollector.cpp:
(WebCore::ElementRuleCollector::ruleMatches):
* cssjit/CSSPtrTag.h: Added.
* cssjit/CompiledSelector.h:
* cssjit/FunctionCall.h:
(WebCore::FunctionCall::FunctionCall):
(WebCore::FunctionCall::setFunctionAddress):
(WebCore::FunctionCall::prepareAndCall):
* cssjit/SelectorCompiler.cpp:
(WebCore::SelectorCompiler::compileSelector):
(WebCore::SelectorCompiler::SelectorFragment::appendUnoptimizedPseudoClassWithContext):
(WebCore::SelectorCompiler::addPseudoClassType):
(WebCore::SelectorCompiler::SelectorCodeGenerator::compile):
(WebCore::SelectorCompiler::SelectorCodeGenerator::generateElementAttributeFunctionCallValueMatching):
(WebCore::SelectorCompiler::SelectorCodeGenerator::generateElementFunctionCallTest):
(WebCore::SelectorCompiler::SelectorCodeGenerator::generateContextFunctionCallTest):
* cssjit/SelectorCompiler.h:
(WebCore::SelectorCompiler::ruleCollectorSimpleSelectorCheckerFunction):
(WebCore::SelectorCompiler::querySelectorSimpleSelectorCheckerFunction):
(WebCore::SelectorCompiler::ruleCollectorSelectorCheckerFunctionWithCheckingContext):
(WebCore::SelectorCompiler::querySelectorSelectorCheckerFunctionWithCheckingContext):
* dom/SelectorQuery.cpp:
(WebCore::SelectorDataList::executeCompiledSingleMultiSelectorData const):
(WebCore::SelectorDataList::execute const):
* dom/SelectorQuery.h:



git-svn-id: http://svn.webkit.org/repository/webkit/trunk@230748 268f45cc-cd09-0410-ab3c-d52691b4dbfc
+        * profiler/ProfilerOSRExitSite.cpp:
+        (JSC::Profiler::OSRExitSite::toJS const):
+        * profiler/ProfilerOSRExitSite.h:
+        (JSC::Profiler::OSRExitSite::OSRExitSite):
+        (JSC::Profiler::OSRExitSite::codeAddress const):
+        (JSC::Profiler::OSRExitSite:: const): Deleted.
+        * runtime/ExecutableBase.cpp:
+        (JSC::ExecutableBase::clearCode):
+        * runtime/ExecutableBase.h:
+        (JSC::ExecutableBase::entrypointFor):
+        * runtime/NativeExecutable.cpp:
+        (JSC::NativeExecutable::finishCreation):
+        * runtime/NativeFunction.h:
+        (JSC::TaggedNativeFunction::TaggedNativeFunction):
+        (JSC::TaggedNativeFunction::operator NativeFunction):
+        * runtime/PtrTag.h:
+        (JSC::tagCodePtr):
+        (JSC::untagCodePtr):
+        (JSC::retagCodePtr):
+        (JSC::tagCFunctionPtr):
+        (JSC::untagCFunctionPtr):
+        (JSC::nextPtrTagID): Deleted.
+        * runtime/PutPropertySlot.h:
+        (JSC::PutPropertySlot::PutPropertySlot):
+        (JSC::PutPropertySlot::setCustomValue):
+        (JSC::PutPropertySlot::setCustomAccessor):
+        (JSC::PutPropertySlot::customSetter const):
+        * runtime/ScriptExecutable.cpp:
+        (JSC::ScriptExecutable::installCode):
+        * runtime/VM.cpp:
+        (JSC::VM::getHostFunction):
+        (JSC::VM::getCTIInternalFunctionTrampolineFor):
+        * runtime/VM.h:
+        (JSC::VM::getCTIStub):
+        * wasm/WasmB3IRGenerator.cpp:
+        (JSC::Wasm::B3IRGenerator::B3IRGenerator):
+        (JSC::Wasm::B3IRGenerator::emitExceptionCheck):
+        (JSC::Wasm::B3IRGenerator::emitTierUpCheck):
+        (JSC::Wasm::B3IRGenerator::addCall):
+        (JSC::Wasm::B3IRGenerator::addCallIndirect):
+        * wasm/WasmBBQPlan.cpp:
+        (JSC::Wasm::BBQPlan::prepare):
+        (JSC::Wasm::BBQPlan::complete):
+        * wasm/WasmBBQPlan.h:
+        * wasm/WasmBinding.cpp:
+        (JSC::Wasm::wasmToWasm):
+        * wasm/WasmBinding.h:
+        * wasm/WasmCallee.h:
+        (JSC::Wasm::Callee::entrypoint const):
+        * wasm/WasmCallingConvention.h:
+        (JSC::Wasm::CallingConvention::setupFrameInPrologue const):
+        * wasm/WasmCodeBlock.h:
+        (JSC::Wasm::CodeBlock::entrypointLoadLocationFromFunctionIndexSpace):
+        * wasm/WasmFaultSignalHandler.cpp:
+        (JSC::Wasm::trapHandler):
+        * wasm/WasmFormat.h:
+        * wasm/WasmInstance.h:
+        * wasm/WasmOMGPlan.cpp:
+        (JSC::Wasm::OMGPlan::work):
+        * wasm/WasmThunks.cpp:
+        (JSC::Wasm::throwExceptionFromWasmThunkGenerator):
+        (JSC::Wasm::throwStackOverflowFromWasmThunkGenerator):
+        (JSC::Wasm::triggerOMGTierUpThunkGenerator):
+        (JSC::Wasm::Thunks::stub):
+        (JSC::Wasm::Thunks::existingStub):
+        * wasm/WasmThunks.h:
+        * wasm/js/JSToWasm.cpp:
+        (JSC::Wasm::createJSToWasmWrapper):
+        * wasm/js/JSWebAssemblyCodeBlock.h:
+        * wasm/js/WasmToJS.cpp:
+        (JSC::Wasm::handleBadI64Use):
+        (JSC::Wasm::wasmToJS):
+        * wasm/js/WasmToJS.h:
+        * wasm/js/WebAssemblyFunction.h:
+        * yarr/YarrJIT.cpp:
+        (JSC::Yarr::YarrGenerator::loadFromFrameAndJump):
+        (JSC::Yarr::YarrGenerator::BacktrackingState::linkDataLabels):
+        (JSC::Yarr::YarrGenerator::compile):
+        * yarr/YarrJIT.h:
+        (JSC::Yarr::YarrCodeBlock::set8BitCode):
+        (JSC::Yarr::YarrCodeBlock::set16BitCode):
+        (JSC::Yarr::YarrCodeBlock::set8BitCodeMatchOnly):
+        (JSC::Yarr::YarrCodeBlock::set16BitCodeMatchOnly):
+        (JSC::Yarr::YarrCodeBlock::execute):
+        (JSC::Yarr::YarrCodeBlock::clear):
+
 2018-04-17  Commit Queue  <commit-queue@webkit.org>
 
         Unreviewed, rolling out r230697, r230720, and r230724.
diff --git a/Source/JavaScriptCore/assembler/AbstractMacroAssembler.h b/Source/JavaScriptCore/assembler/AbstractMacroAssembler.h
index ecdcb75..a82d629 100644
--- a/Source/JavaScriptCore/assembler/AbstractMacroAssembler.h
+++ b/Source/JavaScriptCore/assembler/AbstractMacroAssembler.h
@@ -79,8 +79,8 @@
     typedef AbstractMacroAssembler<AssemblerType> AbstractMacroAssemblerType;
     typedef AssemblerType AssemblerType_T;
 
-    typedef MacroAssemblerCodePtr CodePtr;
-    typedef MacroAssemblerCodeRef CodeRef;
+    template<PtrTag tag> using CodePtr = MacroAssemblerCodePtr<tag>;
+    template<PtrTag tag> using CodeRef = MacroAssemblerCodeRef<tag>;
 
     class Jump;
 
@@ -394,7 +394,7 @@
         friend class AbstractMacroAssembler<AssemblerType>;
         friend struct DFG::OSRExit;
         friend class Jump;
-        friend class MacroAssemblerCodeRef;
+        template<PtrTag> friend class MacroAssemblerCodeRef;
         friend class LinkBuffer;
         friend class Watchpoint;
 
@@ -843,9 +843,10 @@
         return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label);
     }
 
-    static ptrdiff_t differenceBetweenCodePtr(const MacroAssemblerCodePtr& a, const MacroAssemblerCodePtr& b)
+    template<PtrTag aTag, PtrTag bTag>
+    static ptrdiff_t differenceBetweenCodePtr(const MacroAssemblerCodePtr<aTag>& a, const MacroAssemblerCodePtr<bTag>& b)
     {
-        return b.executableAddress<ptrdiff_t>() - a.executableAddress<ptrdiff_t>();
+        return b.template dataLocation<ptrdiff_t>() - a.template dataLocation<ptrdiff_t>();
     }
 
     unsigned debugOffset() { return m_assembler.debugOffset(); }
@@ -857,7 +858,8 @@
 
     AssemblerType m_assembler;
     
-    static void linkJump(void* code, Jump jump, CodeLocationLabel target)
+    template<PtrTag tag>
+    static void linkJump(void* code, Jump jump, CodeLocationLabel<tag> target)
     {
         AssemblerType::linkJump(code, jump.m_label, target.dataLocation());
     }
@@ -867,12 +869,14 @@
         AssemblerType::linkPointer(code, label, value);
     }
 
-    static void linkPointer(void* code, AssemblerLabel label, MacroAssemblerCodePtr value)
+    template<PtrTag tag>
+    static void linkPointer(void* code, AssemblerLabel label, MacroAssemblerCodePtr<tag> value)
     {
         AssemblerType::linkPointer(code, label, value.executableAddress());
     }
 
-    static void* getLinkerAddress(void* code, AssemblerLabel label, PtrTag tag = NoPtrTag)
+    template<PtrTag tag>
+    static void* getLinkerAddress(void* code, AssemblerLabel label)
     {
         return tagCodePtr(AssemblerType::getRelocatedAddress(code, label), tag);
     }
@@ -882,56 +886,64 @@
         return AssemblerType::getCallReturnOffset(call.m_label);
     }
 
-    static void repatchJump(CodeLocationJump jump, CodeLocationLabel destination)
+    template<PtrTag jumpTag, PtrTag destTag>
+    static void repatchJump(CodeLocationJump<jumpTag> jump, CodeLocationLabel<destTag> destination)
     {
         AssemblerType::relinkJump(jump.dataLocation(), destination.dataLocation());
     }
     
-    static void repatchJumpToNop(CodeLocationJump jump)
+    template<PtrTag jumpTag>
+    static void repatchJumpToNop(CodeLocationJump<jumpTag> jump)
     {
         AssemblerType::relinkJumpToNop(jump.dataLocation());
     }
 
-    static void repatchNearCall(CodeLocationNearCall nearCall, CodeLocationLabel destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchNearCall(CodeLocationNearCall<callTag> nearCall, CodeLocationLabel<destTag> destination)
     {
-        assertIsTaggedWith(destination.executableAddress(), NearCodePtrTag);
         switch (nearCall.callMode()) {
         case NearCallMode::Tail:
             AssemblerType::relinkJump(nearCall.dataLocation(), destination.dataLocation());
             return;
         case NearCallMode::Regular:
-            AssemblerType::relinkCall(nearCall.dataLocation(), destination.executableAddress());
+            AssemblerType::relinkCall(nearCall.dataLocation(), destination.untaggedExecutableAddress());
             return;
         }
         RELEASE_ASSERT_NOT_REACHED();
     }
 
-    static void repatchCompact(CodeLocationDataLabelCompact dataLabelCompact, int32_t value)
+    template<PtrTag tag>
+    static void repatchCompact(CodeLocationDataLabelCompact<tag> dataLabelCompact, int32_t value)
     {
-        AssemblerType::repatchCompact(dataLabelCompact.dataLocation(), value);
+        AssemblerType::repatchCompact(dataLabelCompact.dataLocation(), value);
     }
-    
-    static void repatchInt32(CodeLocationDataLabel32 dataLabel32, int32_t value)
+
+    template<PtrTag tag>
+    static void repatchInt32(CodeLocationDataLabel32<tag> dataLabel32, int32_t value)
     {
         AssemblerType::repatchInt32(dataLabel32.dataLocation(), value);
     }
 
-    static void repatchPointer(CodeLocationDataLabelPtr dataLabelPtr, void* value)
+    template<PtrTag tag>
+    static void repatchPointer(CodeLocationDataLabelPtr<tag> dataLabelPtr, void* value)
     {
         AssemblerType::repatchPointer(dataLabelPtr.dataLocation(), value);
     }
-    
-    static void* readPointer(CodeLocationDataLabelPtr dataLabelPtr)
+
+    template<PtrTag tag>
+    static void* readPointer(CodeLocationDataLabelPtr<tag> dataLabelPtr)
     {
         return AssemblerType::readPointer(dataLabelPtr.dataLocation());
     }
     
-    static void replaceWithLoad(CodeLocationConvertibleLoad label)
+    template<PtrTag tag>
+    static void replaceWithLoad(CodeLocationConvertibleLoad<tag> label)
     {
         AssemblerType::replaceWithLoad(label.dataLocation());
     }
-    
-    static void replaceWithAddressComputation(CodeLocationConvertibleLoad label)
+
+    template<PtrTag tag>
+    static void replaceWithAddressComputation(CodeLocationConvertibleLoad<tag> label)
     {
         AssemblerType::replaceWithAddressComputation(label.dataLocation());
     }
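
For illustration, a minimal usage sketch of the templatized repatching interface above (not part of the patch). The helper name, the NoPtrTag choice, and the assumption that the call site has access to these MacroAssembler statics (as the JIT's repatching code does) are all illustrative:

    // Both tags travel in the CodeLocation types, so repatchJump's template
    // arguments are deduced from its arguments.
    static void repatchExample(CodeLocationJump<NoPtrTag> jump, CodeLocationLabel<NoPtrTag> target)
    {
        MacroAssembler::repatchJump(jump, target);
    }
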
diff --git a/Source/JavaScriptCore/assembler/CodeLocation.h b/Source/JavaScriptCore/assembler/CodeLocation.h
index 6b36a3c..75446df 100644
--- a/Source/JavaScriptCore/assembler/CodeLocation.h
+++ b/Source/JavaScriptCore/assembler/CodeLocation.h
@@ -33,15 +33,15 @@
 
 enum NearCallMode { Regular, Tail };
 
-class CodeLocationInstruction;
-class CodeLocationLabel;
-class CodeLocationJump;
-class CodeLocationCall;
-class CodeLocationNearCall;
-class CodeLocationDataLabelCompact;
-class CodeLocationDataLabel32;
-class CodeLocationDataLabelPtr;
-class CodeLocationConvertibleLoad;
+template<PtrTag> class CodeLocationInstruction;
+template<PtrTag> class CodeLocationLabel;
+template<PtrTag> class CodeLocationJump;
+template<PtrTag> class CodeLocationCall;
+template<PtrTag> class CodeLocationNearCall;
+template<PtrTag> class CodeLocationDataLabelCompact;
+template<PtrTag> class CodeLocationDataLabel32;
+template<PtrTag> class CodeLocationDataLabelPtr;
+template<PtrTag> class CodeLocationConvertibleLoad;
 
 // The CodeLocation* types are all pretty much do-nothing wrappers around
 // CodePtr (or MacroAssemblerCodePtr, to give it its full name).  These
@@ -54,169 +54,212 @@
 // offsets apart.  To reduce memory overhead we will only retain a pointer to
 // one of the instructions, and we will use the *AtOffset methods provided by
 // CodeLocationCommon to find the other points in the code to modify.
-class CodeLocationCommon : public MacroAssemblerCodePtr {
+template<PtrTag tag>
+class CodeLocationCommon : public MacroAssemblerCodePtr<tag> {
+    using Base = MacroAssemblerCodePtr<tag>;
 public:
-    CodeLocationInstruction instructionAtOffset(int offset);
-    CodeLocationLabel labelAtOffset(int offset);
-    CodeLocationJump jumpAtOffset(int offset);
-    CodeLocationCall callAtOffset(int offset);
-    CodeLocationNearCall nearCallAtOffset(int offset, NearCallMode);
-    CodeLocationDataLabelPtr dataLabelPtrAtOffset(int offset);
-    CodeLocationDataLabel32 dataLabel32AtOffset(int offset);
-    CodeLocationDataLabelCompact dataLabelCompactAtOffset(int offset);
-    CodeLocationConvertibleLoad convertibleLoadAtOffset(int offset);
+    template<PtrTag resultTag = tag> CodeLocationInstruction<resultTag> instructionAtOffset(int offset);
+    template<PtrTag resultTag = tag> CodeLocationLabel<resultTag> labelAtOffset(int offset);
+    template<PtrTag resultTag = tag> CodeLocationJump<resultTag> jumpAtOffset(int offset);
+    template<PtrTag resultTag = tag> CodeLocationCall<resultTag> callAtOffset(int offset);
+    template<PtrTag resultTag = tag> CodeLocationNearCall<resultTag> nearCallAtOffset(int offset, NearCallMode);
+    template<PtrTag resultTag = tag> CodeLocationDataLabelPtr<resultTag> dataLabelPtrAtOffset(int offset);
+    template<PtrTag resultTag = tag> CodeLocationDataLabel32<resultTag> dataLabel32AtOffset(int offset);
+    template<PtrTag resultTag = tag> CodeLocationDataLabelCompact<resultTag> dataLabelCompactAtOffset(int offset);
+    template<PtrTag resultTag = tag> CodeLocationConvertibleLoad<resultTag> convertibleLoadAtOffset(int offset);
+
+    template<typename T = void*>
+    T dataLocation() const { return Base::template dataLocation<T>(); }
 
 protected:
     CodeLocationCommon()
     {
     }
 
-    CodeLocationCommon(MacroAssemblerCodePtr location)
-        : MacroAssemblerCodePtr(location)
+    CodeLocationCommon(MacroAssemblerCodePtr<tag> location)
+        : MacroAssemblerCodePtr<tag>(location)
     {
     }
 };
 
-class CodeLocationInstruction : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationInstruction : public CodeLocationCommon<tag> {
 public:
-    CodeLocationInstruction() {}
-    explicit CodeLocationInstruction(MacroAssemblerCodePtr location)
-        : CodeLocationCommon(location) {}
+    CodeLocationInstruction() { }
+    explicit CodeLocationInstruction(MacroAssemblerCodePtr<tag> location)
+        : CodeLocationCommon<tag>(location) { }
     explicit CodeLocationInstruction(void* location)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)) {}
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)) { }
 };
 
-class CodeLocationLabel : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationLabel : public CodeLocationCommon<tag> {
 public:
-    CodeLocationLabel() {}
-    explicit CodeLocationLabel(MacroAssemblerCodePtr location)
-        : CodeLocationCommon(location) {}
+    CodeLocationLabel() { }
+    explicit CodeLocationLabel(MacroAssemblerCodePtr<tag> location)
+        : CodeLocationCommon<tag>(location) { }
     explicit CodeLocationLabel(void* location)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)) {}
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)) { }
 
-    CodeLocationLabel retagged(PtrTag oldTag, PtrTag newTag) { return CodeLocationLabel(MacroAssemblerCodePtr::retagged(oldTag, newTag)); }
+    template<PtrTag newTag>
+    CodeLocationLabel<newTag> retagged() { return CodeLocationLabel<newTag>(CodeLocationCommon<tag>::template retagged<newTag>()); }
+
+    template<typename T = void*>
+    T untaggedExecutableAddress() const { return CodeLocationCommon<tag>::template untaggedExecutableAddress<T>(); }
+
+    template<typename T = void*>
+    T dataLocation() const { return CodeLocationCommon<tag>::template dataLocation<T>(); }
 };
 
-class CodeLocationJump : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationJump : public CodeLocationCommon<tag> {
 public:
-    CodeLocationJump() {}
-    explicit CodeLocationJump(MacroAssemblerCodePtr location)
-        : CodeLocationCommon(location) {}
+    CodeLocationJump() { }
+    explicit CodeLocationJump(MacroAssemblerCodePtr<tag> location)
+        : CodeLocationCommon<tag>(location) { }
     explicit CodeLocationJump(void* location)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)) {}
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)) { }
 
-    CodeLocationJump retagged(PtrTag oldTag, PtrTag newTag) { return CodeLocationJump(MacroAssemblerCodePtr::retagged(oldTag, newTag)); }
+    template<PtrTag newTag>
+    CodeLocationJump<newTag> retagged() { return CodeLocationJump<newTag>(MacroAssemblerCodePtr<tag>::template retagged<newTag>()); }
 };
 
-class CodeLocationCall : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationCall : public CodeLocationCommon<tag> {
 public:
-    CodeLocationCall() {}
-    explicit CodeLocationCall(MacroAssemblerCodePtr location)
-        : CodeLocationCommon(location) {}
+    CodeLocationCall() { }
+    explicit CodeLocationCall(MacroAssemblerCodePtr<tag> location)
+        : CodeLocationCommon<tag>(location) { }
     explicit CodeLocationCall(void* location)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)) {}
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)) { }
+
+    template<PtrTag newTag>
+    CodeLocationCall<newTag> retagged() { return CodeLocationCall<newTag>(CodeLocationCommon<tag>::template retagged<newTag>()); }
 };
 
-class CodeLocationNearCall : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationNearCall : public CodeLocationCommon<tag> {
 public:
-    CodeLocationNearCall() {}
-    explicit CodeLocationNearCall(MacroAssemblerCodePtr location, NearCallMode callMode)
-        : CodeLocationCommon(location), m_callMode(callMode) { }
+    CodeLocationNearCall() { }
+    explicit CodeLocationNearCall(MacroAssemblerCodePtr<tag> location, NearCallMode callMode)
+        : CodeLocationCommon<tag>(location), m_callMode(callMode) { }
     explicit CodeLocationNearCall(void* location, NearCallMode callMode)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)), m_callMode(callMode) { }
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)), m_callMode(callMode) { }
     NearCallMode callMode() { return m_callMode; }
 private:
     NearCallMode m_callMode = NearCallMode::Regular;
 };
 
-class CodeLocationDataLabel32 : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationDataLabel32 : public CodeLocationCommon<tag> {
 public:
-    CodeLocationDataLabel32() {}
-    explicit CodeLocationDataLabel32(MacroAssemblerCodePtr location)
-        : CodeLocationCommon(location) {}
+    CodeLocationDataLabel32() { }
+    explicit CodeLocationDataLabel32(MacroAssemblerCodePtr<tag> location)
+        : CodeLocationCommon<tag>(location) { }
     explicit CodeLocationDataLabel32(void* location)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)) {}
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)) { }
 };
 
-class CodeLocationDataLabelCompact : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationDataLabelCompact : public CodeLocationCommon<tag> {
 public:
     CodeLocationDataLabelCompact() { }
-    explicit CodeLocationDataLabelCompact(MacroAssemblerCodePtr location)
-        : CodeLocationCommon(location) { }
+    explicit CodeLocationDataLabelCompact(MacroAssemblerCodePtr<tag> location)
+        : CodeLocationCommon<tag>(location) { }
     explicit CodeLocationDataLabelCompact(void* location)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)) { }
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)) { }
 };
 
-class CodeLocationDataLabelPtr : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationDataLabelPtr : public CodeLocationCommon<tag> {
 public:
-    CodeLocationDataLabelPtr() {}
-    explicit CodeLocationDataLabelPtr(MacroAssemblerCodePtr location)
-        : CodeLocationCommon(location) {}
+    CodeLocationDataLabelPtr() { }
+    explicit CodeLocationDataLabelPtr(MacroAssemblerCodePtr<tag> location)
+        : CodeLocationCommon<tag>(location) { }
     explicit CodeLocationDataLabelPtr(void* location)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)) {}
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)) { }
 };
 
-class CodeLocationConvertibleLoad : public CodeLocationCommon {
+template<PtrTag tag>
+class CodeLocationConvertibleLoad : public CodeLocationCommon<tag> {
 public:
     CodeLocationConvertibleLoad() { }
-    explicit CodeLocationConvertibleLoad(MacroAssemblerCodePtr location)
-        : CodeLocationCommon(location) { }
+    explicit CodeLocationConvertibleLoad(MacroAssemblerCodePtr<tag> location)
+        : CodeLocationCommon<tag>(location) { }
     explicit CodeLocationConvertibleLoad(void* location)
-        : CodeLocationCommon(MacroAssemblerCodePtr(location)) { }
+        : CodeLocationCommon<tag>(MacroAssemblerCodePtr<tag>(location)) { }
 };
 
-inline CodeLocationInstruction CodeLocationCommon::instructionAtOffset(int offset)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationInstruction<resultTag> CodeLocationCommon<tag>::instructionAtOffset(int offset)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationInstruction(dataLocation<char*>() + offset);
+    return CodeLocationInstruction<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset));
 }
 
-inline CodeLocationLabel CodeLocationCommon::labelAtOffset(int offset)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationLabel<resultTag> CodeLocationCommon<tag>::labelAtOffset(int offset)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationLabel(dataLocation<char*>() + offset);
+    return CodeLocationLabel<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset));
 }
 
-inline CodeLocationJump CodeLocationCommon::jumpAtOffset(int offset)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationJump<resultTag> CodeLocationCommon<tag>::jumpAtOffset(int offset)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationJump(dataLocation<char*>() + offset);
+    return CodeLocationJump<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset));
 }
 
-inline CodeLocationCall CodeLocationCommon::callAtOffset(int offset)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationCall<resultTag> CodeLocationCommon<tag>::callAtOffset(int offset)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationCall(dataLocation<char*>() + offset);
+    return CodeLocationCall<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset));
 }
 
-inline CodeLocationNearCall CodeLocationCommon::nearCallAtOffset(int offset, NearCallMode callMode)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationNearCall<resultTag> CodeLocationCommon<tag>::nearCallAtOffset(int offset, NearCallMode callMode)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationNearCall(dataLocation<char*>() + offset, callMode);
+    return CodeLocationNearCall<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset), callMode);
 }
 
-inline CodeLocationDataLabelPtr CodeLocationCommon::dataLabelPtrAtOffset(int offset)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationDataLabelPtr<resultTag> CodeLocationCommon<tag>::dataLabelPtrAtOffset(int offset)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationDataLabelPtr(dataLocation<char*>() + offset);
+    return CodeLocationDataLabelPtr<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset));
 }
 
-inline CodeLocationDataLabel32 CodeLocationCommon::dataLabel32AtOffset(int offset)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationDataLabel32<resultTag> CodeLocationCommon<tag>::dataLabel32AtOffset(int offset)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationDataLabel32(dataLocation<char*>() + offset);
+    return CodeLocationDataLabel32<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset));
 }
 
-inline CodeLocationDataLabelCompact CodeLocationCommon::dataLabelCompactAtOffset(int offset)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationDataLabelCompact<resultTag> CodeLocationCommon<tag>::dataLabelCompactAtOffset(int offset)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationDataLabelCompact(dataLocation<char*>() + offset);
+    return CodeLocationDataLabelCompact<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset));
 }
 
-inline CodeLocationConvertibleLoad CodeLocationCommon::convertibleLoadAtOffset(int offset)
+template<PtrTag tag>
+template<PtrTag resultTag>
+inline CodeLocationConvertibleLoad<resultTag> CodeLocationCommon<tag>::convertibleLoadAtOffset(int offset)
 {
     ASSERT_VALID_CODE_OFFSET(offset);
-    return CodeLocationConvertibleLoad(dataLocation<char*>() + offset);
+    return CodeLocationConvertibleLoad<resultTag>(tagCodePtr<resultTag>(dataLocation<char*>() + offset));
 }
 
 } // namespace JSC
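
For illustration, a minimal sketch of the templatized CodeLocation API (not part of the patch; the helper name, offset, and tag choices are illustrative, and the code assumes the JSC namespace). The *AtOffset methods keep the receiver's tag unless a different resultTag is requested explicitly:

    static CodeLocationJump<NoPtrTag> jumpNearLabel(CodeLocationLabel<NoPtrTag> label)
    {
        // Keeps the label's tag; label.jumpAtOffset<CodePtrTag>(8) would instead
        // yield a CodeLocationJump<CodePtrTag>.
        return label.jumpAtOffset(8);
    }
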
diff --git a/Source/JavaScriptCore/assembler/LinkBuffer.cpp b/Source/JavaScriptCore/assembler/LinkBuffer.cpp
index b982c28..bf55723 100644
--- a/Source/JavaScriptCore/assembler/LinkBuffer.cpp
+++ b/Source/JavaScriptCore/assembler/LinkBuffer.cpp
@@ -44,20 +44,20 @@
     return Options::dumpDisassembly();
 }
 
-LinkBuffer::CodeRef LinkBuffer::finalizeCodeWithoutDisassembly(PtrTag tag)
+LinkBuffer::CodeRef<LinkBufferPtrTag> LinkBuffer::finalizeCodeWithoutDisassemblyImpl()
 {
     performFinalization();
     
     ASSERT(m_didAllocate);
     if (m_executableMemory)
-        return CodeRef(*m_executableMemory, tag);
+        return CodeRef<LinkBufferPtrTag>(*m_executableMemory);
     
-    return CodeRef::createSelfManagedCodeRef(MacroAssemblerCodePtr(tagCodePtr(m_code, tag)));
+    return CodeRef<LinkBufferPtrTag>::createSelfManagedCodeRef(MacroAssemblerCodePtr<LinkBufferPtrTag>(tagCodePtr<LinkBufferPtrTag>(m_code)));
 }
 
-LinkBuffer::CodeRef LinkBuffer::finalizeCodeWithDisassembly(PtrTag tag, const char* format, ...)
+LinkBuffer::CodeRef<LinkBufferPtrTag> LinkBuffer::finalizeCodeWithDisassemblyImpl(const char* format, ...)
 {
-    CodeRef result = finalizeCodeWithoutDisassembly(tag);
+    CodeRef<LinkBufferPtrTag> result = finalizeCodeWithoutDisassemblyImpl();
 
     if (m_alreadyDisassembled)
         return result;
@@ -70,18 +70,19 @@
     va_end(argList);
     out.printf(":\n");
 
-    uint8_t* executableAddress = removeCodePtrTag<uint8_t*>(result.code().executableAddress());
+    uint8_t* executableAddress = result.code().untaggedExecutableAddress<uint8_t*>();
     out.printf("    Code at [%p, %p):\n", executableAddress, executableAddress + result.size());
     
     CString header = out.toCString();
     
     if (Options::asyncDisassembly()) {
-        disassembleAsynchronously(header, result, m_size, "    ");
+        CodeRef<DisassemblyPtrTag> codeRefForDisassembly = result.retagged<DisassemblyPtrTag>();
+        disassembleAsynchronously(header, WTFMove(codeRefForDisassembly), m_size, "    ");
         return result;
     }
     
     dataLog(header);
-    disassemble(result.code(), m_size, "    ", WTF::dataFile());
+    disassemble(result.retaggedCode<DisassemblyPtrTag>(), m_size, "    ", WTF::dataFile());
     
     return result;
 }
diff --git a/Source/JavaScriptCore/assembler/LinkBuffer.h b/Source/JavaScriptCore/assembler/LinkBuffer.h
index 2128610..1c4cc37 100644
--- a/Source/JavaScriptCore/assembler/LinkBuffer.h
+++ b/Source/JavaScriptCore/assembler/LinkBuffer.h
@@ -61,8 +61,8 @@
 class LinkBuffer {
     WTF_MAKE_NONCOPYABLE(LinkBuffer); WTF_MAKE_FAST_ALLOCATED;
     
-    typedef MacroAssemblerCodeRef CodeRef;
-    typedef MacroAssemblerCodePtr CodePtr;
+    template<PtrTag tag> using CodePtr = MacroAssemblerCodePtr<tag>;
+    template<PtrTag tag> using CodeRef = MacroAssemblerCodeRef<tag>;
     typedef MacroAssembler::Label Label;
     typedef MacroAssembler::Jump Jump;
     typedef MacroAssembler::PatchableJump PatchableJump;
@@ -121,32 +121,36 @@
     
     // These methods are used to link or set values at code generation time.
 
-    template<typename Func, typename = std::enable_if_t<std::is_function<typename std::remove_pointer<Func>::type>::value>>
-    void link(Call call, Func funcName, PtrTag tag)
+    template<PtrTag tag, typename Func, typename = std::enable_if_t<std::is_function<typename std::remove_pointer<Func>::type>::value>>
+    void link(Call call, Func funcName)
     {
-        FunctionPtr function(funcName, tag);
+        FunctionPtr<tag> function(funcName);
         link(call, function);
     }
 
-    void link(Call call, FunctionPtr function)
+    template<PtrTag tag>
+    void link(Call call, FunctionPtr<tag> function)
     {
         ASSERT(call.isFlagSet(Call::Linkable));
         call.m_label = applyOffset(call.m_label);
         MacroAssembler::linkCall(code(), call, function);
     }
     
-    void link(Call call, CodeLocationLabel label)
+    template<PtrTag tag>
+    void link(Call call, CodeLocationLabel<tag> label)
     {
-        link(call, FunctionPtr(label));
+        link(call, FunctionPtr<tag>(label));
     }
     
-    void link(Jump jump, CodeLocationLabel label)
+    template<PtrTag tag>
+    void link(Jump jump, CodeLocationLabel<tag> label)
     {
         jump.m_label = applyOffset(jump.m_label);
         MacroAssembler::linkJump(code(), jump, label);
     }
 
-    void link(const JumpList& list, CodeLocationLabel label)
+    template<PtrTag tag>
+    void link(const JumpList& list, CodeLocationLabel<tag> label)
     {
         for (const Jump& jump : list.jumps())
             link(jump, label);
@@ -158,7 +162,8 @@
         MacroAssembler::linkPointer(code(), target, value);
     }
 
-    void patch(DataLabelPtr label, CodeLocationLabel value)
+    template<PtrTag tag>
+    void patch(DataLabelPtr label, CodeLocationLabel<tag> value)
     {
         AssemblerLabel target = applyOffset(label.m_label);
         MacroAssembler::linkPointer(code(), target, value);
@@ -166,54 +171,63 @@
 
     // These methods are used to obtain handles to allow the code to be relinked / repatched later.
     
-    CodeLocationLabel entrypoint()
+    template<PtrTag tag>
+    CodeLocationLabel<tag> entrypoint()
     {
-        return CodeLocationLabel(code());
+        return CodeLocationLabel<tag>(tagCodePtr<tag>(code()));
     }
 
-    CodeLocationCall locationOf(Call call)
+    template<PtrTag tag>
+    CodeLocationCall<tag> locationOf(Call call)
     {
         ASSERT(call.isFlagSet(Call::Linkable));
         ASSERT(!call.isFlagSet(Call::Near));
-        return CodeLocationCall(MacroAssembler::getLinkerAddress(code(), applyOffset(call.m_label)));
+        return CodeLocationCall<tag>(MacroAssembler::getLinkerAddress<tag>(code(), applyOffset(call.m_label)));
     }
 
-    CodeLocationNearCall locationOfNearCall(Call call)
+    template<PtrTag tag>
+    CodeLocationNearCall<tag> locationOfNearCall(Call call)
     {
         ASSERT(call.isFlagSet(Call::Linkable));
         ASSERT(call.isFlagSet(Call::Near));
-        return CodeLocationNearCall(MacroAssembler::getLinkerAddress(code(), applyOffset(call.m_label)),
+        return CodeLocationNearCall<tag>(MacroAssembler::getLinkerAddress<tag>(code(), applyOffset(call.m_label)),
             call.isFlagSet(Call::Tail) ? NearCallMode::Tail : NearCallMode::Regular);
     }
 
-    CodeLocationLabel locationOf(PatchableJump jump, PtrTag tag = NoPtrTag)
+    template<PtrTag tag>
+    CodeLocationLabel<tag> locationOf(PatchableJump jump)
     {
-        return CodeLocationLabel(MacroAssembler::getLinkerAddress(code(), applyOffset(jump.m_jump.m_label), tag));
+        return CodeLocationLabel<tag>(MacroAssembler::getLinkerAddress<tag>(code(), applyOffset(jump.m_jump.m_label)));
     }
 
-    CodeLocationLabel locationOf(Label label, PtrTag tag = NoPtrTag)
+    template<PtrTag tag>
+    CodeLocationLabel<tag> locationOf(Label label)
     {
-        return CodeLocationLabel(MacroAssembler::getLinkerAddress(code(), applyOffset(label.m_label), tag));
+        return CodeLocationLabel<tag>(MacroAssembler::getLinkerAddress<tag>(code(), applyOffset(label.m_label)));
     }
 
-    CodeLocationDataLabelPtr locationOf(DataLabelPtr label)
+    template<PtrTag tag>
+    CodeLocationDataLabelPtr<tag> locationOf(DataLabelPtr label)
     {
-        return CodeLocationDataLabelPtr(MacroAssembler::getLinkerAddress(code(), applyOffset(label.m_label)));
+        return CodeLocationDataLabelPtr<tag>(MacroAssembler::getLinkerAddress<tag>(code(), applyOffset(label.m_label)));
     }
 
-    CodeLocationDataLabel32 locationOf(DataLabel32 label)
+    template<PtrTag tag>
+    CodeLocationDataLabel32<tag> locationOf(DataLabel32 label)
     {
-        return CodeLocationDataLabel32(MacroAssembler::getLinkerAddress(code(), applyOffset(label.m_label)));
+        return CodeLocationDataLabel32<tag>(MacroAssembler::getLinkerAddress<tag>(code(), applyOffset(label.m_label)));
     }
     
-    CodeLocationDataLabelCompact locationOf(DataLabelCompact label)
+    template<PtrTag tag>
+    CodeLocationDataLabelCompact<tag> locationOf(DataLabelCompact label)
     {
-        return CodeLocationDataLabelCompact(MacroAssembler::getLinkerAddress(code(), applyOffset(label.m_label)));
+        return CodeLocationDataLabelCompact<tag>(MacroAssembler::getLinkerAddress<tag>(code(), applyOffset(label.m_label)));
     }
 
-    CodeLocationConvertibleLoad locationOf(ConvertibleLoadLabel label)
+    template<PtrTag tag>
+    CodeLocationConvertibleLoad<tag> locationOf(ConvertibleLoadLabel label)
     {
-        return CodeLocationConvertibleLoad(MacroAssembler::getLinkerAddress(code(), applyOffset(label.m_label)));
+        return CodeLocationConvertibleLoad<tag>(MacroAssembler::getLinkerAddress<tag>(code(), applyOffset(label.m_label)));
     }
 
     // This method obtains the return address of the call, given as an offset from
@@ -238,13 +252,31 @@
     // complete generation of the code. Alternatively, call
     // finalizeCodeWithoutDisassembly() directly if you have your own way of
     // displaying disassembly.
-    
-    JS_EXPORT_PRIVATE CodeRef finalizeCodeWithoutDisassembly(PtrTag);
-    JS_EXPORT_PRIVATE CodeRef finalizeCodeWithDisassembly(PtrTag, const char* format, ...) WTF_ATTRIBUTE_PRINTF(3, 4);
 
-    CodePtr trampolineAt(Label label)
+    template<PtrTag tag>
+    CodeRef<tag> finalizeCodeWithoutDisassembly()
     {
-        return CodePtr(MacroAssembler::AssemblerType_T::getRelocatedAddress(code(), applyOffset(label.m_label)));
+        return finalizeCodeWithoutDisassemblyImpl().template retagged<tag>();
+    }
+
+    template<PtrTag tag, typename... Args>
+    CodeRef<tag> finalizeCodeWithDisassembly(const char* format, Args... args)
+    {
+#if COMPILER(CLANG)
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wformat-nonliteral"
+#pragma clang diagnostic ignored "-Wformat-security"
+#endif
+        return finalizeCodeWithDisassemblyImpl(format, args...).template retagged<tag>();
+#if COMPILER(CLANG)
+#pragma clang diagnostic pop
+#endif
+    }
+
+    template<PtrTag tag>
+    CodePtr<tag> trampolineAt(Label label)
+    {
+        return CodePtr<tag>(MacroAssembler::AssemblerType_T::getRelocatedAddress(code(), applyOffset(label.m_label)));
     }
 
     void* debugAddress()
@@ -258,6 +290,9 @@
     void didAlreadyDisassemble() { m_alreadyDisassembled = true; }
 
 private:
+    JS_EXPORT_PRIVATE CodeRef<LinkBufferPtrTag> finalizeCodeWithoutDisassemblyImpl();
+    JS_EXPORT_PRIVATE CodeRef<LinkBufferPtrTag> finalizeCodeWithDisassemblyImpl(const char* format, ...) WTF_ATTRIBUTE_PRINTF(2, 3);
+
 #if ENABLE(BRANCH_COMPACTION)
     int executableOffsetFor(int location)
     {
@@ -316,8 +351,8 @@
 
 #define FINALIZE_CODE_IF(condition, linkBufferReference, resultPtrTag, ...)  \
     (UNLIKELY((condition))                                              \
-        ? (linkBufferReference).finalizeCodeWithDisassembly(resultPtrTag, __VA_ARGS__) \
-        : (linkBufferReference).finalizeCodeWithoutDisassembly(resultPtrTag))
+        ? (linkBufferReference).finalizeCodeWithDisassembly<resultPtrTag>(__VA_ARGS__) \
+        : (linkBufferReference).finalizeCodeWithoutDisassembly<resultPtrTag>())
 
 bool shouldDumpDisassemblyFor(CodeBlock*);
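
For illustration, a minimal sketch of finalizing a LinkBuffer under the new scheme (not part of the patch; the helper name and the CodePtrTag choice are illustrative). The result tag is now a template argument on the finalize calls and on FINALIZE_CODE_IF's resultPtrTag parameter, rather than a runtime PtrTag argument:

    static MacroAssemblerCodeRef<CodePtrTag> finalizeExample(LinkBuffer& linkBuffer)
    {
        // Internally the LinkBufferPtrTag result is retagged to the requested tag.
        return linkBuffer.finalizeCodeWithoutDisassembly<CodePtrTag>();
    }
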
 
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerARM.h b/Source/JavaScriptCore/assembler/MacroAssemblerARM.h
index b9f990c..cb91d28 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerARM.h
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerARM.h
@@ -1505,12 +1505,14 @@
         m_assembler.dmbISHST();
     }
 
-    static FunctionPtr readCallTarget(CodeLocationCall call)
+    template<PtrTag resultTag, PtrTag locationTag>
+    static FunctionPtr<resultTag> readCallTarget(CodeLocationCall<locationTag> call)
     {
-        return FunctionPtr(reinterpret_cast<void(*)()>(ARMAssembler::readCallTarget(call.dataLocation())), CodePtrTag);
+        return FunctionPtr<resultTag>(reinterpret_cast<void(*)()>(ARMAssembler::readCallTarget(call.dataLocation())));
     }
 
-    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
+    template<PtrTag startTag, PtrTag destTag>
+    static void replaceWithJump(CodeLocationLabel<startTag> instructionStart, CodeLocationLabel<destTag> destination)
     {
         ARMAssembler::replaceWithJump(instructionStart.dataLocation(), destination.dataLocation());
     }
@@ -1528,44 +1530,52 @@
     static bool canJumpReplacePatchableBranchPtrWithPatch() { return false; }
     static bool canJumpReplacePatchableBranch32WithPatch() { return false; }
 
-    static CodeLocationLabel startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32)
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32<tag>)
     {
         UNREACHABLE_FOR_PLATFORM();
-        return CodeLocationLabel();
+        return CodeLocationLabel<tag>();
     }
 
-    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr)
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr<tag>)
     {
         UNREACHABLE_FOR_PLATFORM();
-        return CodeLocationLabel();
+        return CodeLocationLabel<tag>();
     }
 
-    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr<tag> label)
     {
         return label.labelAtOffset(0);
     }
 
-    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID reg, void* initialValue)
+    template<PtrTag tag>
+    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel<tag> instructionStart, RegisterID reg, void* initialValue)
     {
         ARMAssembler::revertBranchPtrWithPatch(instructionStart.dataLocation(), reg, reinterpret_cast<uintptr_t>(initialValue) & 0xffff);
     }
 
-    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel, Address, int32_t)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel<tag>, Address, int32_t)
     {
         UNREACHABLE_FOR_PLATFORM();
     }
 
-    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel, Address, void*)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel<tag>, Address, void*)
     {
         UNREACHABLE_FOR_PLATFORM();
     }
 
-    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, CodeLocationLabel<destTag> destination)
     {
         ARMAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
     }
 
-    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, FunctionPtr<destTag> destination)
     {
         ARMAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
     }
@@ -1619,7 +1629,8 @@
             m_assembler.cmp(ARMRegisters::S1, m_assembler.getImm(right.m_value, ARMRegisters::S0));
     }
 
-    static void linkCall(void* code, Call call, FunctionPtr function)
+    template<PtrTag tag>
+    static void linkCall(void* code, Call call, FunctionPtr<tag> function)
     {
         if (call.isFlagSet(Call::Tail))
             ARMAssembler::linkJump(code, call.m_label, function.executableAddress());
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerARM64.h b/Source/JavaScriptCore/assembler/MacroAssemblerARM64.h
index f0ee3fd..52481bb 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerARM64.h
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerARM64.h
@@ -3772,17 +3772,20 @@
         }
     }
 
-    static FunctionPtr readCallTarget(CodeLocationCall call)
+    template<PtrTag resultTag, PtrTag locationTag>
+    static FunctionPtr<resultTag> readCallTarget(CodeLocationCall<locationTag> call)
     {
-        return FunctionPtr(MacroAssemblerCodePtr(Assembler::readCallTarget(call.dataLocation())));
+        return FunctionPtr<resultTag>(MacroAssemblerCodePtr<resultTag>(Assembler::readCallTarget(call.dataLocation())));
     }
 
-    static void replaceWithVMHalt(CodeLocationLabel instructionStart)
+    template<PtrTag tag>
+    static void replaceWithVMHalt(CodeLocationLabel<tag> instructionStart)
     {
-        Assembler::replaceWithVMHalt(instructionStart.executableAddress());
+        Assembler::replaceWithVMHalt(instructionStart.dataLocation());
     }
 
-    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
+    template<PtrTag startTag, PtrTag destTag>
+    static void replaceWithJump(CodeLocationLabel<startTag> instructionStart, CodeLocationLabel<destTag> destination)
     {
         Assembler::replaceWithJump(instructionStart.dataLocation(), destination.dataLocation());
     }
@@ -3806,45 +3809,53 @@
 
     static bool canJumpReplacePatchableBranchPtrWithPatch() { return false; }
     static bool canJumpReplacePatchableBranch32WithPatch() { return false; }
-    
-    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr<tag> label)
     {
         return label.labelAtOffset(0);
     }
-    
-    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr<tag>)
     {
         UNREACHABLE_FOR_PLATFORM();
-        return CodeLocationLabel();
+        return CodeLocationLabel<tag>();
     }
-    
-    static CodeLocationLabel startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32<tag>)
     {
         UNREACHABLE_FOR_PLATFORM();
-        return CodeLocationLabel();
+        return CodeLocationLabel<tag>();
     }
-    
-    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID, void* initialValue)
+
+    template<PtrTag tag>
+    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel<tag> instructionStart, RegisterID, void* initialValue)
     {
         reemitInitialMoveWithPatch(instructionStart.dataLocation(), initialValue);
     }
-    
-    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel, Address, void*)
+
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel<tag>, Address, void*)
     {
         UNREACHABLE_FOR_PLATFORM();
     }
 
-    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel, Address, int32_t)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel<tag>, Address, int32_t)
     {
         UNREACHABLE_FOR_PLATFORM();
     }
 
-    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, CodeLocationLabel<destTag> destination)
     {
         Assembler::repatchPointer(call.dataLabelPtrAtOffset(REPATCH_OFFSET_CALL_TO_POINTER).dataLocation(), destination.executableAddress());
     }
 
-    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, FunctionPtr<destTag> destination)
     {
         Assembler::repatchPointer(call.dataLabelPtrAtOffset(REPATCH_OFFSET_CALL_TO_POINTER).dataLocation(), destination.executableAddress());
     }
@@ -4438,17 +4449,15 @@
 
     friend class LinkBuffer;
 
-    static void linkCall(void* code, Call call, FunctionPtr function)
+    template<PtrTag tag>
+    static void linkCall(void* code, Call call, FunctionPtr<tag> function)
     {
         if (!call.isFlagSet(Call::Near))
             Assembler::linkPointer(code, call.m_label.labelAtOffset(REPATCH_OFFSET_CALL_TO_POINTER), function.executableAddress());
-        else if (call.isFlagSet(Call::Tail)) {
-            assertIsNotTagged(function.executableAddress());
-            Assembler::linkJump(code, call.m_label, function.executableAddress());
-        } else {
-            assertIsNotTagged(function.executableAddress());
-            Assembler::linkCall(code, call.m_label, function.executableAddress());
-        }
+        else if (call.isFlagSet(Call::Tail))
+            Assembler::linkJump(code, call.m_label, function.template retaggedExecutableAddress<NoPtrTag>());
+        else
+            Assembler::linkCall(code, call.m_label, function.template retaggedExecutableAddress<NoPtrTag>());
     }
 
     CachedTempRegister m_dataMemoryTempRegister;
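
For illustration, a minimal sketch of the templatized readCallTarget (not part of the patch; the helper name and tag choices are illustrative). The desired result tag is named explicitly, while the call-site tag is deduced from the CodeLocationCall argument:

    static FunctionPtr<CFunctionPtrTag> readTargetExample(CodeLocationCall<NoPtrTag> call)
    {
        // resultTag is given explicitly; locationTag (NoPtrTag here) is deduced from 'call'.
        return MacroAssembler::readCallTarget<CFunctionPtrTag>(call);
    }
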
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerARMv7.h b/Source/JavaScriptCore/assembler/MacroAssemblerARMv7.h
index 866f80e..cd10459 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerARMv7.h
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerARMv7.h
@@ -1382,7 +1382,8 @@
         m_assembler.dmbISHST();
     }
 
-    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
+    template<PtrTag startTag, PtrTag destTag>
+    static void replaceWithJump(CodeLocationLabel<startTag> instructionStart, CodeLocationLabel<destTag> destination)
     {
         ARMv7Assembler::replaceWithJump(instructionStart.dataLocation(), destination.dataLocation());
     }
@@ -1993,22 +1994,24 @@
         return tailRecursiveCall();
     }
 
-    
-    static FunctionPtr readCallTarget(CodeLocationCall call)
+    template<PtrTag resultTag, PtrTag locationTag>
+    static FunctionPtr<resultTag> readCallTarget(CodeLocationCall<locationTag> call)
     {
-        return FunctionPtr(reinterpret_cast<void(*)()>(ARMv7Assembler::readCallTarget(call.dataLocation())), CodePtrTag);
+        return FunctionPtr<resultTag>(reinterpret_cast<void(*)()>(ARMv7Assembler::readCallTarget(call.dataLocation())));
     }
     
     static bool canJumpReplacePatchableBranchPtrWithPatch() { return false; }
     static bool canJumpReplacePatchableBranch32WithPatch() { return false; }
     
-    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr<tag> label)
     {
         const unsigned twoWordOpSize = 4;
         return label.labelAtOffset(-twoWordOpSize * 2);
     }
     
-    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID rd, void* initialValue)
+    template<PtrTag tag>
+    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel<tag> instructionStart, RegisterID rd, void* initialValue)
     {
 #if OS(LINUX)
         ARMv7Assembler::revertJumpTo_movT3movtcmpT2(instructionStart.dataLocation(), rd, dataTempRegister, reinterpret_cast<uintptr_t>(initialValue));
@@ -2017,35 +2020,41 @@
         ARMv7Assembler::revertJumpTo_movT3(instructionStart.dataLocation(), dataTempRegister, ARMThumbImmediate::makeUInt16(reinterpret_cast<uintptr_t>(initialValue) & 0xffff));
 #endif
     }
-    
-    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr<tag>)
     {
         UNREACHABLE_FOR_PLATFORM();
-        return CodeLocationLabel();
+        return CodeLocationLabel<tag>();
     }
-    
-    static CodeLocationLabel startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32<tag>)
     {
         UNREACHABLE_FOR_PLATFORM();
-        return CodeLocationLabel();
+        return CodeLocationLabel<tag>();
     }
-    
-    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel, Address, void*)
+
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel<tag>, Address, void*)
     {
         UNREACHABLE_FOR_PLATFORM();
     }
 
-    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel, Address, int32_t)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel<tag>, Address, int32_t)
     {
         UNREACHABLE_FOR_PLATFORM();
     }
 
-    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, CodeLocationLabel<destTag> destination)
     {
         ARMv7Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
     }
 
-    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, FunctionPtr<destTag> destination)
     {
         ARMv7Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
     }
@@ -2144,7 +2153,8 @@
 private:
     friend class LinkBuffer;
 
-    static void linkCall(void* code, Call call, FunctionPtr function)
+    template<PtrTag tag>
+    static void linkCall(void* code, Call call, FunctionPtr<tag> function)
     {
         if (call.isFlagSet(Call::Tail))
             ARMv7Assembler::linkJump(code, call.m_label, function.executableAddress());
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerCodeRef.cpp b/Source/JavaScriptCore/assembler/MacroAssemblerCodeRef.cpp
index bb86961..35aeba7 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerCodeRef.cpp
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerCodeRef.cpp
@@ -28,62 +28,41 @@
 
 #include "Disassembler.h"
 #include "JSCInlines.h"
-#include "LLIntData.h"
+#include "PtrTag.h"
 #include <mutex>
 
 namespace JSC {
 
-MacroAssemblerCodePtr MacroAssemblerCodePtr::createLLIntCodePtr(OpcodeID opcodeID)
+void MacroAssemblerCodePtrBase::dumpWithName(void* executableAddress, void* dataLocation, const char* name, PrintStream& out)
 {
-    ASSERT(opcodeID >= NUMBER_OF_BYTECODE_IDS);
-    return createFromExecutableAddress(LLInt::getCodePtr(opcodeID));
-}
-
-void MacroAssemblerCodePtr::dumpWithName(const char* name, PrintStream& out) const
-{
-    if (!m_value) {
+    if (!executableAddress) {
         out.print(name, "(null)");
         return;
     }
-    if (executableAddress() == dataLocation()) {
-        out.print(name, "(", RawPointer(executableAddress()), ")");
+    if (executableAddress == dataLocation) {
+        out.print(name, "(", RawPointer(executableAddress), ")");
         return;
     }
-    out.print(name, "(executable = ", RawPointer(executableAddress()), ", dataLocation = ", RawPointer(dataLocation()), ")");
+    out.print(name, "(executable = ", RawPointer(executableAddress), ", dataLocation = ", RawPointer(dataLocation), ")");
 }
 
-void MacroAssemblerCodePtr::dump(PrintStream& out) const
+bool MacroAssemblerCodeRefBase::tryToDisassemble(MacroAssemblerCodePtr<DisassemblyPtrTag> codePtr, size_t size, const char* prefix, PrintStream& out)
 {
-    dumpWithName("CodePtr", out);
+    return JSC::tryToDisassemble(codePtr, size, prefix, out);
 }
 
-MacroAssemblerCodeRef MacroAssemblerCodeRef::createLLIntCodeRef(OpcodeID opcodeID)
+bool MacroAssemblerCodeRefBase::tryToDisassemble(MacroAssemblerCodePtr<DisassemblyPtrTag> codePtr, size_t size, const char* prefix)
 {
-    return createSelfManagedCodeRef(MacroAssemblerCodePtr::createLLIntCodePtr(opcodeID));
+    return tryToDisassemble(codePtr, size, prefix, WTF::dataFile());
 }
 
-bool MacroAssemblerCodeRef::tryToDisassemble(PrintStream& out, const char* prefix) const
-{
-    return JSC::tryToDisassemble(m_codePtr, size(), prefix, out);
-}
-
-bool MacroAssemblerCodeRef::tryToDisassemble(const char* prefix) const
-{
-    return tryToDisassemble(WTF::dataFile(), prefix);
-}
-
-CString MacroAssemblerCodeRef::disassembly() const
+CString MacroAssemblerCodeRefBase::disassembly(MacroAssemblerCodePtr<DisassemblyPtrTag> codePtr, size_t size)
 {
     StringPrintStream out;
-    if (!tryToDisassemble(out, ""))
+    if (!tryToDisassemble(codePtr, size, "", out))
         return CString();
     return out.toCString();
 }
 
-void MacroAssemblerCodeRef::dump(PrintStream& out) const
-{
-    m_codePtr.dumpWithName("CodeRef", out);
-}
-
 } // namespace JSC
 
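    The .cpp now holds only untemplated helpers. The idiom, sketched with hypothetical names
    (not the real classes): an out-of-line base class carries the shared code, and the
    templated class forwards to it, so nothing is duplicated per PtrTag instantiation.

        class ExampleCodePtrBase {
        protected:
            static void dumpWithName(void* executableAddress, void* dataLocation, const char* name, PrintStream&);
        };

        template<PtrTag tag>
        class ExampleCodePtr : private ExampleCodePtrBase {
        public:
            void* executableAddress() const;
            void* dataLocation() const;
            void dump(PrintStream& out) const
            {
                // One shared, out-of-line implementation serves every tag.
                ExampleCodePtrBase::dumpWithName(executableAddress(), dataLocation(), "CodePtr", out);
            }
        };
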
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerCodeRef.h b/Source/JavaScriptCore/assembler/MacroAssemblerCodeRef.h
index 0f9fa06..a894874 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerCodeRef.h
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerCodeRef.h
@@ -40,11 +40,14 @@
 // into the processor are decorated with the bottom bit set, while traditional ARM has
 // the lower bit clear. Since we don't know what kind of pointer, we check for both
 // decorated and undecorated null.
+#define ASSERT_NULL_OR_VALID_CODE_POINTER(ptr) \
+    ASSERT(!ptr || reinterpret_cast<intptr_t>(ptr) & ~1)
 #define ASSERT_VALID_CODE_POINTER(ptr) \
     ASSERT(reinterpret_cast<intptr_t>(ptr) & ~1)
 #define ASSERT_VALID_CODE_OFFSET(offset) \
     ASSERT(!(offset & 1)) // Must be multiple of 2.
 #else
+#define ASSERT_NULL_OR_VALID_CODE_POINTER(ptr) // Anything goes!
 #define ASSERT_VALID_CODE_POINTER(ptr) \
     ASSERT(ptr)
 #define ASSERT_VALID_CODE_OFFSET(offset) // Anything goes!
@@ -52,7 +55,7 @@
 
 namespace JSC {
 
-class MacroAssemblerCodePtr;
+template<PtrTag> class MacroAssemblerCodePtr;
 
 enum OpcodeID : unsigned;
 
@@ -60,17 +63,19 @@
 //
 // FunctionPtr should be used to wrap pointers to C/C++ functions in JSC
 // (particularly, the stub functions).
+template<PtrTag tag = CFunctionPtrTag>
 class FunctionPtr {
 public:
     FunctionPtr() { }
+    FunctionPtr(std::nullptr_t) { }
 
     template<typename ReturnType, typename... Arguments>
-    FunctionPtr(ReturnType(*value)(Arguments...), PtrTag tag = CFunctionPtrTag)
-        : m_value(tagCFunctionPtr<void*>(value, tag))
+    FunctionPtr(ReturnType(*value)(Arguments...))
+        : m_value(tagCFunctionPtr<void*, tag>(value))
     {
-        assertIsCFunctionPtr(value);
+        assertIsNullOrCFunctionPtr(value);
         PoisonedMasmPtr::assertIsNotPoisoned(m_value);
-        ASSERT_VALID_CODE_POINTER(m_value);
+        ASSERT_NULL_OR_VALID_CODE_POINTER(m_value);
     }
 
 // MSVC doesn't seem to treat functions with different calling conventions as
@@ -78,12 +83,12 @@
 #if CALLING_CONVENTION_IS_STDCALL && !OS(WINDOWS)
 
     template<typename ReturnType, typename... Arguments>
-    FunctionPtr(ReturnType(CDECL *value)(Arguments...), PtrTag tag = CFunctionPtrTag)
-        : m_value(tagCFunctionPtr<void*>(value, tag))
+    FunctionPtr(ReturnType(CDECL *value)(Arguments...))
+        : m_value(tagCFunctionPtr<void*, tag>(value))
     {
-        assertIsCFunctionPtr(value);
+        assertIsNullOrCFunctionPtr(value);
         PoisonedMasmPtr::assertIsNotPoisoned(m_value);
-        ASSERT_VALID_CODE_POINTER(m_value);
+        ASSERT_NULL_OR_VALID_CODE_POINTER(m_value);
     }
 
 #endif // CALLING_CONVENTION_IS_STDCALL && !OS(WINDOWS)
@@ -91,37 +96,37 @@
 #if COMPILER_SUPPORTS(FASTCALL_CALLING_CONVENTION)
 
     template<typename ReturnType, typename... Arguments>
-    FunctionPtr(ReturnType(FASTCALL *value)(Arguments...), PtrTag tag = CFunctionPtrTag)
-        : m_value(tagCFunctionPtr<void*>(value, tag))
+    FunctionPtr(ReturnType(FASTCALL *value)(Arguments...))
+        : m_value(tagCFunctionPtr<void*, tag>(value))
     {
-        assertIsCFunctionPtr(value);
+        assertIsNullOrCFunctionPtr(value);
         PoisonedMasmPtr::assertIsNotPoisoned(m_value);
-        ASSERT_VALID_CODE_POINTER(m_value);
+        ASSERT_NULL_OR_VALID_CODE_POINTER(m_value);
     }
 
 #endif // COMPILER_SUPPORTS(FASTCALL_CALLING_CONVENTION)
 
     template<typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value && !std::is_function<typename std::remove_pointer<PtrType>::type>::value>>
-    explicit FunctionPtr(PtrType value, PtrTag tag)
+    explicit FunctionPtr(PtrType value)
         // Using a C-style cast here to avoid compiler error on RVTC:
         // Error:  #694: reinterpret_cast cannot cast away const or other type qualifiers
         // (I guess on RVTC function pointers have a different constness to GCC/MSVC?)
-        : m_value(tagCFunctionPtr<void*>(value, tag))
+        : m_value(tagCFunctionPtr<void*, tag>(value))
     {
-        assertIsCFunctionPtr(value);
+        assertIsNullOrCFunctionPtr(value);
         PoisonedMasmPtr::assertIsNotPoisoned(m_value);
-        ASSERT_VALID_CODE_POINTER(m_value);
+        ASSERT_NULL_OR_VALID_CODE_POINTER(m_value);
     }
 
-    explicit FunctionPtr(FunctionPtr other, PtrTag tag)
-        : m_value(tagCFunctionPtr<void*>(other.executableAddress(), tag))
-    {
-        assertIsCFunctionPtr(other.executableAddress());
-        PoisonedMasmPtr::assertIsNotPoisoned(m_value);
-        ASSERT_VALID_CODE_POINTER(m_value);
-    }
+    explicit FunctionPtr(MacroAssemblerCodePtr<tag>);
 
-    explicit FunctionPtr(MacroAssemblerCodePtr);
+    template<PtrTag otherTag>
+    FunctionPtr<otherTag> retagged() const
+    {
+        if (!m_value)
+            return FunctionPtr<otherTag>();
+        return FunctionPtr<otherTag>(*this);
+    }
 
     void* executableAddress() const
     {
@@ -129,16 +134,36 @@
         return m_value;
     }
 
+    template<PtrTag newTag>
+    void* retaggedExecutableAddress() const
+    {
+        PoisonedMasmPtr::assertIsNotPoisoned(m_value);
+        return retagCodePtr<tag, newTag>(m_value);
+    }
+
     explicit operator bool() const { return !!m_value; }
     bool operator!() const { return !m_value; }
 
+    bool operator==(const FunctionPtr& other) const { return m_value == other.m_value; }
+    bool operator!=(const FunctionPtr& other) const { return m_value != other.m_value; }
+
 private:
+    template<PtrTag otherTag>
+    explicit FunctionPtr(const FunctionPtr<otherTag>& other)
+        : m_value(retagCodePtr<otherTag, tag>(other.executableAddress()))
+    {
+        PoisonedMasmPtr::assertIsNotPoisoned(m_value);
+        ASSERT_NULL_OR_VALID_CODE_POINTER(m_value);
+    }
+
     void* m_value { nullptr };
+
+    template<PtrTag> friend class FunctionPtr;
 };
 
-static_assert(sizeof(FunctionPtr) == sizeof(void*), "");
+static_assert(sizeof(FunctionPtr<CFunctionPtrTag>) == sizeof(void*), "");
 #if COMPILER_SUPPORTS(BUILTIN_IS_TRIVIALLY_COPYABLE)
-static_assert(__is_trivially_copyable(FunctionPtr), "");
+static_assert(__is_trivially_copyable(FunctionPtr<CFunctionPtrTag>), "");
 #endif
 
 // ReturnAddressPtr:
@@ -158,8 +183,9 @@
         ASSERT_VALID_CODE_POINTER(m_value);
     }
 
-    explicit ReturnAddressPtr(FunctionPtr function)
-        : m_value(function.executableAddress())
+    template<PtrTag tag>
+    explicit ReturnAddressPtr(FunctionPtr<tag> function)
+        : m_value(untagCodePtr<tag>(function.executableAddress()))
     {
         PoisonedMasmPtr::assertIsNotPoisoned(m_value);
         ASSERT_VALID_CODE_POINTER(m_value);
@@ -183,9 +209,16 @@
 // MacroAssemblerCodePtr:
 //
 // MacroAssemblerCodePtr should be used to wrap pointers to JIT generated code.
-class MacroAssemblerCodePtr {
+class MacroAssemblerCodePtrBase {
+protected:
+    static void dumpWithName(void* executableAddress, void* dataLocation, const char* name, PrintStream& out);
+};
+
+template<PtrTag tag>
+class MacroAssemblerCodePtr : private MacroAssemblerCodePtrBase {
 public:
-    MacroAssemblerCodePtr() { }
+    MacroAssemblerCodePtr() = default;
+    MacroAssemblerCodePtr(std::nullptr_t) : m_value(nullptr) { }
 
     explicit MacroAssemblerCodePtr(void* value)
 #if CPU(ARM_THUMB2)
@@ -195,6 +228,7 @@
         : m_value(value)
 #endif
     {
+        assertIsTaggedWith(value, tag);
         m_value.assertIsPoisoned();
         ASSERT(value);
 #if CPU(ARM_THUMB2)
@@ -207,17 +241,17 @@
     {
         ASSERT(value);
         ASSERT_VALID_CODE_POINTER(value);
+        assertIsTaggedWith(value, tag);
         MacroAssemblerCodePtr result;
         result.m_value = PoisonedMasmPtr(value);
         result.m_value.assertIsPoisoned();
         return result;
     }
 
-    static MacroAssemblerCodePtr createLLIntCodePtr(OpcodeID codeId);
-
     explicit MacroAssemblerCodePtr(ReturnAddressPtr ra)
-        : m_value(ra.value())
+        : m_value(tagCodePtr<tag>(ra.value()))
     {
+        assertIsNotTagged(ra.value());
         ASSERT(ra.value());
         m_value.assertIsPoisoned();
         ASSERT_VALID_CODE_POINTER(m_value.unpoisoned());
@@ -225,9 +259,12 @@
 
     PoisonedMasmPtr poisonedPtr() const { return m_value; }
 
-    MacroAssemblerCodePtr retagged(PtrTag oldTag, PtrTag newTag) const
+    template<PtrTag newTag>
+    MacroAssemblerCodePtr<newTag> retagged() const
     {
-        return MacroAssemblerCodePtr::createFromExecutableAddress(retagCodePtr(executableAddress(), oldTag, newTag));
+        if (!m_value)
+            return MacroAssemblerCodePtr<newTag>();
+        return MacroAssemblerCodePtr<newTag>::createFromExecutableAddress(retaggedExecutableAddress<newTag>());
     }
 
     template<typename T = void*>
@@ -236,6 +273,21 @@
         m_value.assertIsPoisoned();
         return m_value.unpoisoned<T>();
     }
+
+    template<typename T = void*>
+    T untaggedExecutableAddress() const
+    {
+        m_value.assertIsPoisoned();
+        return untagCodePtr<T, tag>(m_value.unpoisoned());
+    }
+
+    template<PtrTag newTag, typename T = void*>
+    T retaggedExecutableAddress() const
+    {
+        m_value.assertIsPoisoned();
+        return retagCodePtr<T, tag, newTag>(m_value.unpoisoned());
+    }
+
 #if CPU(ARM_THUMB2)
     // To use this pointer as a data address remove the decoration.
     template<typename T = void*>
@@ -251,7 +303,7 @@
     {
         m_value.assertIsPoisoned();
         ASSERT_VALID_CODE_POINTER(m_value);
-        return bitwise_cast<T>(m_value ? removeCodePtrTag(m_value.unpoisoned()) : nullptr);
+        return untagCodePtr<T, tag>(m_value.unpoisoned());
     }
 #endif
 
@@ -281,10 +333,13 @@
     template<typename T, typename = std::enable_if_t<!std::is_same<T, bool>::value>>
     operator T() = delete;
 
-    void dumpWithName(const char* name, PrintStream& out) const;
-    
-    void dump(PrintStream& out) const;
-    
+    void dumpWithName(const char* name, PrintStream& out) const
+    {
+        MacroAssemblerCodePtrBase::dumpWithName(executableAddress(), dataLocation(), name, out);
+    }
+
+    void dump(PrintStream& out) const { dumpWithName("CodePtr", out); }
+
     enum EmptyValueTag { EmptyValue };
     enum DeletedValueTag { DeletedValue };
     
@@ -310,9 +365,10 @@
     PoisonedMasmPtr m_value;
 };
 
+template<PtrTag tag>
 struct MacroAssemblerCodePtrHash {
-    static unsigned hash(const MacroAssemblerCodePtr& ptr) { return ptr.hash(); }
-    static bool equal(const MacroAssemblerCodePtr& a, const MacroAssemblerCodePtr& b)
+    static unsigned hash(const MacroAssemblerCodePtr<tag>& ptr) { return ptr.hash(); }
+    static bool equal(const MacroAssemblerCodePtr<tag>& a, const MacroAssemblerCodePtr<tag>& b)
     {
         return a == b;
     }
@@ -324,23 +380,29 @@
 // A reference to a section of JIT generated code.  A CodeRef consists of a
 // pointer to the code, and a ref pointer to the pool from within which it
 // was allocated.
-class MacroAssemblerCodeRef {
+class MacroAssemblerCodeRefBase {
+protected:
+    static bool tryToDisassemble(MacroAssemblerCodePtr<DisassemblyPtrTag>, size_t, const char* prefix, PrintStream& out);
+    static bool tryToDisassemble(MacroAssemblerCodePtr<DisassemblyPtrTag>, size_t, const char* prefix);
+    JS_EXPORT_PRIVATE static CString disassembly(MacroAssemblerCodePtr<DisassemblyPtrTag>, size_t);
+};
+
+template<PtrTag tag>
+class MacroAssemblerCodeRef : private MacroAssemblerCodeRefBase {
 private:
     // This is private because it's dangerous enough that we want uses of it
     // to be easy to find - hence the static create method below.
-    explicit MacroAssemblerCodeRef(MacroAssemblerCodePtr codePtr)
+    explicit MacroAssemblerCodeRef(MacroAssemblerCodePtr<tag> codePtr)
         : m_codePtr(codePtr)
     {
         ASSERT(m_codePtr);
     }
 
 public:
-    MacroAssemblerCodeRef()
-    {
-    }
+    MacroAssemblerCodeRef() = default;
 
-    MacroAssemblerCodeRef(Ref<ExecutableMemoryHandle>&& executableMemory, PtrTag tag)
-        : m_codePtr(tagCodePtr(executableMemory->start(), tag))
+    MacroAssemblerCodeRef(Ref<ExecutableMemoryHandle>&& executableMemory)
+        : m_codePtr(tagCodePtr<tag>(executableMemory->start()))
         , m_executableMemory(WTFMove(executableMemory))
     {
         ASSERT(m_executableMemory->isManaged());
@@ -351,27 +413,31 @@
     // Use this only when you know that the codePtr refers to code that is
     // already being kept alive through some other means. Typically this means
     // that codePtr is immortal.
-    static MacroAssemblerCodeRef createSelfManagedCodeRef(MacroAssemblerCodePtr codePtr)
+    static MacroAssemblerCodeRef createSelfManagedCodeRef(MacroAssemblerCodePtr<tag> codePtr)
     {
         return MacroAssemblerCodeRef(codePtr);
     }
     
-    // Helper for creating self-managed code refs from LLInt.
-    static MacroAssemblerCodeRef createLLIntCodeRef(OpcodeID codeId);
-
     ExecutableMemoryHandle* executableMemory() const
     {
         return m_executableMemory.get();
     }
     
-    MacroAssemblerCodePtr code() const
+    MacroAssemblerCodePtr<tag> code() const
     {
         return m_codePtr;
     }
 
-    MacroAssemblerCodePtr retaggedCode(PtrTag oldTag, PtrTag newTag) const
+    template<PtrTag newTag>
+    MacroAssemblerCodePtr<newTag> retaggedCode() const
     {
-        return m_codePtr.retagged(oldTag, newTag);
+        return m_codePtr.template retagged<newTag>();
+    }
+
+    template<PtrTag newTag>
+    MacroAssemblerCodeRef<newTag> retagged() const
+    {
+        return MacroAssemblerCodeRef<newTag>(*this);
     }
 
     size_t size() const
@@ -381,22 +447,43 @@
         return m_executableMemory->sizeInBytes();
     }
 
-    bool tryToDisassemble(PrintStream& out, const char* prefix = "") const;
+    bool tryToDisassemble(PrintStream& out, const char* prefix = "") const
+    {
+        return tryToDisassemble(retaggedCode<DisassemblyPtrTag>(), size(), prefix, out);
+    }
     
-    bool tryToDisassemble(const char* prefix = "") const;
+    bool tryToDisassemble(const char* prefix = "") const
+    {
+        return tryToDisassemble(retaggedCode<DisassemblyPtrTag>(), size(), prefix);
+    }
     
-    JS_EXPORT_PRIVATE CString disassembly() const;
+    CString disassembly() const
+    {
+        return MacroAssemblerCodeRefBase::disassembly(retaggedCode<DisassemblyPtrTag>(), size());
+    }
     
     explicit operator bool() const { return !!m_codePtr; }
     
-    void dump(PrintStream& out) const;
+    void dump(PrintStream& out) const
+    {
+        m_codePtr.dumpWithName("CodeRef", out);
+    }
 
 private:
-    MacroAssemblerCodePtr m_codePtr;
+    template<PtrTag otherTag>
+    MacroAssemblerCodeRef(const MacroAssemblerCodeRef<otherTag>& otherCodeRef)
+        : m_codePtr(otherCodeRef.code().template retaggedExecutableAddress<tag>())
+        , m_executableMemory(otherCodeRef.m_executableMemory)
+    { }
+
+    MacroAssemblerCodePtr<tag> m_codePtr;
     RefPtr<ExecutableMemoryHandle> m_executableMemory;
+
+    template<PtrTag> friend class MacroAssemblerCodeRef;
 };
 
-inline FunctionPtr::FunctionPtr(MacroAssemblerCodePtr ptr)
+template<PtrTag tag>
+inline FunctionPtr<tag>::FunctionPtr(MacroAssemblerCodePtr<tag> ptr)
     : m_value(ptr.executableAddress())
 {
     PoisonedMasmPtr::assertIsNotPoisoned(m_value);
@@ -407,11 +494,11 @@
 namespace WTF {
 
 template<typename T> struct DefaultHash;
-template<> struct DefaultHash<JSC::MacroAssemblerCodePtr> {
-    typedef JSC::MacroAssemblerCodePtrHash Hash;
+template<JSC::PtrTag tag> struct DefaultHash<JSC::MacroAssemblerCodePtr<tag>> {
+    typedef JSC::MacroAssemblerCodePtrHash<tag> Hash;
 };
 
 template<typename T> struct HashTraits;
-template<> struct HashTraits<JSC::MacroAssemblerCodePtr> : public CustomHashTraits<JSC::MacroAssemblerCodePtr> { };
+template<JSC::PtrTag tag> struct HashTraits<JSC::MacroAssemblerCodePtr<tag>> : public CustomHashTraits<JSC::MacroAssemblerCodePtr<tag>> { };
 
 } // namespace WTF
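
    A usage sketch of the retagging and hashing interfaces above (not part of the patch):
    converting a code pointer from one tag to another is explicit and type-checked, and the
    per-tag DefaultHash/HashTraits specializations let tagged pointers serve as hash keys.

        static void exampleRetagAndHash(MacroAssemblerCodeRef<JSEntryPtrTag> codeRef)
        {
            MacroAssemblerCodePtr<JSEntryPtrTag> entry = codeRef.code();

            // Re-sign the pointer for a different use; the tags appear in the types.
            MacroAssemblerCodePtr<DisassemblyPtrTag> forDump = entry.retagged<DisassemblyPtrTag>();
            UNUSED_PARAM(forDump);

            // The templatized DefaultHash/HashTraits specializations kick in here.
            HashSet<MacroAssemblerCodePtr<JSEntryPtrTag>> seen;
            seen.add(entry);
        }
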
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerMIPS.h b/Source/JavaScriptCore/assembler/MacroAssemblerMIPS.h
index baee36f..b946586 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerMIPS.h
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerMIPS.h
@@ -3395,12 +3395,14 @@
         abortWithReason(reason);
     }
 
-    static FunctionPtr readCallTarget(CodeLocationCall call)
+    template<PtrTag resultTag, PtrTag locationTag>
+    static FunctionPtr<resultTag> readCallTarget(CodeLocationCall<locationTag> call)
     {
-        return FunctionPtr(reinterpret_cast<void(*)()>(MIPSAssembler::readCallTarget(call.dataLocation())), CodePtrTag);
+        return FunctionPtr<resultTag>(reinterpret_cast<void(*)()>(MIPSAssembler::readCallTarget(call.dataLocation())));
     }
 
-    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
+    template<PtrTag startTag, PtrTag destTag>
+    static void replaceWithJump(CodeLocationLabel<startTag> instructionStart, CodeLocationLabel<destTag> destination)
     {
         MIPSAssembler::replaceWithJump(instructionStart.dataLocation(), destination.dataLocation());
     }
@@ -3419,44 +3421,52 @@
     static bool canJumpReplacePatchableBranchPtrWithPatch() { return false; }
     static bool canJumpReplacePatchableBranch32WithPatch() { return false; }
 
-    static CodeLocationLabel startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32)
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32<tag>)
     {
         UNREACHABLE_FOR_PLATFORM();
-        return CodeLocationLabel();
+        return CodeLocationLabel<tag>();
     }
 
-    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr<tag> label)
     {
         return label.labelAtOffset(0);
     }
 
-    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID, void* initialValue)
+    template<PtrTag tag>
+    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel<tag> instructionStart, RegisterID, void* initialValue)
     {
         MIPSAssembler::revertJumpToMove(instructionStart.dataLocation(), immTempRegister, reinterpret_cast<int>(initialValue) & 0xffff);
     }
 
-    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr)
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr<tag>)
     {
         UNREACHABLE_FOR_PLATFORM();
-        return CodeLocationLabel();
+        return CodeLocationLabel<tag>();
     }
 
-    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel, Address, int32_t)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel<tag>, Address, int32_t)
     {
         UNREACHABLE_FOR_PLATFORM();
     }
 
-    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel, Address, void*)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel<tag>, Address, void*)
     {
         UNREACHABLE_FOR_PLATFORM();
     }
 
-    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, CodeLocationLabel<destTag> destination)
     {
         MIPSAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
     }
 
-    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, FunctionPtr<destTag> destination)
     {
         MIPSAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
     }
@@ -3468,7 +3478,8 @@
 
     friend class LinkBuffer;
 
-    static void linkCall(void* code, Call call, FunctionPtr function)
+    template<PtrTag tag>
+    static void linkCall(void* code, Call call, FunctionPtr<tag> function)
     {
         if (call.isFlagSet(Call::Tail))
             MIPSAssembler::linkJump(code, call.m_label, function.executableAddress());
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerX86.h b/Source/JavaScriptCore/assembler/MacroAssemblerX86.h
index b99a210..8bc023d 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerX86.h
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerX86.h
@@ -298,17 +298,19 @@
     static bool supportsFloatingPointTruncate() { return isSSE2Present(); }
     static bool supportsFloatingPointSqrt() { return isSSE2Present(); }
     static bool supportsFloatingPointAbs() { return isSSE2Present(); }
-    
-    static FunctionPtr readCallTarget(CodeLocationCall call)
+
+    template<PtrTag resultTag, PtrTag locationTag>
+    static FunctionPtr<resultTag> readCallTarget(CodeLocationCall<locationTag> call)
     {
         intptr_t offset = reinterpret_cast<int32_t*>(call.dataLocation())[-1];
-        return FunctionPtr(reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(call.dataLocation()) + offset), CodePtrTag);
+        return FunctionPtr<resultTag>(reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(call.dataLocation()) + offset));
     }
 
     static bool canJumpReplacePatchableBranchPtrWithPatch() { return true; }
     static bool canJumpReplacePatchableBranch32WithPatch() { return true; }
-    
-    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr<tag> label)
     {
         const int opcodeBytes = 1;
         const int modRMBytes = 1;
@@ -317,8 +319,9 @@
         ASSERT(totalBytes >= maxJumpReplacementSize());
         return label.labelAtOffset(-totalBytes);
     }
-    
-    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr label)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr<tag> label)
     {
         const int opcodeBytes = 1;
         const int modRMBytes = 1;
@@ -328,8 +331,9 @@
         ASSERT(totalBytes >= maxJumpReplacementSize());
         return label.labelAtOffset(-totalBytes);
     }
-    
-    static CodeLocationLabel startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32 label)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32<tag> label)
     {
         const int opcodeBytes = 1;
         const int modRMBytes = 1;
@@ -339,30 +343,35 @@
         ASSERT(totalBytes >= maxJumpReplacementSize());
         return label.labelAtOffset(-totalBytes);
     }
-    
-    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID reg, void* initialValue)
+
+    template<PtrTag tag>
+    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel<tag> instructionStart, RegisterID reg, void* initialValue)
     {
         X86Assembler::revertJumpTo_cmpl_ir_force32(instructionStart.executableAddress(), reinterpret_cast<intptr_t>(initialValue), reg);
     }
 
-    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel instructionStart, Address address, void* initialValue)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel<tag> instructionStart, Address address, void* initialValue)
     {
         ASSERT(!address.offset);
         X86Assembler::revertJumpTo_cmpl_im_force32(instructionStart.executableAddress(), reinterpret_cast<intptr_t>(initialValue), 0, address.base);
     }
 
-    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel instructionStart, Address address, int32_t initialValue)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel<tag> instructionStart, Address address, int32_t initialValue)
     {
         ASSERT(!address.offset);
         X86Assembler::revertJumpTo_cmpl_im_force32(instructionStart.executableAddress(), initialValue, 0, address.base);
     }
 
-    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, CodeLocationLabel<destTag> destination)
     {
         X86Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
     }
 
-    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, FunctionPtr<destTag> destination)
     {
         X86Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
     }
@@ -370,7 +379,8 @@
 private:
     friend class LinkBuffer;
 
-    static void linkCall(void* code, Call call, FunctionPtr function)
+    template<PtrTag tag>
+    static void linkCall(void* code, Call call, FunctionPtr<tag> function)
     {
         if (call.isFlagSet(Call::Tail))
             X86Assembler::linkJump(code, call.m_label, function.executableAddress());
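
    readCallTarget() now takes two tags; only the location tag can be deduced from the
    argument, so callers name the result tag explicitly. A sketch (currentCallTarget is
    hypothetical):

        static FunctionPtr<OperationPtrTag> currentCallTarget(CodeLocationCall<JSEntryPtrTag> call)
        {
            // locationTag (JSEntryPtrTag) is deduced; resultTag must be spelled out.
            return MacroAssembler::readCallTarget<OperationPtrTag>(call);
        }
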
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerX86Common.h b/Source/JavaScriptCore/assembler/MacroAssemblerX86Common.h
index 84912d1..e8c8599 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerX86Common.h
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerX86Common.h
@@ -1203,8 +1203,9 @@
         m_assembler.movl_mr_disp8(address.offset, address.base, dest);
         return DataLabelCompact(this);
     }
-    
-    static void repatchCompact(CodeLocationDataLabelCompact dataLabelCompact, int32_t value)
+
+    template<PtrTag tag>
+    static void repatchCompact(CodeLocationDataLabelCompact<tag> dataLabelCompact, int32_t value)
     {
         ASSERT(isCompactPtrAlignedAddressOffset(value));
         AssemblerType_T::repatchCompact(dataLabelCompact.dataLocation(), value);
@@ -3879,12 +3880,14 @@
     }
 #endif
 
-    static void replaceWithVMHalt(CodeLocationLabel instructionStart)
+    template<PtrTag tag>
+    static void replaceWithVMHalt(CodeLocationLabel<tag> instructionStart)
     {
         X86Assembler::replaceWithHlt(instructionStart.executableAddress());
     }
 
-    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
+    template<PtrTag startTag, PtrTag destTag>
+    static void replaceWithJump(CodeLocationLabel<startTag> instructionStart, CodeLocationLabel<destTag> destination)
     {
         X86Assembler::replaceWithJump(instructionStart.executableAddress(), destination.executableAddress());
     }
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerX86_64.h b/Source/JavaScriptCore/assembler/MacroAssemblerX86_64.h
index 0f08d93..d4428cc 100644
--- a/Source/JavaScriptCore/assembler/MacroAssemblerX86_64.h
+++ b/Source/JavaScriptCore/assembler/MacroAssemblerX86_64.h
@@ -1884,9 +1884,10 @@
     static bool supportsFloatingPointSqrt() { return true; }
     static bool supportsFloatingPointAbs() { return true; }
     
-    static FunctionPtr readCallTarget(CodeLocationCall call)
+    template<PtrTag resultTag, PtrTag locationTag>
+    static FunctionPtr<resultTag> readCallTarget(CodeLocationCall<locationTag> call)
     {
-        return FunctionPtr(X86Assembler::readPointer(call.dataLabelPtrAtOffset(-REPATCH_OFFSET_CALL_R11).dataLocation()), CodePtrTag);
+        return FunctionPtr<resultTag>(X86Assembler::readPointer(call.dataLabelPtrAtOffset(-REPATCH_OFFSET_CALL_R11).dataLocation()));
     }
 
     bool haveScratchRegisterForBlinding() { return m_allowScratchRegister; }
@@ -1894,8 +1895,9 @@
 
     static bool canJumpReplacePatchableBranchPtrWithPatch() { return true; }
     static bool canJumpReplacePatchableBranch32WithPatch() { return true; }
-    
-    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr<tag> label)
     {
         const int rexBytes = 1;
         const int opcodeBytes = 1;
@@ -1904,8 +1906,9 @@
         ASSERT(totalBytes >= maxJumpReplacementSize());
         return label.labelAtOffset(-totalBytes);
     }
-    
-    static CodeLocationLabel startOfBranch32WithPatchOnRegister(CodeLocationDataLabel32 label)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfBranch32WithPatchOnRegister(CodeLocationDataLabel32<tag> label)
     {
         const int rexBytes = 1;
         const int opcodeBytes = 1;
@@ -1914,38 +1917,45 @@
         ASSERT(totalBytes >= maxJumpReplacementSize());
         return label.labelAtOffset(-totalBytes);
     }
-    
-    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr label)
+
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr<tag> label)
     {
         return startOfBranchPtrWithPatchOnRegister(label);
     }
 
-    static CodeLocationLabel startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32 label)
+    template<PtrTag tag>
+    static CodeLocationLabel<tag> startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32<tag> label)
     {
         return startOfBranch32WithPatchOnRegister(label);
     }
-    
-    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel instructionStart, Address, void* initialValue)
+
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel<tag> instructionStart, Address, void* initialValue)
     {
         X86Assembler::revertJumpTo_movq_i64r(instructionStart.executableAddress(), reinterpret_cast<intptr_t>(initialValue), s_scratchRegister);
     }
 
-    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel instructionStart, Address, int32_t initialValue)
+    template<PtrTag tag>
+    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel<tag> instructionStart, Address, int32_t initialValue)
     {
         X86Assembler::revertJumpTo_movl_i32r(instructionStart.executableAddress(), initialValue, s_scratchRegister);
     }
 
-    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID, void* initialValue)
+    template<PtrTag tag>
+    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel<tag> instructionStart, RegisterID, void* initialValue)
     {
         X86Assembler::revertJumpTo_movq_i64r(instructionStart.executableAddress(), reinterpret_cast<intptr_t>(initialValue), s_scratchRegister);
     }
 
-    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, CodeLocationLabel<destTag> destination)
     {
         X86Assembler::repatchPointer(call.dataLabelPtrAtOffset(-REPATCH_OFFSET_CALL_R11).dataLocation(), destination.executableAddress());
     }
 
-    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
+    template<PtrTag callTag, PtrTag destTag>
+    static void repatchCall(CodeLocationCall<callTag> call, FunctionPtr<destTag> destination)
     {
         X86Assembler::repatchPointer(call.dataLabelPtrAtOffset(-REPATCH_OFFSET_CALL_R11).dataLocation(), destination.executableAddress());
     }
@@ -1966,7 +1976,8 @@
 
     friend class LinkBuffer;
 
-    static void linkCall(void* code, Call call, FunctionPtr function)
+    template<PtrTag tag>
+    static void linkCall(void* code, Call call, FunctionPtr<tag> function)
     {
         if (!call.isFlagSet(Call::Near))
             X86Assembler::linkPointer(code, call.m_label.labelAtOffset(-REPATCH_OFFSET_CALL_R11), function.executableAddress());
diff --git a/Source/JavaScriptCore/assembler/testmasm.cpp b/Source/JavaScriptCore/assembler/testmasm.cpp
index 86c42b0..f18c412 100644
--- a/Source/JavaScriptCore/assembler/testmasm.cpp
+++ b/Source/JavaScriptCore/assembler/testmasm.cpp
@@ -144,18 +144,18 @@
 }
 #endif // ENABLE(MASM_PROBE)
 
-MacroAssemblerCodeRef compile(Generator&& generate)
+MacroAssemblerCodeRef<JSEntryPtrTag> compile(Generator&& generate)
 {
     CCallHelpers jit;
     generate(jit);
     LinkBuffer linkBuffer(jit, nullptr);
-    return FINALIZE_CODE(linkBuffer, JITCodePtrTag, "testmasm compilation");
+    return FINALIZE_CODE(linkBuffer, JSEntryPtrTag, "testmasm compilation");
 }
 
 template<typename T, typename... Arguments>
-T invoke(MacroAssemblerCodeRef code, Arguments... arguments)
+T invoke(MacroAssemblerCodeRef<JSEntryPtrTag> code, Arguments... arguments)
 {
-    void* executableAddress = untagCFunctionPtr(code.code().executableAddress(), JITCodePtrTag);
+    void* executableAddress = untagCFunctionPtr<JSEntryPtrTag>(code.code().executableAddress());
     T (*function)(Arguments...) = bitwise_cast<T(*)(Arguments...)>(executableAddress);
     return function(arguments...);
 }
@@ -593,7 +593,7 @@
     unsigned probeCallCount = 0;
     bool continuationWasReached = false;
 
-    MacroAssemblerCodeRef continuation = compile([&] (CCallHelpers& jit) {
+    MacroAssemblerCodeRef<JSEntryPtrTag> continuation = compile([&] (CCallHelpers& jit) {
         // Validate that we reached the continuation.
         jit.probe([&] (Probe::Context&) {
             probeCallCount++;
@@ -610,7 +610,7 @@
         // Write expected values into the registers.
         jit.probe([&] (Probe::Context& context) {
             probeCallCount++;
-            context.cpu.pc() = untagCodePtr(continuation.code().executableAddress(), JITCodePtrTag);
+            context.cpu.pc() = untagCodePtr(continuation.code().executableAddress(), JSEntryPtrTag);
         });
 
         jit.breakpoint(); // We should never get here.
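
        A sketch in the style of the tests above, assuming the usual CCallHelpers
        prologue/epilogue helpers and this file's CHECK_EQ macro: the tag baked in by
        compile()'s FINALIZE_CODE is the same one invoke() untags with, so the types keep
        the two in sync.

            static void testReturns42()
            {
                auto code = compile([] (CCallHelpers& jit) {
                    jit.emitFunctionPrologue();
                    jit.move(CCallHelpers::TrustedImm32(42), GPRInfo::returnValueGPR);
                    jit.emitFunctionEpilogue();
                    jit.ret();
                });
                CHECK_EQ(invoke<int>(code), 42);
            }
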
diff --git a/Source/JavaScriptCore/b3/B3Compilation.cpp b/Source/JavaScriptCore/b3/B3Compilation.cpp
index 9e20a6b..707cbef 100644
--- a/Source/JavaScriptCore/b3/B3Compilation.cpp
+++ b/Source/JavaScriptCore/b3/B3Compilation.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -33,7 +33,7 @@
 
 namespace JSC { namespace B3 {
 
-Compilation::Compilation(MacroAssemblerCodeRef codeRef, std::unique_ptr<OpaqueByproducts> byproducts)
+Compilation::Compilation(MacroAssemblerCodeRef<B3CompilationPtrTag> codeRef, std::unique_ptr<OpaqueByproducts> byproducts)
     : m_codeRef(codeRef)
     , m_byproducts(WTFMove(byproducts))
 {
diff --git a/Source/JavaScriptCore/b3/B3Compilation.h b/Source/JavaScriptCore/b3/B3Compilation.h
index 7398652..0bd063e 100644
--- a/Source/JavaScriptCore/b3/B3Compilation.h
+++ b/Source/JavaScriptCore/b3/B3Compilation.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -48,17 +48,17 @@
     WTF_MAKE_FAST_ALLOCATED;
 
 public:
-    JS_EXPORT_PRIVATE Compilation(MacroAssemblerCodeRef, std::unique_ptr<OpaqueByproducts>);
+    JS_EXPORT_PRIVATE Compilation(MacroAssemblerCodeRef<B3CompilationPtrTag>, std::unique_ptr<OpaqueByproducts>);
     JS_EXPORT_PRIVATE Compilation(Compilation&&);
     JS_EXPORT_PRIVATE ~Compilation();
 
-    MacroAssemblerCodePtr code() const { return m_codeRef.code(); }
-    MacroAssemblerCodeRef codeRef() const { return m_codeRef; }
+    MacroAssemblerCodePtr<B3CompilationPtrTag> code() const { return m_codeRef.code(); }
+    MacroAssemblerCodeRef<B3CompilationPtrTag> codeRef() const { return m_codeRef; }
     
     CString disassembly() const { return m_codeRef.disassembly(); }
 
 private:
-    MacroAssemblerCodeRef m_codeRef;
+    MacroAssemblerCodeRef<B3CompilationPtrTag> m_codeRef;
     std::unique_ptr<OpaqueByproducts> m_byproducts;
 };
 
diff --git a/Source/JavaScriptCore/b3/B3Compile.cpp b/Source/JavaScriptCore/b3/B3Compile.cpp
index 75c6547..355d1e7 100644
--- a/Source/JavaScriptCore/b3/B3Compile.cpp
+++ b/Source/JavaScriptCore/b3/B3Compile.cpp
@@ -48,7 +48,7 @@
     generate(proc, jit);
     LinkBuffer linkBuffer(jit, nullptr);
 
-    return Compilation(FINALIZE_CODE(linkBuffer, JITCodePtrTag, "B3::Compilation"), proc.releaseByproducts());
+    return Compilation(FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "B3::Compilation"), proc.releaseByproducts());
 }
 
 } } // namespace JSC::B3
diff --git a/Source/JavaScriptCore/b3/B3LowerMacros.cpp b/Source/JavaScriptCore/b3/B3LowerMacros.cpp
index f432ceb..d4c755a 100644
--- a/Source/JavaScriptCore/b3/B3LowerMacros.cpp
+++ b/Source/JavaScriptCore/b3/B3LowerMacros.cpp
@@ -499,20 +499,20 @@
                 patchpoint->setGenerator(
                     [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                         AllowMacroScratchRegisterUsage allowScratch(jit);
-                        
-                        MacroAssemblerCodePtr* jumpTable = static_cast<MacroAssemblerCodePtr*>(
-                            params.proc().addDataSection(sizeof(MacroAssemblerCodePtr) * tableSize));
-                        
+
+                        using JumpTableCodePtr = MacroAssemblerCodePtr<JSSwitchPtrTag>;
+                        JumpTableCodePtr* jumpTable = static_cast<JumpTableCodePtr*>(
+                            params.proc().addDataSection(sizeof(JumpTableCodePtr) * tableSize));
+
                         GPRReg index = params[0].gpr();
                         GPRReg scratch = params.gpScratch(0);
                         GPRReg poisonScratch = params.gpScratch(1);
-                        PtrTag switchTag = ptrTag(SwitchTablePtrTag, nextPtrTagID());
 
                         jit.move(CCallHelpers::TrustedImm64(JITCodePoison::key()), poisonScratch);
                         jit.move(CCallHelpers::TrustedImmPtr(jumpTable), scratch);
                         jit.load64(CCallHelpers::BaseIndex(scratch, index, CCallHelpers::timesPtr()), scratch);
                         jit.xor64(poisonScratch, scratch);
-                        jit.jump(scratch, switchTag);
+                        jit.jump(scratch, JSSwitchPtrTag);
 
                         // These labels are guaranteed to be populated before either late paths or
                         // link tasks run.
@@ -521,14 +521,14 @@
                         jit.addLinkTask(
                             [=] (LinkBuffer& linkBuffer) {
                                 if (hasUnhandledIndex) {
-                                    MacroAssemblerCodePtr fallThrough = linkBuffer.locationOf(*labels.last(), switchTag);
+                                    JumpTableCodePtr fallThrough = linkBuffer.locationOf<JSSwitchPtrTag>(*labels.last());
                                     for (unsigned i = tableSize; i--;)
                                         jumpTable[i] = fallThrough;
                                 }
                                 
                                 unsigned labelIndex = 0;
                                 for (unsigned tableIndex : handledIndices)
-                                    jumpTable[tableIndex] = linkBuffer.locationOf(*labels[labelIndex++], switchTag);
+                                    jumpTable[tableIndex] = linkBuffer.locationOf<JSSwitchPtrTag>(*labels[labelIndex++]);
                             });
                     });
                 return;
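
    The switch lowering above, condensed: jump table entries are now typed with the tag they
    will be jumped through, and LinkBuffer::locationOf<tag>() produces values of exactly that
    type, so the tag used at jump time and at link time cannot drift apart. A sketch of just
    the table allocation (allocateJumpTable is hypothetical; the rest mirrors the patch):

        static MacroAssemblerCodePtr<JSSwitchPtrTag>* allocateJumpTable(Procedure& proc, unsigned tableSize)
        {
            using JumpTableCodePtr = MacroAssemblerCodePtr<JSSwitchPtrTag>;
            // Entries are later filled with linkBuffer.locationOf<JSSwitchPtrTag>(label)
            // and consumed by jit.jump(scratch, JSSwitchPtrTag).
            return static_cast<JumpTableCodePtr*>(
                proc.addDataSection(sizeof(JumpTableCodePtr) * tableSize));
        }
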
diff --git a/Source/JavaScriptCore/b3/air/AirDisassembler.cpp b/Source/JavaScriptCore/b3/air/AirDisassembler.cpp
index f2913d4..e5fe0b5 100644
--- a/Source/JavaScriptCore/b3/air/AirDisassembler.cpp
+++ b/Source/JavaScriptCore/b3/air/AirDisassembler.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2017-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -74,10 +74,10 @@
     auto dumpAsmRange = [&] (CCallHelpers::Label startLabel, CCallHelpers::Label endLabel) {
         RELEASE_ASSERT(startLabel.isSet());
         RELEASE_ASSERT(endLabel.isSet());
-        CodeLocationLabel start = linkBuffer.locationOf(startLabel);
-        CodeLocationLabel end = linkBuffer.locationOf(endLabel);
-        RELEASE_ASSERT(bitwise_cast<uintptr_t>(end.executableAddress()) >= bitwise_cast<uintptr_t>(start.executableAddress()));
-        disassemble(start, bitwise_cast<uintptr_t>(end.executableAddress()) - bitwise_cast<uintptr_t>(start.executableAddress()), asmPrefix, out);
+        CodeLocationLabel<DisassemblyPtrTag> start = linkBuffer.locationOf<DisassemblyPtrTag>(startLabel);
+        CodeLocationLabel<DisassemblyPtrTag> end = linkBuffer.locationOf<DisassemblyPtrTag>(endLabel);
+        RELEASE_ASSERT(end.dataLocation<uintptr_t>() >= start.dataLocation<uintptr_t>());
+        disassemble(start, end.dataLocation<uintptr_t>() - start.dataLocation<uintptr_t>(), asmPrefix, out);
     };
 
     for (BasicBlock* block : m_blocks) {
diff --git a/Source/JavaScriptCore/b3/air/testair.cpp b/Source/JavaScriptCore/b3/air/testair.cpp
index d6bffe9..e534c45 100644
--- a/Source/JavaScriptCore/b3/air/testair.cpp
+++ b/Source/JavaScriptCore/b3/air/testair.cpp
@@ -90,13 +90,13 @@
     LinkBuffer linkBuffer(jit, nullptr);
 
     return std::make_unique<B3::Compilation>(
-        FINALIZE_CODE(linkBuffer, JITCodePtrTag, "testair compilation"), proc.releaseByproducts());
+        FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "testair compilation"), proc.releaseByproducts());
 }
 
 template<typename T, typename... Arguments>
 T invoke(const B3::Compilation& code, Arguments... arguments)
 {
-    void* executableAddress = untagCFunctionPtr(code.code().executableAddress(), JITCodePtrTag);
+    void* executableAddress = untagCFunctionPtr(code.code().executableAddress(), B3CompilationPtrTag);
     T (*function)(Arguments...) = bitwise_cast<T(*)(Arguments...)>(executableAddress);
     return function(arguments...);
 }
diff --git a/Source/JavaScriptCore/b3/testb3.cpp b/Source/JavaScriptCore/b3/testb3.cpp
index 6dc9177..51054ed 100644
--- a/Source/JavaScriptCore/b3/testb3.cpp
+++ b/Source/JavaScriptCore/b3/testb3.cpp
@@ -125,9 +125,9 @@
 }
 
 template<typename T, typename... Arguments>
-T invoke(MacroAssemblerCodePtr ptr, Arguments... arguments)
+T invoke(MacroAssemblerCodePtr<B3CompilationPtrTag> ptr, Arguments... arguments)
 {
-    void* executableAddress = untagCFunctionPtr(ptr.executableAddress(), JITCodePtrTag);
+    void* executableAddress = untagCFunctionPtr<B3CompilationPtrTag>(ptr.executableAddress());
     T (*function)(Arguments...) = bitwise_cast<T(*)(Arguments...)>(executableAddress);
     return function(arguments...);
 }
@@ -13033,8 +13033,8 @@
             AllowMacroScratchRegisterUsage allowScratch(jit);
             Vector<Box<CCallHelpers::Label>> labels = params.successorLabels();
 
-            MacroAssemblerCodePtr* jumpTable = bitwise_cast<MacroAssemblerCodePtr*>(
-                params.proc().addDataSection(sizeof(MacroAssemblerCodePtr) * labels.size()));
+            MacroAssemblerCodePtr<B3CompilationPtrTag>* jumpTable = bitwise_cast<MacroAssemblerCodePtr<B3CompilationPtrTag>*>(
+                params.proc().addDataSection(sizeof(MacroAssemblerCodePtr<B3CompilationPtrTag>) * labels.size()));
 
             GPRReg scratch = params.gpScratch(0);
             GPRReg poisonScratch = params.gpScratch(1);
@@ -13043,13 +13043,12 @@
             jit.move(CCallHelpers::TrustedImm64(JITCodePoison::key()), poisonScratch);
             jit.load64(CCallHelpers::BaseIndex(scratch, params[0].gpr(), CCallHelpers::timesPtr()), scratch);
             jit.xor64(poisonScratch, scratch);
-            PtrTag switchTag = ptrTag(SwitchTablePtrTag, nextPtrTagID());
-            jit.jump(scratch, switchTag);
+            jit.jump(scratch, B3CompilationPtrTag);
 
             jit.addLinkTask(
                 [&, jumpTable, labels] (LinkBuffer& linkBuffer) {
                     for (unsigned i = labels.size(); i--;)
-                        jumpTable[i] = linkBuffer.locationOf(*labels[i], switchTag);
+                        jumpTable[i] = linkBuffer.locationOf<B3CompilationPtrTag>(*labels[i]);
                 });
         });
     
@@ -13298,11 +13297,11 @@
     CCallHelpers jit;
     generate(proc, jit);
     LinkBuffer linkBuffer(jit, nullptr);
-    CodeLocationLabel labelOne = linkBuffer.locationOf(proc.entrypointLabel(0), JITCodePtrTag);
-    CodeLocationLabel labelTwo = linkBuffer.locationOf(proc.entrypointLabel(1), JITCodePtrTag);
-    CodeLocationLabel labelThree = linkBuffer.locationOf(proc.entrypointLabel(2), JITCodePtrTag);
+    CodeLocationLabel<B3CompilationPtrTag> labelOne = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(0));
+    CodeLocationLabel<B3CompilationPtrTag> labelTwo = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(1));
+    CodeLocationLabel<B3CompilationPtrTag> labelThree = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(2));
 
-    MacroAssemblerCodeRef codeRef = FINALIZE_CODE(linkBuffer, JITCodePtrTag, "testb3 compilation");
+    MacroAssemblerCodeRef<B3CompilationPtrTag> codeRef = FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "testb3 compilation");
 
     CHECK(invoke<int>(labelOne, 1, 2) == 3);
     CHECK(invoke<int>(labelTwo, 1, 2) == -1);
@@ -13331,11 +13330,11 @@
     CCallHelpers jit;
     generate(proc, jit);
     LinkBuffer linkBuffer(jit, nullptr);
-    CodeLocationLabel labelOne = linkBuffer.locationOf(proc.entrypointLabel(0), JITCodePtrTag);
-    CodeLocationLabel labelTwo = linkBuffer.locationOf(proc.entrypointLabel(1), JITCodePtrTag);
-    CodeLocationLabel labelThree = linkBuffer.locationOf(proc.entrypointLabel(2), JITCodePtrTag);
+    CodeLocationLabel<B3CompilationPtrTag> labelOne = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(0));
+    CodeLocationLabel<B3CompilationPtrTag> labelTwo = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(1));
+    CodeLocationLabel<B3CompilationPtrTag> labelThree = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(2));
 
-    MacroAssemblerCodeRef codeRef = FINALIZE_CODE(linkBuffer, JITCodePtrTag, "testb3 compilation");
+    MacroAssemblerCodeRef<B3CompilationPtrTag> codeRef = FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "testb3 compilation");
 
     CHECK_EQ(invoke<int>(labelOne, 1, 2), 3);
     CHECK_EQ(invoke<int>(labelTwo, 1, 2), 3);
@@ -13418,11 +13417,11 @@
     CCallHelpers jit;
     generate(proc, jit);
     LinkBuffer linkBuffer(jit, nullptr);
-    CodeLocationLabel labelOne = linkBuffer.locationOf(proc.entrypointLabel(0), JITCodePtrTag);
-    CodeLocationLabel labelTwo = linkBuffer.locationOf(proc.entrypointLabel(1), JITCodePtrTag);
-    CodeLocationLabel labelThree = linkBuffer.locationOf(proc.entrypointLabel(2), JITCodePtrTag);
+    CodeLocationLabel<B3CompilationPtrTag> labelOne = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(0));
+    CodeLocationLabel<B3CompilationPtrTag> labelTwo = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(1));
+    CodeLocationLabel<B3CompilationPtrTag> labelThree = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(2));
 
-    MacroAssemblerCodeRef codeRef = FINALIZE_CODE(linkBuffer, JITCodePtrTag, "testb3 compilation");
+    MacroAssemblerCodeRef<B3CompilationPtrTag> codeRef = FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "testb3 compilation");
 
     CHECK_EQ(invoke<int>(labelOne, 1, 2, 10), 3);
     CHECK_EQ(invoke<int>(labelTwo, 1, 2, 10), -1);
@@ -13535,11 +13534,11 @@
     CCallHelpers jit;
     generate(proc, jit);
     LinkBuffer linkBuffer(jit, nullptr);
-    CodeLocationLabel labelOne = linkBuffer.locationOf(proc.entrypointLabel(0), JITCodePtrTag);
-    CodeLocationLabel labelTwo = linkBuffer.locationOf(proc.entrypointLabel(1), JITCodePtrTag);
-    CodeLocationLabel labelThree = linkBuffer.locationOf(proc.entrypointLabel(2), JITCodePtrTag);
+    CodeLocationLabel<B3CompilationPtrTag> labelOne = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(0));
+    CodeLocationLabel<B3CompilationPtrTag> labelTwo = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(1));
+    CodeLocationLabel<B3CompilationPtrTag> labelThree = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(2));
 
-    MacroAssemblerCodeRef codeRef = FINALIZE_CODE(linkBuffer, JITCodePtrTag, "testb3 compilation");
+    MacroAssemblerCodeRef<B3CompilationPtrTag> codeRef = FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "testb3 compilation");
 
     CHECK_EQ(invoke<int>(labelOne, 1, 2, 10, false), 3);
     CHECK_EQ(invoke<int>(labelTwo, 1, 2, 10, false), -1);
@@ -13613,10 +13612,10 @@
     CCallHelpers jit;
     generate(proc, jit);
     LinkBuffer linkBuffer(jit, nullptr);
-    CodeLocationLabel labelOne = linkBuffer.locationOf(proc.entrypointLabel(0), JITCodePtrTag);
-    CodeLocationLabel labelTwo = linkBuffer.locationOf(proc.entrypointLabel(1), JITCodePtrTag);
+    CodeLocationLabel<B3CompilationPtrTag> labelOne = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(0));
+    CodeLocationLabel<B3CompilationPtrTag> labelTwo = linkBuffer.locationOf<B3CompilationPtrTag>(proc.entrypointLabel(1));
 
-    MacroAssemblerCodeRef codeRef = FINALIZE_CODE(linkBuffer, JITCodePtrTag, "testb3 compilation");
+    MacroAssemblerCodeRef<B3CompilationPtrTag> codeRef = FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "testb3 compilation");
 
     CHECK(invoke<int>(labelOne, 0) == 1);
     CHECK(invoke<int>(labelOne, 42) == 43);
diff --git a/Source/JavaScriptCore/bytecode/AccessCase.cpp b/Source/JavaScriptCore/bytecode/AccessCase.cpp
index d09af25..9698593 100644
--- a/Source/JavaScriptCore/bytecode/AccessCase.cpp
+++ b/Source/JavaScriptCore/bytecode/AccessCase.cpp
@@ -818,14 +818,13 @@
 
             jit.addLinkTask([=, &vm] (LinkBuffer& linkBuffer) {
                 this->as<GetterSetterAccessCase>().callLinkInfo()->setCallLocations(
-                    CodeLocationLabel(linkBuffer.locationOfNearCall(slowPathCall)),
-                    CodeLocationLabel(linkBuffer.locationOf(addressOfLinkFunctionCheck)),
-                    linkBuffer.locationOfNearCall(fastPathCall));
+                    CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOfNearCall<JSEntryPtrTag>(slowPathCall)),
+                    CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOf<JSEntryPtrTag>(addressOfLinkFunctionCheck)),
+                    linkBuffer.locationOfNearCall<JSEntryPtrTag>(fastPathCall));
 
-                PtrTag linkTag = ptrTag(LinkCallPtrTag, &vm);
                 linkBuffer.link(
                     slowPathCall,
-                    CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag)));
+                    CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkCallThunkGenerator).code()));
             });
         } else {
             ASSERT(m_type == CustomValueGetter || m_type == CustomAccessorGetter || m_type == CustomValueSetter || m_type == CustomAccessorSetter);
@@ -855,10 +854,9 @@
             }
             jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
 
-            PtrTag callTag = ptrTag(GetterSetterPtrTag, nextPtrTagID());
-            operationCall = jit.call(callTag);
+            operationCall = jit.call(OperationPtrTag);
             jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-                linkBuffer.link(operationCall, FunctionPtr(this->as<GetterSetterAccessCase>().m_customAccessor.opaque, callTag));
+                linkBuffer.link(operationCall, this->as<GetterSetterAccessCase>().m_customAccessor);
             });
 
             if (m_type == CustomValueGetter || m_type == CustomAccessorGetter)
@@ -1000,12 +998,11 @@
                 if (!reallocating) {
                     jit.setupArguments<decltype(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity)>(baseGPR);
                     
-                    PtrTag callTag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-                    CCallHelpers::Call operationCall = jit.call(callTag);
+                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                         linkBuffer.link(
                             operationCall,
-                            FunctionPtr(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity, callTag));
+                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToHavePropertyStorageWithInitialCapacity));
                     });
                 } else {
                     // Handle the case where we are reallocating (i.e. the old structure/butterfly
@@ -1013,12 +1010,11 @@
                     jit.setupArguments<decltype(operationReallocateButterflyToGrowPropertyStorage)>(
                         baseGPR, CCallHelpers::TrustedImm32(newSize / sizeof(JSValue)));
                     
-                    PtrTag callTag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-                    CCallHelpers::Call operationCall = jit.call(callTag);
+                    CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
                     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                         linkBuffer.link(
                             operationCall,
-                            FunctionPtr(operationReallocateButterflyToGrowPropertyStorage, callTag));
+                            FunctionPtr<OperationPtrTag>(operationReallocateButterflyToGrowPropertyStorage));
                     });
                 }
                 
diff --git a/Source/JavaScriptCore/bytecode/AccessCaseSnippetParams.cpp b/Source/JavaScriptCore/bytecode/AccessCaseSnippetParams.cpp
index 1c2e561..2bd9fe6 100644
--- a/Source/JavaScriptCore/bytecode/AccessCaseSnippetParams.cpp
+++ b/Source/JavaScriptCore/bytecode/AccessCaseSnippetParams.cpp
@@ -61,11 +61,10 @@
 
         jit.setupArguments<FunctionType>(std::get<ArgumentsIndex>(m_arguments)...);
 
-        PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-        CCallHelpers::Call operationCall = jit.call(tag);
+        CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
         auto function = m_function;
         jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-            linkBuffer.link(operationCall, FunctionPtr(function, tag));
+            linkBuffer.link(operationCall, FunctionPtr<OperationPtrTag>(function));
         });
 
         jit.setupResults(m_result);
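
    The operation-call pattern used repeatedly above, isolated into a sketch (operationExample
    and emitOperationCall are hypothetical): the call is emitted against OperationPtrTag and
    later linked to a FunctionPtr carrying that same tag.

        void operationExample(); // hypothetical, for illustration only

        static void emitOperationCall(CCallHelpers& jit)
        {
            CCallHelpers::Call operationCall = jit.call(OperationPtrTag);
            jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
                // The FunctionPtr's tag matches the tag the call was emitted with.
                linkBuffer.link(operationCall, FunctionPtr<OperationPtrTag>(operationExample));
            });
        }
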
diff --git a/Source/JavaScriptCore/bytecode/ByValInfo.h b/Source/JavaScriptCore/bytecode/ByValInfo.h
index 661675f..ac1d44d 100644
--- a/Source/JavaScriptCore/bytecode/ByValInfo.h
+++ b/Source/JavaScriptCore/bytecode/ByValInfo.h
@@ -226,7 +226,7 @@
 struct ByValInfo {
     ByValInfo() { }
 
-    ByValInfo(unsigned bytecodeIndex, CodeLocationJump notIndexJump, CodeLocationJump badTypeJump, CodeLocationLabel exceptionHandler, JITArrayMode arrayMode, ArrayProfile* arrayProfile, int16_t badTypeJumpToDone, int16_t badTypeJumpToNextHotPath, int16_t returnAddressToSlowPath)
+    ByValInfo(unsigned bytecodeIndex, CodeLocationJump<JSEntryPtrTag> notIndexJump, CodeLocationJump<JSEntryPtrTag> badTypeJump, CodeLocationLabel<ExceptionHandlerPtrTag> exceptionHandler, JITArrayMode arrayMode, ArrayProfile* arrayProfile, int16_t badTypeJumpToDone, int16_t badTypeJumpToNextHotPath, int16_t returnAddressToSlowPath)
         : bytecodeIndex(bytecodeIndex)
         , notIndexJump(notIndexJump)
         , badTypeJump(badTypeJump)
@@ -244,9 +244,9 @@
     }
 
     unsigned bytecodeIndex;
-    CodeLocationJump notIndexJump;
-    CodeLocationJump badTypeJump;
-    CodeLocationLabel exceptionHandler;
+    CodeLocationJump<JSEntryPtrTag> notIndexJump;
+    CodeLocationJump<JSEntryPtrTag> badTypeJump;
+    CodeLocationLabel<ExceptionHandlerPtrTag> exceptionHandler;
     JITArrayMode arrayMode; // The array mode that was baked into the inline JIT code.
     ArrayProfile* arrayProfile;
     int16_t badTypeJumpToDone;
diff --git a/Source/JavaScriptCore/bytecode/CallLinkInfo.cpp b/Source/JavaScriptCore/bytecode/CallLinkInfo.cpp
index f236014..d5e2ca2 100644
--- a/Source/JavaScriptCore/bytecode/CallLinkInfo.cpp
+++ b/Source/JavaScriptCore/bytecode/CallLinkInfo.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2014, 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -97,25 +97,25 @@
     RELEASE_ASSERT(!isOnList());
 }
 
-CodeLocationNearCall CallLinkInfo::callReturnLocation()
+CodeLocationNearCall<JSEntryPtrTag> CallLinkInfo::callReturnLocation()
 {
     RELEASE_ASSERT(!isDirect());
-    return CodeLocationNearCall(m_callReturnLocationOrPatchableJump, Regular);
+    return CodeLocationNearCall<JSEntryPtrTag>(m_callReturnLocationOrPatchableJump, Regular);
 }
 
-CodeLocationJump CallLinkInfo::patchableJump()
+CodeLocationJump<JSEntryPtrTag> CallLinkInfo::patchableJump()
 {
     RELEASE_ASSERT(callType() == DirectTailCall);
-    return CodeLocationJump(m_callReturnLocationOrPatchableJump);
+    return CodeLocationJump<JSEntryPtrTag>(m_callReturnLocationOrPatchableJump);
 }
 
-CodeLocationDataLabelPtr CallLinkInfo::hotPathBegin()
+CodeLocationDataLabelPtr<JSEntryPtrTag> CallLinkInfo::hotPathBegin()
 {
     RELEASE_ASSERT(!isDirect());
-    return CodeLocationDataLabelPtr(m_hotPathBeginOrSlowPathStart);
+    return CodeLocationDataLabelPtr<JSEntryPtrTag>(m_hotPathBeginOrSlowPathStart);
 }
 
-CodeLocationLabel CallLinkInfo::slowPathStart()
+CodeLocationLabel<JSEntryPtrTag> CallLinkInfo::slowPathStart()
 {
     RELEASE_ASSERT(isDirect());
     return m_hotPathBeginOrSlowPathStart;
diff --git a/Source/JavaScriptCore/bytecode/CallLinkInfo.h b/Source/JavaScriptCore/bytecode/CallLinkInfo.h
index 5bc2c44..2863264 100644
--- a/Source/JavaScriptCore/bytecode/CallLinkInfo.h
+++ b/Source/JavaScriptCore/bytecode/CallLinkInfo.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2014-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -165,9 +165,9 @@
     }
 
     void setCallLocations(
-        CodeLocationLabel callReturnLocationOrPatchableJump,
-        CodeLocationLabel hotPathBeginOrSlowPathStart,
-        CodeLocationNearCall hotPathOther)
+        CodeLocationLabel<JSEntryPtrTag> callReturnLocationOrPatchableJump,
+        CodeLocationLabel<JSEntryPtrTag> hotPathBeginOrSlowPathStart,
+        CodeLocationNearCall<JSEntryPtrTag> hotPathOther)
     {
         m_callReturnLocationOrPatchableJump = callReturnLocationOrPatchableJump;
         m_hotPathBeginOrSlowPathStart = hotPathBeginOrSlowPathStart;
@@ -181,12 +181,12 @@
         m_allowStubs = false;
     }
 
-    CodeLocationNearCall callReturnLocation();
-    CodeLocationJump patchableJump();
-    CodeLocationDataLabelPtr hotPathBegin();
-    CodeLocationLabel slowPathStart();
+    CodeLocationNearCall<JSEntryPtrTag> callReturnLocation();
+    CodeLocationJump<JSEntryPtrTag> patchableJump();
+    CodeLocationDataLabelPtr<JSEntryPtrTag> hotPathBegin();
+    CodeLocationLabel<JSEntryPtrTag> slowPathStart();
 
-    CodeLocationNearCall hotPathOther()
+    CodeLocationNearCall<JSEntryPtrTag> hotPathOther()
     {
         return m_hotPathOther;
     }
@@ -327,9 +327,11 @@
     }
 
 private:
-    CodeLocationLabel m_callReturnLocationOrPatchableJump;
-    CodeLocationLabel m_hotPathBeginOrSlowPathStart;
-    CodeLocationNearCall m_hotPathOther;
+    // FIXME: These should be tagged with JSInternalPtrTag instead of JSEntryPtrTag.
+    // https://bugs.webkit.org/show_bug.cgi?id=184712
+    CodeLocationLabel<JSEntryPtrTag> m_callReturnLocationOrPatchableJump;
+    CodeLocationLabel<JSEntryPtrTag> m_hotPathBeginOrSlowPathStart;
+    CodeLocationNearCall<JSEntryPtrTag> m_hotPathOther;
     WriteBarrier<JSCell> m_calleeOrCodeBlock;
     WriteBarrier<JSCell> m_lastSeenCalleeOrExecutable;
     RefPtr<PolymorphicCallStubRoutine> m_stub;
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.cpp b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
index 76c0381..cf6684c 100644
--- a/Source/JavaScriptCore/bytecode/CodeBlock.cpp
+++ b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
@@ -467,7 +467,7 @@
                 const UnlinkedHandlerInfo& unlinkedHandler = unlinkedCodeBlock->exceptionHandler(i);
                 HandlerInfo& handler = m_rareData->m_exceptionHandlers[i];
 #if ENABLE(JIT)
-                handler.initialize(unlinkedHandler, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(LLInt::getCodePtr(op_catch))));
+                handler.initialize(unlinkedHandler, CodeLocationLabel<ExceptionHandlerPtrTag>(LLInt::getCodePtr<BytecodePtrTag>(op_catch).retagged<ExceptionHandlerPtrTag>()));
 #else
                 handler.initialize(unlinkedHandler);
 #endif
diff --git a/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp b/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp
index c2b54a0..04a3767 100644
--- a/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp
+++ b/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp
@@ -256,7 +256,7 @@
             case ComplexGetStatus::Inlineable: {
                 std::unique_ptr<CallLinkStatus> callLinkStatus;
                 JSFunction* intrinsicFunction = nullptr;
-                PropertySlot::GetValueFunc customAccessorGetter = nullptr;
+                FunctionPtr<OperationPtrTag> customAccessorGetter;
                 std::optional<DOMAttributeAnnotation> domAttribute;
 
                 switch (access.type()) {
@@ -278,7 +278,7 @@
                     break;
                 }
                 case AccessCase::CustomAccessorGetter: {
-                    customAccessorGetter = bitwise_cast<PropertySlot::GetValueFunc>(access.as<GetterSetterAccessCase>().customAccessor());
+                    customAccessorGetter = access.as<GetterSetterAccessCase>().customAccessor();
                     domAttribute = access.as<GetterSetterAccessCase>().domAttribute();
                     if (!domAttribute)
                         return GetByIdStatus(slowPathState, true);
diff --git a/Source/JavaScriptCore/bytecode/GetByIdVariant.cpp b/Source/JavaScriptCore/bytecode/GetByIdVariant.cpp
index 937074d..bbe8efb 100644
--- a/Source/JavaScriptCore/bytecode/GetByIdVariant.cpp
+++ b/Source/JavaScriptCore/bytecode/GetByIdVariant.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2014-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -37,7 +37,7 @@
     const ObjectPropertyConditionSet& conditionSet,
     std::unique_ptr<CallLinkStatus> callLinkStatus,
     JSFunction* intrinsicFunction,
-    PropertySlot::GetValueFunc customAccessorGetter,
+    FunctionPtr<OperationPtrTag> customAccessorGetter,
     std::optional<DOMAttributeAnnotation> domAttribute)
     : m_structureSet(structureSet)
     , m_conditionSet(conditionSet)
@@ -155,7 +155,7 @@
     if (m_intrinsicFunction)
         out.print(", intrinsic = ", *m_intrinsicFunction);
     if (m_customAccessorGetter)
-        out.print(", customaccessorgetter = ", RawPointer(bitwise_cast<const void*>(m_customAccessorGetter)));
+        out.print(", customaccessorgetter = ", RawPointer(m_customAccessorGetter.executableAddress()));
     if (m_domAttribute) {
         out.print(", domclass = ", RawPointer(m_domAttribute->classInfo));
         if (m_domAttribute->domJIT)
diff --git a/Source/JavaScriptCore/bytecode/GetByIdVariant.h b/Source/JavaScriptCore/bytecode/GetByIdVariant.h
index b3fb26a..4a16278 100644
--- a/Source/JavaScriptCore/bytecode/GetByIdVariant.h
+++ b/Source/JavaScriptCore/bytecode/GetByIdVariant.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2014-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -46,7 +46,7 @@
         const ObjectPropertyConditionSet& = ObjectPropertyConditionSet(),
         std::unique_ptr<CallLinkStatus> = nullptr,
         JSFunction* = nullptr,
-        PropertySlot::GetValueFunc = nullptr,
+        FunctionPtr<OperationPtrTag> customAccessorGetter = nullptr,
         std::optional<DOMAttributeAnnotation> = std::nullopt);
 
     ~GetByIdVariant();
@@ -66,7 +66,7 @@
     CallLinkStatus* callLinkStatus() const { return m_callLinkStatus.get(); }
     JSFunction* intrinsicFunction() const { return m_intrinsicFunction; }
     Intrinsic intrinsic() const { return m_intrinsicFunction ? m_intrinsicFunction->intrinsic() : NoIntrinsic; }
-    PropertySlot::GetValueFunc customAccessorGetter() const { return m_customAccessorGetter; }
+    FunctionPtr<OperationPtrTag> customAccessorGetter() const { return m_customAccessorGetter; }
     std::optional<DOMAttributeAnnotation> domAttribute() const { return m_domAttribute; }
 
     bool isPropertyUnset() const { return offset() == invalidOffset; }
@@ -86,7 +86,7 @@
     PropertyOffset m_offset;
     std::unique_ptr<CallLinkStatus> m_callLinkStatus;
     JSFunction* m_intrinsicFunction;
-    PropertySlot::GetValueFunc m_customAccessorGetter;
+    FunctionPtr<OperationPtrTag> m_customAccessorGetter;
     std::optional<DOMAttributeAnnotation> m_domAttribute;
 };
 
diff --git a/Source/JavaScriptCore/bytecode/GetterSetterAccessCase.cpp b/Source/JavaScriptCore/bytecode/GetterSetterAccessCase.cpp
index 61b31ec..4e984d2 100644
--- a/Source/JavaScriptCore/bytecode/GetterSetterAccessCase.cpp
+++ b/Source/JavaScriptCore/bytecode/GetterSetterAccessCase.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2017-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -51,7 +51,7 @@
 
 std::unique_ptr<AccessCase> GetterSetterAccessCase::create(
     VM& vm, JSCell* owner, AccessType type, PropertyOffset offset, Structure* structure, const ObjectPropertyConditionSet& conditionSet,
-    bool viaProxy, WatchpointSet* additionalSet, PropertySlot::GetValueFunc customGetter, JSObject* customSlotBase,
+    bool viaProxy, WatchpointSet* additionalSet, FunctionPtr<OperationPtrTag> customGetter, JSObject* customSlotBase,
     std::optional<DOMAttributeAnnotation> domAttribute, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain)
 {
     switch (type) {
@@ -65,17 +65,17 @@
 
     std::unique_ptr<GetterSetterAccessCase> result(new GetterSetterAccessCase(vm, owner, type, offset, structure, conditionSet, viaProxy, additionalSet, customSlotBase, WTFMove(prototypeAccessChain)));
     result->m_domAttribute = domAttribute;
-    result->m_customAccessor.getter = customGetter;
+    result->m_customAccessor = customGetter ? FunctionPtr<OperationPtrTag>(customGetter) : nullptr;
     return WTFMove(result);
 }
 
 std::unique_ptr<AccessCase> GetterSetterAccessCase::create(VM& vm, JSCell* owner, AccessType type, Structure* structure, PropertyOffset offset,
-    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain, PutPropertySlot::PutValueFunc customSetter,
+    const ObjectPropertyConditionSet& conditionSet, std::unique_ptr<PolyProtoAccessChain> prototypeAccessChain, FunctionPtr<OperationPtrTag> customSetter,
     JSObject* customSlotBase)
 {
     ASSERT(type == Setter || type == CustomValueSetter || type == CustomAccessorSetter);
     std::unique_ptr<GetterSetterAccessCase> result(new GetterSetterAccessCase(vm, owner, type, offset, structure, conditionSet, false, nullptr, customSlotBase, WTFMove(prototypeAccessChain)));
-    result->m_customAccessor.setter = customSetter;
+    result->m_customAccessor = customSetter ? FunctionPtr<OperationPtrTag>(customSetter) : nullptr;
     return WTFMove(result);
 }
 
@@ -89,7 +89,7 @@
     : Base(other)
     , m_customSlotBase(other.m_customSlotBase)
 {
-    m_customAccessor.opaque = other.m_customAccessor.opaque;
+    m_customAccessor = other.m_customAccessor;
     m_domAttribute = other.m_domAttribute;
 }
 
@@ -113,7 +113,7 @@
     out.print(comma, "customSlotBase = ", RawPointer(customSlotBase()));
     if (callLinkInfo())
         out.print(comma, "callLinkInfo = ", RawPointer(callLinkInfo()));
-    out.print(comma, "customAccessor = ", RawPointer(m_customAccessor.opaque));
+    out.print(comma, "customAccessor = ", RawPointer(m_customAccessor.executableAddress()));
 }
 
 void GetterSetterAccessCase::emitDOMJITGetter(AccessGenerationState& state, const DOMJIT::GetterSetter* domJIT, GPRReg baseForGetGPR)
diff --git a/Source/JavaScriptCore/bytecode/GetterSetterAccessCase.h b/Source/JavaScriptCore/bytecode/GetterSetterAccessCase.h
index 242abc9..1940e91 100644
--- a/Source/JavaScriptCore/bytecode/GetterSetterAccessCase.h
+++ b/Source/JavaScriptCore/bytecode/GetterSetterAccessCase.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2017-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -27,6 +27,7 @@
 
 #if ENABLE(JIT)
 
+#include "MacroAssemblerCodeRef.h"
 #include "ProxyableAccessCase.h"
 
 namespace JSC {
@@ -48,19 +49,19 @@
 
     static std::unique_ptr<AccessCase> create(
         VM&, JSCell* owner, AccessType, PropertyOffset, Structure*,
-        const ObjectPropertyConditionSet&, bool viaProxy, WatchpointSet* additionalSet, PropertySlot::GetValueFunc,
+        const ObjectPropertyConditionSet&, bool viaProxy, WatchpointSet* additionalSet, FunctionPtr<OperationPtrTag> customGetter,
         JSObject* customSlotBase, std::optional<DOMAttributeAnnotation>, std::unique_ptr<PolyProtoAccessChain>);
 
     static std::unique_ptr<AccessCase> create(VM&, JSCell* owner, AccessType, Structure*, PropertyOffset,
         const ObjectPropertyConditionSet&, std::unique_ptr<PolyProtoAccessChain>,
-        PutPropertySlot::PutValueFunc = nullptr, JSObject* customSlotBase = nullptr);
+        FunctionPtr<OperationPtrTag> customSetter = nullptr, JSObject* customSlotBase = nullptr);
 
     void dumpImpl(PrintStream&, CommaPrinter&) const override;
     std::unique_ptr<AccessCase> clone() const override;
 
     ~GetterSetterAccessCase();
 
-    void* customAccessor() const { return m_customAccessor.opaque; }
+    FunctionPtr<OperationPtrTag> customAccessor() const { return m_customAccessor; }
 
 private:
     GetterSetterAccessCase(VM&, JSCell*, AccessType, PropertyOffset, Structure*, const ObjectPropertyConditionSet&, bool viaProxy, WatchpointSet* additionalSet, JSObject* customSlotBase, std::unique_ptr<PolyProtoAccessChain>);
@@ -69,11 +70,7 @@
 
     WriteBarrier<JSObject> m_customSlotBase;
     std::unique_ptr<CallLinkInfo> m_callLinkInfo;
-    union {
-        PutPropertySlot::PutValueFunc setter;
-        PropertySlot::GetValueFunc getter;
-        void* opaque;
-    } m_customAccessor;
+    FunctionPtr<OperationPtrTag> m_customAccessor;
     std::optional<DOMAttributeAnnotation> m_domAttribute;
 };
 
diff --git a/Source/JavaScriptCore/bytecode/HandlerInfo.h b/Source/JavaScriptCore/bytecode/HandlerInfo.h
index 29a77e1..66c3b76 100644
--- a/Source/JavaScriptCore/bytecode/HandlerInfo.h
+++ b/Source/JavaScriptCore/bytecode/HandlerInfo.h
@@ -109,14 +109,13 @@
     }
 
 #if ENABLE(JIT)
-    void initialize(const UnlinkedHandlerInfo& unlinkedInfo, CodeLocationLabel label)
+    void initialize(const UnlinkedHandlerInfo& unlinkedInfo, CodeLocationLabel<ExceptionHandlerPtrTag> label)
     {
         initialize(unlinkedInfo);
         nativeCode = label;
-        assertIsTaggedWith(nativeCode.executableAddress(), ExceptionHandlerPtrTag);
     }
 
-    CodeLocationLabel nativeCode;
+    CodeLocationLabel<ExceptionHandlerPtrTag> nativeCode;
 #endif
 };
 
diff --git a/Source/JavaScriptCore/bytecode/InlineAccess.cpp b/Source/JavaScriptCore/bytecode/InlineAccess.cpp
index 8a7fa95..dbb07a8 100644
--- a/Source/JavaScriptCore/bytecode/InlineAccess.cpp
+++ b/Source/JavaScriptCore/bytecode/InlineAccess.cpp
@@ -135,7 +135,7 @@
         LinkBuffer linkBuffer(jit, stubInfo.patch.start.dataLocation(), stubInfo.patch.inlineSize, JITCompilationMustSucceed, needsBranchCompaction);
         ASSERT(linkBuffer.isValid());
         function(linkBuffer);
-        FINALIZE_CODE(linkBuffer, JITCodePtrTag, "InlineAccessType: '%s'", name);
+        FINALIZE_CODE(linkBuffer, NoPtrTag, "InlineAccessType: '%s'", name);
         return true;
     }
 
@@ -278,7 +278,7 @@
     return linkedCodeInline;
 }
 
-void InlineAccess::rewireStubAsJump(StructureStubInfo& stubInfo, CodeLocationLabel target)
+void InlineAccess::rewireStubAsJump(StructureStubInfo& stubInfo, CodeLocationLabel<JITStubRoutinePtrTag> target)
 {
     CCallHelpers jit;
 
@@ -290,7 +290,7 @@
     RELEASE_ASSERT(linkBuffer.isValid());
     linkBuffer.link(jump, target);
 
-    FINALIZE_CODE(linkBuffer, NearCodePtrTag, "InlineAccess: linking constant jump");
+    FINALIZE_CODE(linkBuffer, NoPtrTag, "InlineAccess: linking constant jump");
 }
 
 } // namespace JSC
diff --git a/Source/JavaScriptCore/bytecode/InlineAccess.h b/Source/JavaScriptCore/bytecode/InlineAccess.h
index 1baabe0..9948b3b 100644
--- a/Source/JavaScriptCore/bytecode/InlineAccess.h
+++ b/Source/JavaScriptCore/bytecode/InlineAccess.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -115,7 +115,7 @@
     static bool generateSelfPropertyReplace(StructureStubInfo&, Structure*, PropertyOffset);
     static bool isCacheableArrayLength(StructureStubInfo&, JSArray*);
     static bool generateArrayLength(StructureStubInfo&, JSArray*);
-    static void rewireStubAsJump(StructureStubInfo&, CodeLocationLabel);
+    static void rewireStubAsJump(StructureStubInfo&, CodeLocationLabel<JITStubRoutinePtrTag>);
 
     // This is helpful when determining the size of an IC on
     // various platforms. When adding a new type of IC, implement
diff --git a/Source/JavaScriptCore/bytecode/JumpTable.h b/Source/JavaScriptCore/bytecode/JumpTable.h
index d79a771..daaa2e9 100644
--- a/Source/JavaScriptCore/bytecode/JumpTable.h
+++ b/Source/JavaScriptCore/bytecode/JumpTable.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
  *
  * Redistribution and use in source and binary forms, with or without
@@ -39,7 +39,7 @@
     struct OffsetLocation {
         int32_t branchOffset;
 #if ENABLE(JIT)
-        CodeLocationLabel ctiOffset;
+        CodeLocationLabel<JSSwitchPtrTag> ctiOffset;
 #endif
     };
 
@@ -47,7 +47,7 @@
         typedef HashMap<RefPtr<StringImpl>, OffsetLocation> StringOffsetTable;
         StringOffsetTable offsetTable;
 #if ENABLE(JIT)
-        CodeLocationLabel ctiDefault; // FIXME: it should not be necessary to store this.
+        CodeLocationLabel<JSSwitchPtrTag> ctiDefault; // FIXME: it should not be necessary to store this.
 #endif
 
         inline int32_t offsetForValue(StringImpl* value, int32_t defaultOffset)
@@ -60,7 +60,7 @@
         }
 
 #if ENABLE(JIT)
-        inline CodeLocationLabel ctiForValue(StringImpl* value)
+        inline CodeLocationLabel<JSSwitchPtrTag> ctiForValue(StringImpl* value)
         {
             StringOffsetTable::const_iterator end = offsetTable.end();
             StringOffsetTable::const_iterator loc = offsetTable.find(value);
@@ -81,8 +81,8 @@
         Vector<int32_t> branchOffsets;
         int32_t min;
 #if ENABLE(JIT)
-        Vector<CodeLocationLabel> ctiOffsets;
-        CodeLocationLabel ctiDefault;
+        Vector<CodeLocationLabel<JSSwitchPtrTag>> ctiOffsets;
+        CodeLocationLabel<JSSwitchPtrTag> ctiDefault;
 #endif
 
         int32_t offsetForValue(int32_t value, int32_t defaultOffset);
@@ -99,7 +99,7 @@
             ctiOffsets.grow(branchOffsets.size());
         }
         
-        inline CodeLocationLabel ctiForValue(int32_t value)
+        inline CodeLocationLabel<JSSwitchPtrTag> ctiForValue(int32_t value)
         {
             if (value >= min && static_cast<uint32_t>(value - min) < ctiOffsets.size())
                 return ctiOffsets[value - min];
diff --git a/Source/JavaScriptCore/bytecode/LLIntCallLinkInfo.h b/Source/JavaScriptCore/bytecode/LLIntCallLinkInfo.h
index be2e817..47e2c38 100644
--- a/Source/JavaScriptCore/bytecode/LLIntCallLinkInfo.h
+++ b/Source/JavaScriptCore/bytecode/LLIntCallLinkInfo.h
@@ -50,14 +50,14 @@
     void unlink()
     {
         callee.clear();
-        machineCodeTarget = MacroAssemblerCodePtr();
+        machineCodeTarget = MacroAssemblerCodePtr<JSEntryPtrTag>();
         if (isOnList())
             remove();
     }
     
     WriteBarrier<JSObject> callee;
     WriteBarrier<JSObject> lastSeenCallee;
-    MacroAssemblerCodePtr machineCodeTarget;
+    MacroAssemblerCodePtr<JSEntryPtrTag> machineCodeTarget;
 };
 
 } // namespace JSC
diff --git a/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp b/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp
index 233070d..305eee4 100644
--- a/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp
+++ b/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp
@@ -199,11 +199,10 @@
             });
     } else {
         jit->setupArguments<decltype(lookupExceptionHandler)>(CCallHelpers::TrustedImmPtr(&m_vm), GPRInfo::callFrameRegister);
-        PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-        CCallHelpers::Call lookupExceptionHandlerCall = jit->call(tag);
+        CCallHelpers::Call lookupExceptionHandlerCall = jit->call(OperationPtrTag);
         jit->addLinkTask(
             [=] (LinkBuffer& linkBuffer) {
-                linkBuffer.link(lookupExceptionHandlerCall, FunctionPtr(lookupExceptionHandler, tag));
+                linkBuffer.link(lookupExceptionHandlerCall, FunctionPtr<OperationPtrTag>(lookupExceptionHandler));
             });
         jit->jumpToExceptionHandler(m_vm);
     }
@@ -539,7 +538,7 @@
                 linkBuffer.link(jumpToOSRExitExceptionHandler, oldHandler.nativeCode);
 
                 HandlerInfo handlerToRegister = oldHandler;
-                handlerToRegister.nativeCode = linkBuffer.locationOf(makeshiftCatchHandler, ExceptionHandlerPtrTag);
+                handlerToRegister.nativeCode = linkBuffer.locationOf<ExceptionHandlerPtrTag>(makeshiftCatchHandler);
                 handlerToRegister.start = newExceptionHandlingCallSite.bits();
                 handlerToRegister.end = newExceptionHandlingCallSite.bits() + 1;
                 codeBlock->appendExceptionHandler(handlerToRegister);
@@ -559,8 +558,8 @@
         return AccessGenerationResult::GaveUp;
     }
 
-    CodeLocationLabel successLabel = stubInfo.doneLocation();
-        
+    CodeLocationLabel<JSEntryPtrTag> successLabel = stubInfo.doneLocation();
+
     linkBuffer.link(state.success, successLabel);
 
     linkBuffer.link(failure, stubInfo.slowPathStartLocation());
@@ -568,8 +567,8 @@
     if (PolymorphicAccessInternal::verbose)
         dataLog(FullCodeOrigin(codeBlock, stubInfo.codeOrigin), ": Generating polymorphic access stub for ", listDump(cases), "\n");
 
-    MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
-        codeBlock, linkBuffer, NearCodePtrTag,
+    MacroAssemblerCodeRef<JITStubRoutinePtrTag> code = FINALIZE_CODE_FOR(
+        codeBlock, linkBuffer, JITStubRoutinePtrTag,
         "%s", toCString("Access stub for ", *codeBlock, " ", stubInfo.codeOrigin, " with return point ", successLabel, ": ", listDump(cases)).data());
 
     bool doesCalls = false;
diff --git a/Source/JavaScriptCore/bytecode/PolymorphicAccess.h b/Source/JavaScriptCore/bytecode/PolymorphicAccess.h
index 1ec24d3..3055855 100644
--- a/Source/JavaScriptCore/bytecode/PolymorphicAccess.h
+++ b/Source/JavaScriptCore/bytecode/PolymorphicAccess.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2014-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2014-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -68,7 +68,7 @@
         RELEASE_ASSERT(kind != GeneratedFinalCode);
     }
     
-    AccessGenerationResult(Kind kind, MacroAssemblerCodePtr code)
+    AccessGenerationResult(Kind kind, MacroAssemblerCodePtr<JITStubRoutinePtrTag> code)
         : m_kind(kind)
         , m_code(code)
     {
@@ -93,7 +93,7 @@
     
     Kind kind() const { return m_kind; }
     
-    const MacroAssemblerCodePtr& code() const { return m_code; }
+    const MacroAssemblerCodePtr<JITStubRoutinePtrTag>& code() const { return m_code; }
     
     bool madeNoChanges() const { return m_kind == MadeNoChanges; }
     bool gaveUp() const { return m_kind == GaveUp; }
@@ -123,7 +123,7 @@
     
 private:
     Kind m_kind;
-    MacroAssemblerCodePtr m_code;
+    MacroAssemblerCodePtr<JITStubRoutinePtrTag> m_code;
     Vector<std::pair<InlineWatchpointSet&, StringFireDetail>> m_watchpointsToFire;
 };
 
diff --git a/Source/JavaScriptCore/bytecode/StructureStubInfo.h b/Source/JavaScriptCore/bytecode/StructureStubInfo.h
index b13ef09..8c9fc90 100644
--- a/Source/JavaScriptCore/bytecode/StructureStubInfo.h
+++ b/Source/JavaScriptCore/bytecode/StructureStubInfo.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -169,7 +169,7 @@
     StructureSet bufferedStructures;
     
     struct {
-        CodeLocationLabel start; // This is either the start of the inline IC for *byId caches, or the location of patchable jump for 'in' caches.
+        CodeLocationLabel<JITStubRoutinePtrTag> start; // This is either the start of the inline IC for *byId caches, or the location of patchable jump for 'in' caches.
         RegisterSet usedRegisters;
         uint32_t inlineSize;
         int32_t deltaFromStartToSlowPathCallLocation;
@@ -185,13 +185,13 @@
 #endif
     } patch;
 
-    CodeLocationCall slowPathCallLocation() { return patch.start.callAtOffset(patch.deltaFromStartToSlowPathCallLocation); }
-    CodeLocationLabel doneLocation() { return patch.start.labelAtOffset(patch.inlineSize); }
-    CodeLocationLabel slowPathStartLocation() { return patch.start.labelAtOffset(patch.deltaFromStartToSlowPathStart); }
-    CodeLocationJump patchableJumpForIn()
+    CodeLocationCall<JSInternalPtrTag> slowPathCallLocation() { return patch.start.callAtOffset<JSInternalPtrTag>(patch.deltaFromStartToSlowPathCallLocation); }
+    CodeLocationLabel<JSEntryPtrTag> doneLocation() { return patch.start.labelAtOffset<JSEntryPtrTag>(patch.inlineSize); }
+    CodeLocationLabel<JITStubRoutinePtrTag> slowPathStartLocation() { return patch.start.labelAtOffset(patch.deltaFromStartToSlowPathStart); }
+    CodeLocationJump<JSInternalPtrTag> patchableJumpForIn()
     { 
         ASSERT(accessType == AccessType::In);
-        return patch.start.jumpAtOffset(0);
+        return patch.start.jumpAtOffset<JSInternalPtrTag>(0);
     }
 
     JSValueRegs valueRegs() const
diff --git a/Source/JavaScriptCore/dfg/DFGCommonData.h b/Source/JavaScriptCore/dfg/DFGCommonData.h
index 15e3853..3d887e3 100644
--- a/Source/JavaScriptCore/dfg/DFGCommonData.h
+++ b/Source/JavaScriptCore/dfg/DFGCommonData.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -98,7 +98,7 @@
             [] (const CatchEntrypointData* item) { return item->bytecodeIndex; });
     }
 
-    void appendCatchEntrypoint(unsigned bytecodeIndex, void* machineCode, Vector<FlushFormat>&& argumentFormats)
+    void appendCatchEntrypoint(unsigned bytecodeIndex, MacroAssemblerCodePtr<ExceptionHandlerPtrTag> machineCode, Vector<FlushFormat>&& argumentFormats)
     {
         catchEntrypoints.append(CatchEntrypointData { machineCode,  WTFMove(argumentFormats), bytecodeIndex });
     }
diff --git a/Source/JavaScriptCore/dfg/DFGDisassembler.cpp b/Source/JavaScriptCore/dfg/DFGDisassembler.cpp
index 0c20703..9aa2225 100644
--- a/Source/JavaScriptCore/dfg/DFGDisassembler.cpp
+++ b/Source/JavaScriptCore/dfg/DFGDisassembler.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -165,11 +165,11 @@
         prefixBuffer[i + prefixLength] = ' ';
     prefixBuffer[prefixLength + amountOfNodeWhiteSpace] = 0;
     
-    CodeLocationLabel start = linkBuffer.locationOf(previousLabel);
-    CodeLocationLabel end = linkBuffer.locationOf(currentLabel);
+    CodeLocationLabel<DisassemblyPtrTag> start = linkBuffer.locationOf<DisassemblyPtrTag>(previousLabel);
+    CodeLocationLabel<DisassemblyPtrTag> end = linkBuffer.locationOf<DisassemblyPtrTag>(currentLabel);
     previousLabel = currentLabel;
-    ASSERT(end.executableAddress<uintptr_t>() >= start.executableAddress<uintptr_t>());
-    disassemble(start, end.executableAddress<uintptr_t>() - start.executableAddress<uintptr_t>(), prefixBuffer.data(), out);
+    ASSERT(end.dataLocation<uintptr_t>() >= start.dataLocation<uintptr_t>());
+    disassemble(start, end.dataLocation<uintptr_t>() - start.dataLocation<uintptr_t>(), prefixBuffer.data(), out);
 }
 
 } } // namespace JSC::DFG
diff --git a/Source/JavaScriptCore/dfg/DFGDriver.h b/Source/JavaScriptCore/dfg/DFGDriver.h
index 1985c58..b2ef2cb 100644
--- a/Source/JavaScriptCore/dfg/DFGDriver.h
+++ b/Source/JavaScriptCore/dfg/DFGDriver.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011, 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -32,7 +32,6 @@
 
 class CodeBlock;
 class JITCode;
-class MacroAssemblerCodePtr;
 class VM;
 
 namespace DFG {
diff --git a/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp b/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
index 3204b34..a6f0e23 100644
--- a/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
+++ b/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
@@ -85,9 +85,8 @@
         }
     }
     
-    MacroAssemblerCodeRef osrExitThunk = vm()->getCTIStub(osrExitThunkGenerator);
-    PtrTag osrExitThunkTag = ptrTag(DFGOSRExitPtrTag, vm());
-    CodeLocationLabel osrExitThunkLabel = CodeLocationLabel(osrExitThunk.retaggedCode(osrExitThunkTag, NearCodePtrTag));
+    MacroAssemblerCodeRef<JITThunkPtrTag> osrExitThunk = vm()->getCTIStub(osrExitThunkGenerator);
+    auto osrExitThunkLabel = CodeLocationLabel<JITThunkPtrTag>(osrExitThunk.code());
     for (unsigned i = 0; i < m_jitCode->osrExit.size(); ++i) {
         OSRExitCompilationInfo& info = m_exitCompilationInfo[i];
         JumpList& failureJumps = info.m_failureJumps;
@@ -162,8 +161,7 @@
         poke(GPRInfo::argumentGPR0);
         poke(GPRInfo::argumentGPR1, 1);
 #endif
-        PtrTag callTag = ptrTag(DFGOperationPtrTag, nextPtrTagID());
-        m_calls.append(CallLinkRecord(call(callTag), FunctionPtr(lookupExceptionHandlerFromCallerFrame, callTag)));
+        m_calls.append(CallLinkRecord(call(OperationPtrTag), FunctionPtr<OperationPtrTag>(lookupExceptionHandlerFromCallerFrame)));
 
         jumpToExceptionHandler(*vm());
     }
@@ -182,8 +180,7 @@
         poke(GPRInfo::argumentGPR0);
         poke(GPRInfo::argumentGPR1, 1);
 #endif
-        PtrTag callTag = ptrTag(DFGOperationPtrTag, nextPtrTagID());
-        m_calls.append(CallLinkRecord(call(callTag), FunctionPtr(lookupExceptionHandler, callTag)));
+        m_calls.append(CallLinkRecord(call(OperationPtrTag), FunctionPtr<OperationPtrTag>(lookupExceptionHandler)));
 
         jumpToExceptionHandler(*vm());
     }
@@ -217,15 +214,14 @@
         
         usedJumpTables.set(data.switchTableIndex);
         SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
-        PtrTag switchTag = ptrTag(SwitchTablePtrTag, &table);
-        table.ctiDefault = linkBuffer.locationOf(m_blockHeads[data.fallThrough.block->index], switchTag);
+        table.ctiDefault = linkBuffer.locationOf<JSSwitchPtrTag>(m_blockHeads[data.fallThrough.block->index]);
         table.ctiOffsets.grow(table.branchOffsets.size());
         for (unsigned j = table.ctiOffsets.size(); j--;)
             table.ctiOffsets[j] = table.ctiDefault;
         for (unsigned j = data.cases.size(); j--;) {
             SwitchCase& myCase = data.cases[j];
             table.ctiOffsets[myCase.value.switchLookupValue(data.kind) - table.min] =
-                linkBuffer.locationOf(m_blockHeads[myCase.target.block->index], switchTag);
+                linkBuffer.locationOf<JSSwitchPtrTag>(m_blockHeads[myCase.target.block->index]);
         }
     }
     
@@ -248,9 +244,8 @@
             continue;
         
         StringJumpTable& table = m_codeBlock->stringSwitchJumpTable(data.switchTableIndex);
-        PtrTag switchTag = ptrTag(SwitchTablePtrTag, &table);
 
-        table.ctiDefault = linkBuffer.locationOf(m_blockHeads[data.fallThrough.block->index], switchTag);
+        table.ctiDefault = linkBuffer.locationOf<JSSwitchPtrTag>(m_blockHeads[data.fallThrough.block->index]);
         StringJumpTable::StringOffsetTable::iterator iter;
         StringJumpTable::StringOffsetTable::iterator end = table.offsetTable.end();
         for (iter = table.offsetTable.begin(); iter != end; ++iter)
@@ -259,7 +254,7 @@
             SwitchCase& myCase = data.cases[j];
             iter = table.offsetTable.find(myCase.value.stringImpl());
             RELEASE_ASSERT(iter != end);
-            iter->value.ctiOffset = linkBuffer.locationOf(m_blockHeads[myCase.target.block->index], switchTag);
+            iter->value.ctiOffset = linkBuffer.locationOf<JSSwitchPtrTag>(m_blockHeads[myCase.target.block->index]);
         }
     }
 
@@ -277,52 +272,50 @@
     for (unsigned i = 0; i < m_ins.size(); ++i) {
         StructureStubInfo& info = *m_ins[i].m_stubInfo;
 
-        CodeLocationLabel start = linkBuffer.locationOf(m_ins[i].m_jump);
+        CodeLocationLabel<JITStubRoutinePtrTag> start = linkBuffer.locationOf<JITStubRoutinePtrTag>(m_ins[i].m_jump);
         info.patch.start = start;
 
         ptrdiff_t inlineSize = MacroAssembler::differenceBetweenCodePtr(
-            start, linkBuffer.locationOf(m_ins[i].m_done));
+            start, linkBuffer.locationOf<JSInternalPtrTag>(m_ins[i].m_done));
         RELEASE_ASSERT(inlineSize >= 0);
         info.patch.inlineSize = inlineSize;
 
         info.patch.deltaFromStartToSlowPathCallLocation = MacroAssembler::differenceBetweenCodePtr(
-            start, linkBuffer.locationOf(m_ins[i].m_slowPathGenerator->call()));
+            start, linkBuffer.locationOf<JSInternalPtrTag>(m_ins[i].m_slowPathGenerator->call()));
 
         info.patch.deltaFromStartToSlowPathStart = MacroAssembler::differenceBetweenCodePtr(
-            start, linkBuffer.locationOf(m_ins[i].m_slowPathGenerator->label()));
+            start, linkBuffer.locationOf<JSInternalPtrTag>(m_ins[i].m_slowPathGenerator->label()));
     }
     
-    PtrTag linkTag = ptrTag(LinkCallPtrTag, vm());
-    auto linkCallThunk = FunctionPtr(vm()->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag));
+    auto linkCallThunk = FunctionPtr<NoPtrTag>(vm()->getCTIStub(linkCallThunkGenerator).retaggedCode<NoPtrTag>());
     for (auto& record : m_jsCalls) {
         CallLinkInfo& info = *record.info;
         linkBuffer.link(record.slowCall, linkCallThunk);
         info.setCallLocations(
-            CodeLocationLabel(linkBuffer.locationOfNearCall(record.slowCall)),
-            CodeLocationLabel(linkBuffer.locationOf(record.targetToCheck)),
-            linkBuffer.locationOfNearCall(record.fastCall));
+            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOfNearCall<JSEntryPtrTag>(record.slowCall)),
+            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOf<JSEntryPtrTag>(record.targetToCheck)),
+            linkBuffer.locationOfNearCall<JSEntryPtrTag>(record.fastCall));
     }
     
     for (JSDirectCallRecord& record : m_jsDirectCalls) {
         CallLinkInfo& info = *record.info;
-        linkBuffer.link(record.call, linkBuffer.locationOf(record.slowPath, NearCodePtrTag));
+        linkBuffer.link(record.call, linkBuffer.locationOf<NoPtrTag>(record.slowPath));
         info.setCallLocations(
-            CodeLocationLabel(),
-            linkBuffer.locationOf(record.slowPath, NearCodePtrTag),
-            linkBuffer.locationOfNearCall(record.call));
+            CodeLocationLabel<JSEntryPtrTag>(),
+            linkBuffer.locationOf<JSEntryPtrTag>(record.slowPath),
+            linkBuffer.locationOfNearCall<JSEntryPtrTag>(record.call));
     }
     
     for (JSDirectTailCallRecord& record : m_jsDirectTailCalls) {
         CallLinkInfo& info = *record.info;
         info.setCallLocations(
-            linkBuffer.locationOf(record.patchableJump),
-            linkBuffer.locationOf(record.slowPath, NearCodePtrTag),
-            linkBuffer.locationOfNearCall(record.call));
+            linkBuffer.locationOf<JSEntryPtrTag>(record.patchableJump),
+            linkBuffer.locationOf<JSEntryPtrTag>(record.slowPath),
+            linkBuffer.locationOfNearCall<JSEntryPtrTag>(record.call));
     }
     
-    MacroAssemblerCodeRef osrExitThunk = vm()->getCTIStub(osrExitGenerationThunkGenerator);
-    PtrTag osrExitThunkTag = ptrTag(DFGOSRExitPtrTag, vm());
-    CodeLocationLabel target = CodeLocationLabel(osrExitThunk.retaggedCode(osrExitThunkTag, NearCodePtrTag));
+    MacroAssemblerCodeRef<JITThunkPtrTag> osrExitThunk = vm()->getCTIStub(osrExitGenerationThunkGenerator);
+    auto target = CodeLocationLabel<JITThunkPtrTag>(osrExitThunk.code());
     for (unsigned i = 0; i < m_jitCode->osrExit.size(); ++i) {
         OSRExitCompilationInfo& info = m_exitCompilationInfo[i];
         if (!Options::useProbeOSRExit()) {
@@ -332,8 +325,8 @@
         }
         if (info.m_replacementSource.isSet()) {
             m_jitCode->common.jumpReplacements.append(JumpReplacement(
-                linkBuffer.locationOf(info.m_replacementSource, NearCodePtrTag),
-                linkBuffer.locationOf(info.m_replacementDestination, NearCodePtrTag)));
+                linkBuffer.locationOf<JSInternalPtrTag>(info.m_replacementSource),
+                linkBuffer.locationOf<OSRExitPtrTag>(info.m_replacementDestination)));
         }
     }
     
@@ -341,9 +334,9 @@
         ASSERT(m_exitSiteLabels.size() == m_jitCode->osrExit.size());
         for (unsigned i = 0; i < m_exitSiteLabels.size(); ++i) {
             Vector<Label>& labels = m_exitSiteLabels[i];
-            Vector<const void*> addresses;
+            Vector<MacroAssemblerCodePtr<JSInternalPtrTag>> addresses;
             for (unsigned j = 0; j < labels.size(); ++j)
-                addresses.append(linkBuffer.locationOf(labels[j], DFGOSRExitPtrTag).executableAddress());
+                addresses.append(linkBuffer.locationOf<JSInternalPtrTag>(labels[j]));
             m_graph.compilation()->addOSRExitSite(addresses);
         }
     } else
@@ -360,7 +353,7 @@
             // i.e, we explicitly emitted an exceptionCheck that we know will be caught in this machine frame.
             // If this *is set*, it means we will be landing at this code location from genericUnwind from an
             // exception thrown in a child call frame.
-            CodeLocationLabel catchLabel = linkBuffer.locationOf(info.m_replacementDestination, ExceptionHandlerPtrTag);
+            CodeLocationLabel<ExceptionHandlerPtrTag> catchLabel = linkBuffer.locationOf<ExceptionHandlerPtrTag>(info.m_replacementDestination);
             HandlerInfo newExceptionHandler = m_exceptionHandlerOSRExitCallSites[i].baselineExceptionHandler;
             CallSiteIndex callSite = m_exceptionHandlerOSRExitCallSites[i].callSiteIndex;
             newExceptionHandler.start = callSite.bits();
@@ -539,11 +532,11 @@
     m_jitCode->shrinkToFit();
     codeBlock()->shrinkToFit(CodeBlock::LateShrink);
 
-    linkBuffer->link(callArityFixup, FunctionPtr(vm()->getCTIStub(arityFixupGenerator).retaggedCode(ptrTag(ArityFixupPtrTag, vm()), NearCodePtrTag)));
+    linkBuffer->link(callArityFixup, FunctionPtr<JITThunkPtrTag>(vm()->getCTIStub(arityFixupGenerator).code()));
 
     disassemble(*linkBuffer);
 
-    MacroAssemblerCodePtr withArityCheck = linkBuffer->locationOf(m_arityCheck, CodePtrTag);
+    MacroAssemblerCodePtr<JSEntryPtrTag> withArityCheck = linkBuffer->locationOf<JSEntryPtrTag>(m_arityCheck);
 
     m_graph.m_plan.finalizer = std::make_unique<JITFinalizer>(
         m_graph.m_plan, m_jitCode.releaseNonNull(), WTFMove(linkBuffer), withArityCheck);
@@ -583,7 +576,7 @@
 {
     RELEASE_ASSERT(basicBlock.isCatchEntrypoint);
     RELEASE_ASSERT(basicBlock.intersectionOfCFAHasVisited); // An entrypoint is reachable by definition.
-    m_jitCode->common.appendCatchEntrypoint(basicBlock.bytecodeBegin, linkBuffer.locationOf(blockHead, ExceptionHandlerPtrTag).executableAddress(), WTFMove(argumentFormats));
+    m_jitCode->common.appendCatchEntrypoint(basicBlock.bytecodeBegin, linkBuffer.locationOf<ExceptionHandlerPtrTag>(blockHead), WTFMove(argumentFormats));
 }
 
 void JITCompiler::noticeOSREntry(BasicBlock& basicBlock, JITCompiler::Label blockHead, LinkBuffer& linkBuffer)
diff --git a/Source/JavaScriptCore/dfg/DFGJITCompiler.h b/Source/JavaScriptCore/dfg/DFGJITCompiler.h
index f288824..b6b2e2d 100644
--- a/Source/JavaScriptCore/dfg/DFGJITCompiler.h
+++ b/Source/JavaScriptCore/dfg/DFGJITCompiler.h
@@ -66,14 +66,14 @@
 // Every CallLinkRecord contains a reference to the call instruction & the function
 // that it needs to be linked to.
 struct CallLinkRecord {
-    CallLinkRecord(MacroAssembler::Call call, FunctionPtr function)
+    CallLinkRecord(MacroAssembler::Call call, FunctionPtr<OperationPtrTag> function)
         : m_call(call)
         , m_function(function)
     {
     }
 
     MacroAssembler::Call m_call;
-    FunctionPtr m_function;
+    FunctionPtr<OperationPtrTag> m_function;
 };
 
 struct InRecord {
@@ -156,10 +156,10 @@
     }
 
     // Add a call out from JIT code, without an exception check.
-    Call appendCall(const FunctionPtr function, PtrTag tag = CFunctionPtrTag)
+    Call appendCall(const FunctionPtr<CFunctionPtrTag> function)
     {
-        Call functionCall = call(tag);
-        m_calls.append(CallLinkRecord(functionCall, FunctionPtr(function, tag)));
+        Call functionCall = call(OperationPtrTag);
+        m_calls.append(CallLinkRecord(functionCall, function.retagged<OperationPtrTag>()));
         return functionCall;
     }
     
@@ -310,6 +310,8 @@
             , targetToCheck(targetToCheck)
             , info(info)
         {
+            ASSERT(fastCall.isFlagSet(Call::Near));
+            ASSERT(slowCall.isFlagSet(Call::Near));
         }
         
         Call fastCall;
@@ -324,6 +326,7 @@
             , slowPath(slowPath)
             , info(info)
         {
+            ASSERT(call.isFlagSet(Call::Near));
         }
         
         Call call;
@@ -338,6 +341,7 @@
             , slowPath(slowPath)
             , info(info)
         {
+            ASSERT(call.isFlagSet(Call::Near) && call.isFlagSet(Call::Tail));
         }
         
         PatchableJump patchableJump;
diff --git a/Source/JavaScriptCore/dfg/DFGJITFinalizer.cpp b/Source/JavaScriptCore/dfg/DFGJITFinalizer.cpp
index 0c60489..e46196c 100644
--- a/Source/JavaScriptCore/dfg/DFGJITFinalizer.cpp
+++ b/Source/JavaScriptCore/dfg/DFGJITFinalizer.cpp
@@ -37,7 +37,7 @@
 
 namespace JSC { namespace DFG {
 
-JITFinalizer::JITFinalizer(Plan& plan, Ref<JITCode>&& jitCode, std::unique_ptr<LinkBuffer> linkBuffer, MacroAssemblerCodePtr withArityCheck)
+JITFinalizer::JITFinalizer(Plan& plan, Ref<JITCode>&& jitCode, std::unique_ptr<LinkBuffer> linkBuffer, MacroAssemblerCodePtr<JSEntryPtrTag> withArityCheck)
     : Finalizer(plan)
     , m_jitCode(WTFMove(jitCode))
     , m_linkBuffer(WTFMove(linkBuffer))
@@ -56,7 +56,7 @@
 
 bool JITFinalizer::finalize()
 {
-    MacroAssemblerCodeRef codeRef = FINALIZE_DFG_CODE(*m_linkBuffer, CodePtrTag, "DFG JIT code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::DFGJIT)).data());
+    MacroAssemblerCodeRef<JSEntryPtrTag> codeRef = FINALIZE_DFG_CODE(*m_linkBuffer, JSEntryPtrTag, "DFG JIT code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::DFGJIT)).data());
     m_jitCode->initializeCodeRef(codeRef, codeRef.code());
     
     m_plan.codeBlock->setJITCode(m_jitCode.copyRef());
@@ -70,7 +70,7 @@
 {
     RELEASE_ASSERT(!m_withArityCheck.isEmptyValue());
     m_jitCode->initializeCodeRef(
-        FINALIZE_DFG_CODE(*m_linkBuffer, CodePtrTag, "DFG JIT code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::DFGJIT)).data()),
+        FINALIZE_DFG_CODE(*m_linkBuffer, JSEntryPtrTag, "DFG JIT code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::DFGJIT)).data()),
         m_withArityCheck);
     m_plan.codeBlock->setJITCode(m_jitCode.copyRef());
     
diff --git a/Source/JavaScriptCore/dfg/DFGJITFinalizer.h b/Source/JavaScriptCore/dfg/DFGJITFinalizer.h
index 9ac04e0..2fc2745 100644
--- a/Source/JavaScriptCore/dfg/DFGJITFinalizer.h
+++ b/Source/JavaScriptCore/dfg/DFGJITFinalizer.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -35,7 +35,7 @@
 
 class JITFinalizer : public Finalizer {
 public:
-    JITFinalizer(Plan&, Ref<JITCode>&&, std::unique_ptr<LinkBuffer>, MacroAssemblerCodePtr withArityCheck = MacroAssemblerCodePtr(MacroAssemblerCodePtr::EmptyValue));
+    JITFinalizer(Plan&, Ref<JITCode>&&, std::unique_ptr<LinkBuffer>, MacroAssemblerCodePtr<JSEntryPtrTag> withArityCheck = MacroAssemblerCodePtr<JSEntryPtrTag>(MacroAssemblerCodePtr<JSEntryPtrTag>::EmptyValue));
     virtual ~JITFinalizer();
     
     size_t codeSize() override;
@@ -47,7 +47,7 @@
     
     Ref<JITCode> m_jitCode;
     std::unique_ptr<LinkBuffer> m_linkBuffer;
-    MacroAssemblerCodePtr m_withArityCheck;
+    MacroAssemblerCodePtr<JSEntryPtrTag> m_withArityCheck;
 };
 
 } } // namespace JSC::DFG
diff --git a/Source/JavaScriptCore/dfg/DFGJumpReplacement.h b/Source/JavaScriptCore/dfg/DFGJumpReplacement.h
index 77d3938..88bd78b 100644
--- a/Source/JavaScriptCore/dfg/DFGJumpReplacement.h
+++ b/Source/JavaScriptCore/dfg/DFGJumpReplacement.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -33,7 +33,7 @@
 
 class JumpReplacement {
 public:
-    JumpReplacement(CodeLocationLabel source, CodeLocationLabel destination)
+    JumpReplacement(CodeLocationLabel<JSInternalPtrTag> source, CodeLocationLabel<OSRExitPtrTag> destination)
         : m_source(source)
         , m_destination(destination)
     {
@@ -44,8 +44,8 @@
     void* dataLocation() const { return m_source.dataLocation(); }
 
 private:
-    CodeLocationLabel m_source;
-    CodeLocationLabel m_destination;
+    CodeLocationLabel<JSInternalPtrTag> m_source;
+    CodeLocationLabel<OSRExitPtrTag> m_destination;
 };
 
 } } // namespace JSC::DFG
diff --git a/Source/JavaScriptCore/dfg/DFGNode.h b/Source/JavaScriptCore/dfg/DFGNode.h
index 70f9a3b..d201315 100644
--- a/Source/JavaScriptCore/dfg/DFGNode.h
+++ b/Source/JavaScriptCore/dfg/DFGNode.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -247,7 +247,7 @@
 };
 
 struct CallDOMGetterData {
-    PropertySlot::GetValueFunc customAccessorGetter { nullptr };
+    FunctionPtr<OperationPtrTag> customAccessorGetter;
     const DOMJIT::GetterSetter* domJIT { nullptr };
     DOMJIT::CallDOMGetterSnippet* snippet { nullptr };
     unsigned identifierNumber { 0 };
diff --git a/Source/JavaScriptCore/dfg/DFGOSREntry.cpp b/Source/JavaScriptCore/dfg/DFGOSREntry.cpp
index 3340ddc..13e1577 100644
--- a/Source/JavaScriptCore/dfg/DFGOSREntry.cpp
+++ b/Source/JavaScriptCore/dfg/DFGOSREntry.cpp
@@ -273,7 +273,7 @@
     if (Options::verboseOSR())
         dataLogF("    OSR using target PC %p.\n", targetPC);
     RELEASE_ASSERT(targetPC);
-    *bitwise_cast<void**>(scratch + 1) = retagCodePtr(targetPC, CodePtrTag, bitwise_cast<PtrTag>(exec));
+    *bitwise_cast<void**>(scratch + 1) = retagCodePtr(targetPC, JSEntryPtrTag, bitwise_cast<PtrTag>(exec));
 
     Register* pivot = scratch + 2 + CallFrame::headerSizeInRegisters;
     
@@ -337,7 +337,7 @@
     return scratch;
 }
 
-void* prepareCatchOSREntry(ExecState* exec, CodeBlock* codeBlock, unsigned bytecodeIndex)
+MacroAssemblerCodePtr<ExceptionHandlerPtrTag> prepareCatchOSREntry(ExecState* exec, CodeBlock* codeBlock, unsigned bytecodeIndex)
 { 
     ASSERT(codeBlock->jitType() == JITCode::DFGJIT || codeBlock->jitType() == JITCode::FTLJIT);
 
diff --git a/Source/JavaScriptCore/dfg/DFGOSREntry.h b/Source/JavaScriptCore/dfg/DFGOSREntry.h
index 456fa4c..bfb6e61 100644
--- a/Source/JavaScriptCore/dfg/DFGOSREntry.h
+++ b/Source/JavaScriptCore/dfg/DFGOSREntry.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011, 2013, 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -27,6 +27,7 @@
 
 #include "DFGAbstractValue.h"
 #include "DFGFlushFormat.h"
+#include "MacroAssemblerCodeRef.h"
 #include "Operands.h"
 #include <wtf/BitVector.h>
 
@@ -73,7 +74,7 @@
 struct CatchEntrypointData {
     // We use this when doing OSR entry at catch. We prove the arguments
     // are of the expected type before entering at a catch block.
-    void* machineCode;
+    MacroAssemblerCodePtr<ExceptionHandlerPtrTag> machineCode;
     Vector<FlushFormat> argumentFormats;
     unsigned bytecodeIndex;
 };
@@ -83,9 +84,9 @@
 void* prepareOSREntry(ExecState*, CodeBlock*, unsigned bytecodeIndex);
 
 // If null is returned, we can't OSR enter. If it's not null, it's the PC to jump to.
-void* prepareCatchOSREntry(ExecState*, CodeBlock*, unsigned bytecodeIndex);
+MacroAssemblerCodePtr<ExceptionHandlerPtrTag> prepareCatchOSREntry(ExecState*, CodeBlock*, unsigned bytecodeIndex);
 #else
-inline void* prepareOSREntry(ExecState*, CodeBlock*, unsigned) { return 0; }
+inline MacroAssemblerCodePtr<ExceptionHandlerPtrTag> prepareOSREntry(ExecState*, CodeBlock*, unsigned) { return nullptr; }
 #endif
 
 } } // namespace JSC::DFG
diff --git a/Source/JavaScriptCore/dfg/DFGOSRExit.cpp b/Source/JavaScriptCore/dfg/DFGOSRExit.cpp
index 2718457..48d8710 100644
--- a/Source/JavaScriptCore/dfg/DFGOSRExit.cpp
+++ b/Source/JavaScriptCore/dfg/DFGOSRExit.cpp
@@ -390,11 +390,10 @@
 
         CodeBlock* codeBlockForExit = baselineCodeBlockForOriginAndBaselineCodeBlock(exit.m_codeOrigin, baselineCodeBlock);
         const JITCodeMap& codeMap = codeBlockForExit->jitCodeMap();
-        CodeLocationLabel codeLocation = codeMap.find(exit.m_codeOrigin.bytecodeIndex);
+        CodeLocationLabel<JSEntryPtrTag> codeLocation = codeMap.find(exit.m_codeOrigin.bytecodeIndex);
         ASSERT(codeLocation);
 
-        PtrTag locationTag = ptrTag(CodePtrTag, codeBlockForExit, exit.m_codeOrigin.bytecodeIndex);
-        void* jumpTarget = codeLocation.retagged(locationTag, CodePtrTag).executableAddress();
+        void* jumpTarget = codeLocation.executableAddress();
 
         // Compute the value recoveries.
         Operands<ValueRecovery> operands;
@@ -791,7 +790,7 @@
 
 #if USE(POINTER_PROFILING)
             void* newEntrySP = cpu.fp<uint8_t*>() + inlineCallFrame->returnPCOffset() + sizeof(void*);
-            jumpTarget = tagCodePtr(jumpTarget, bitwise_cast<PtrTag>(newEntrySP));
+            jumpTarget = retagCodePtr(jumpTarget, JSEntryPtrTag, bitwise_cast<PtrTag>(newEntrySP));
 #endif
             frame.set<void*>(inlineCallFrame->returnPCOffset(), jumpTarget);
         }
@@ -866,7 +865,7 @@
     }
 
     vm.topCallFrame = context.fp<ExecState*>();
-    context.pc() = untagCodePtr(jumpTarget, CodePtrTag);
+    context.pc() = untagCodePtr<JSEntryPtrTag>(jumpTarget);
 }
 
 static void printOSRExit(Context& context, uint32_t osrExitIndex, const OSRExit& exit)
@@ -931,9 +930,9 @@
     return MacroAssembler::Jump(AssemblerLabel(m_patchableCodeOffset));
 }
 
-CodeLocationJump OSRExit::codeLocationForRepatch(CodeBlock* dfgCodeBlock) const
+CodeLocationJump<JSInternalPtrTag> OSRExit::codeLocationForRepatch(CodeBlock* dfgCodeBlock) const
 {
-    return CodeLocationJump(dfgCodeBlock->jitCode()->dataAddressAtOffset(m_patchableCodeOffset));
+    return CodeLocationJump<JSInternalPtrTag>(tagCodePtr<JSInternalPtrTag>(dfgCodeBlock->jitCode()->dataAddressAtOffset(m_patchableCodeOffset)));
 }
 
 void OSRExit::correctJump(LinkBuffer& linkBuffer)
@@ -995,19 +994,18 @@
         static_assert(std::is_same<decltype(operationCreateDirectArgumentsDuringExit), decltype(operationCreateClonedArgumentsDuringExit)>::value, "We assume these functions have the same signature below.");
         jit.setupArguments<decltype(operationCreateDirectArgumentsDuringExit)>(
             AssemblyHelpers::TrustedImmPtr(inlineCallFrame), GPRInfo::regT0, GPRInfo::regT1);
-        PtrTag tag = ptrTag(DFGOperationPtrTag, nextPtrTagID());
         switch (recovery.technique()) {
         case DirectArgumentsThatWereNotCreated:
-            jit.move(AssemblyHelpers::TrustedImmPtr(tagCFunctionPtr(operationCreateDirectArgumentsDuringExit, tag)), GPRInfo::nonArgGPR0);
+            jit.move(AssemblyHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationCreateDirectArgumentsDuringExit)), GPRInfo::nonArgGPR0);
             break;
         case ClonedArgumentsThatWereNotCreated:
-            jit.move(AssemblyHelpers::TrustedImmPtr(tagCFunctionPtr(operationCreateClonedArgumentsDuringExit, tag)), GPRInfo::nonArgGPR0);
+            jit.move(AssemblyHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationCreateClonedArgumentsDuringExit)), GPRInfo::nonArgGPR0);
             break;
         default:
             RELEASE_ASSERT_NOT_REACHED();
             break;
         }
-        jit.call(GPRInfo::nonArgGPR0, tag);
+        jit.call(GPRInfo::nonArgGPR0, OperationPtrTag);
         jit.storeCell(GPRInfo::returnValueGPR, AssemblyHelpers::addressFor(operand));
 
         alreadyAllocatedArguments.add(id, operand);
@@ -1046,7 +1044,6 @@
     if (exit.m_recoveryIndex != UINT_MAX)
         recovery = &codeBlock->jitCode()->dfg()->speculationRecovery[exit.m_recoveryIndex];
 
-    PtrTag exitTag = ptrTag(DFGOSRExitPtrTag, vm);
     {
         CCallHelpers jit(codeBlock);
 
@@ -1077,14 +1074,14 @@
         LinkBuffer patchBuffer(jit, codeBlock);
         exit.m_code = FINALIZE_CODE_IF(
             shouldDumpDisassembly() || Options::verboseOSR() || Options::verboseDFGOSRExit(),
-            patchBuffer, exitTag,
+            patchBuffer, OSRExitPtrTag,
             "DFG OSR exit #%u (%s, %s) from %s, with operands = %s",
                 exitIndex, toCString(exit.m_codeOrigin).data(),
                 exitKindToString(exit.m_kind), toCString(*codeBlock).data(),
                 toCString(ignoringContext<DumpContext>(operands)).data());
     }
 
-    MacroAssembler::repatchJump(exit.codeLocationForRepatch(codeBlock), CodeLocationLabel(exit.m_code.retaggedCode(exitTag, NearCodePtrTag)));
+    MacroAssembler::repatchJump(exit.codeLocationForRepatch(codeBlock), CodeLocationLabel<OSRExitPtrTag>(exit.m_code.code()));
 
     vm->osrExitJumpDestination = exit.m_code.code().executableAddress();
 }
diff --git a/Source/JavaScriptCore/dfg/DFGOSRExit.h b/Source/JavaScriptCore/dfg/DFGOSRExit.h
index 26d325f..4554975 100644
--- a/Source/JavaScriptCore/dfg/DFGOSRExit.h
+++ b/Source/JavaScriptCore/dfg/DFGOSRExit.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -145,9 +145,9 @@
     static void JIT_OPERATION compileOSRExit(ExecState*) WTF_INTERNAL;
     static void executeOSRExit(Probe::Context&);
 
+    // FIXME: <rdar://problem/39498244>.
     unsigned m_patchableCodeOffset { 0 };
-    
-    MacroAssemblerCodeRef m_code;
+    MacroAssemblerCodeRef<OSRExitPtrTag> m_code;
 
     RefPtr<OSRExitState> exitState;
     
@@ -158,7 +158,7 @@
 
     void setPatchableCodeOffset(MacroAssembler::PatchableJump);
     MacroAssembler::Jump getPatchableCodeOffsetAsJump() const;
-    CodeLocationJump codeLocationForRepatch(CodeBlock*) const;
+    CodeLocationJump<JSInternalPtrTag> codeLocationForRepatch(CodeBlock*) const;
     void correctJump(LinkBuffer&);
 
     unsigned m_streamIndex;
diff --git a/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp b/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp
index 3ff7378..36bae46 100644
--- a/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp
+++ b/Source/JavaScriptCore/dfg/DFGOSRExitCompilerCommon.cpp
@@ -109,9 +109,8 @@
     jit.move(GPRInfo::regT0, GPRInfo::argumentGPR0);
     jit.move(AssemblyHelpers::TrustedImmPtr(&exit), GPRInfo::argumentGPR1);
 #endif
-    PtrTag tag = ptrTag(DFGOperationPtrTag, nextPtrTagID());
-    jit.move(AssemblyHelpers::TrustedImmPtr(tagCFunctionPtr(triggerReoptimizationNow, tag)), GPRInfo::nonArgGPR0);
-    jit.call(GPRInfo::nonArgGPR0, tag);
+    jit.move(AssemblyHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(triggerReoptimizationNow)), GPRInfo::nonArgGPR0);
+    jit.call(GPRInfo::nonArgGPR0, OperationPtrTag);
     AssemblyHelpers::Jump doneAdjusting = jit.jump();
     
     tooFewFails.link(&jit);
@@ -188,7 +187,7 @@
                     baselineCodeBlockForCaller->getCallLinkInfoForBytecodeIndex(callBytecodeIndex);
                 RELEASE_ASSERT(callLinkInfo);
 
-                jumpTarget = callLinkInfo->callReturnLocation().executableAddress();
+                jumpTarget = callLinkInfo->callReturnLocation().untaggedExecutableAddress();
                 break;
             }
 
@@ -198,7 +197,7 @@
                     baselineCodeBlockForCaller->findStubInfo(CodeOrigin(callBytecodeIndex));
                 RELEASE_ASSERT(stubInfo);
 
-                jumpTarget = stubInfo->doneLocation().executableAddress();
+                jumpTarget = stubInfo->doneLocation().untaggedExecutableAddress();
                 break;
             }
 
@@ -276,9 +275,8 @@
 #endif
 
     jit.setupArguments<decltype(operationOSRWriteBarrier)>(owner);
-    PtrTag tag = ptrTag(DFGOperationPtrTag, nextPtrTagID());
-    jit.move(MacroAssembler::TrustedImmPtr(tagCFunctionPtr(operationOSRWriteBarrier, tag)), scratch);
-    jit.call(scratch, tag);
+    jit.move(MacroAssembler::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationOSRWriteBarrier)), scratch);
+    jit.call(scratch, OperationPtrTag);
 
 #if CPU(X86)
     jit.addPtr(MacroAssembler::TrustedImm32(sizeof(void*) * 4), MacroAssembler::stackPointerRegister);
@@ -319,12 +317,10 @@
     CodeBlock* codeBlockForExit = jit.baselineCodeBlockFor(exit.m_codeOrigin);
     ASSERT(codeBlockForExit == codeBlockForExit->baselineVersion());
     ASSERT(codeBlockForExit->jitType() == JITCode::BaselineJIT);
-    CodeLocationLabel codeLocation = codeBlockForExit->jitCodeMap().find(exit.m_codeOrigin.bytecodeIndex);
+    CodeLocationLabel<JSEntryPtrTag> codeLocation = codeBlockForExit->jitCodeMap().find(exit.m_codeOrigin.bytecodeIndex);
     ASSERT(codeLocation);
 
-    PtrTag locationTag = ptrTag(CodePtrTag, codeBlockForExit, exit.m_codeOrigin.bytecodeIndex);
-    PtrTag exitTag = ptrTag(DFGOSRExitPtrTag, nextPtrTagID());
-    void* jumpTarget = codeLocation.retagged(locationTag, exitTag).executableAddress();
+    void* jumpTarget = codeLocation.retagged<OSRExitPtrTag>().executableAddress();
     jit.addPtr(AssemblyHelpers::TrustedImm32(JIT::stackPointerOffsetFor(codeBlockForExit) * sizeof(Register)), GPRInfo::callFrameRegister, AssemblyHelpers::stackPointerRegister);
     if (exit.isExceptionHandler()) {
         // Since we're jumping to op_catch, we need to set callFrameForCatch.
@@ -332,7 +328,7 @@
     }
     
     jit.move(AssemblyHelpers::TrustedImmPtr(jumpTarget), GPRInfo::regT2);
-    jit.jump(GPRInfo::regT2, exitTag);
+    jit.jump(GPRInfo::regT2, OSRExitPtrTag);
 }
 
 } } // namespace JSC::DFG
diff --git a/Source/JavaScriptCore/dfg/DFGOperations.cpp b/Source/JavaScriptCore/dfg/DFGOperations.cpp
index 6c9217d..f8fa929 100644
--- a/Source/JavaScriptCore/dfg/DFGOperations.cpp
+++ b/Source/JavaScriptCore/dfg/DFGOperations.cpp
@@ -3044,7 +3044,7 @@
                 dataLog("OSR entry: From ", RawPointer(jitCode), " got entry block ", RawPointer(entryBlock), "\n");
             if (void* address = FTL::prepareOSREntry(exec, codeBlock, entryBlock, originBytecodeIndex, streamIndex)) {
                 CODEBLOCK_LOG_EVENT(entryBlock, "osrEntry", ("at bc#", originBytecodeIndex));
-                return retagCodePtr<char*>(address, CodePtrTag, bitwise_cast<PtrTag>(exec));
+                return retagCodePtr<char*>(address, JSEntryPtrTag, bitwise_cast<PtrTag>(exec));
             }
         }
     }
@@ -3166,7 +3166,7 @@
         exec, codeBlock, jitCode->osrEntryBlock(), originBytecodeIndex, streamIndex);
     if (!address)
         return nullptr;
-    return retagCodePtr<char*>(address, CodePtrTag, bitwise_cast<PtrTag>(exec));
+    return retagCodePtr<char*>(address, JSEntryPtrTag, bitwise_cast<PtrTag>(exec));
 }
 
 void JIT_OPERATION triggerTierUpNowInLoop(ExecState* exec, unsigned bytecodeIndex)
diff --git a/Source/JavaScriptCore/dfg/DFGSlowPathGenerator.h b/Source/JavaScriptCore/dfg/DFGSlowPathGenerator.h
index a91c72b..4ca8707 100644
--- a/Source/JavaScriptCore/dfg/DFGSlowPathGenerator.h
+++ b/Source/JavaScriptCore/dfg/DFGSlowPathGenerator.h
@@ -162,12 +162,11 @@
     : public CallSlowPathGenerator<JumpType, FunctionType, ResultType> {
 public:
     CallResultAndArgumentsSlowPathGenerator(
-        JumpType from, SpeculativeJIT* jit, FunctionType function, PtrTag callTag,
+        JumpType from, SpeculativeJIT* jit, FunctionType function,
         SpillRegistersMode spillMode, ExceptionCheckRequirement requirement, ResultType result, Arguments... arguments)
         : CallSlowPathGenerator<JumpType, FunctionType, ResultType>(
             from, jit, function, spillMode, requirement, result)
         , m_arguments(std::forward<Arguments>(arguments)...)
-        , m_callTag(callTag)
     {
     }
 
@@ -176,7 +175,7 @@
     void unpackAndGenerate(SpeculativeJIT* jit, std::index_sequence<ArgumentsIndex...>)
     {
         this->setUp(jit);
-        this->recordCall(jit->callOperation(this->m_function, m_callTag, extractResult(this->m_result), std::get<ArgumentsIndex>(m_arguments)...));
+        this->recordCall(jit->callOperation(this->m_function, extractResult(this->m_result), std::get<ArgumentsIndex>(m_arguments)...));
         this->tearDown(jit);
     }
 
@@ -186,36 +185,16 @@
     }
 
     std::tuple<Arguments...> m_arguments;
-    PtrTag m_callTag;
 };
 
 template<typename JumpType, typename FunctionType, typename ResultType, typename... Arguments>
 inline std::unique_ptr<SlowPathGenerator> slowPathCall(
-    JumpType from, SpeculativeJIT* jit, FunctionType function, PtrTag callTag,
-    SpillRegistersMode spillMode, ExceptionCheckRequirement requirement,
-    ResultType result, Arguments... arguments)
-{
-    return std::make_unique<CallResultAndArgumentsSlowPathGenerator<JumpType, FunctionType, ResultType, Arguments...>>(
-        from, jit, function, callTag, spillMode, requirement, result, arguments...);
-}
-
-template<typename JumpType, typename FunctionType, typename ResultType, typename... Arguments>
-inline std::unique_ptr<SlowPathGenerator> slowPathCall(
     JumpType from, SpeculativeJIT* jit, FunctionType function,
     SpillRegistersMode spillMode, ExceptionCheckRequirement requirement,
     ResultType result, Arguments... arguments)
 {
     return std::make_unique<CallResultAndArgumentsSlowPathGenerator<JumpType, FunctionType, ResultType, Arguments...>>(
-        from, jit, function, CFunctionPtrTag, spillMode, requirement, result, arguments...);
-}
-
-template<typename JumpType, typename FunctionType, typename ResultType, typename... Arguments>
-inline std::unique_ptr<SlowPathGenerator> slowPathCall(
-    JumpType from, SpeculativeJIT* jit, FunctionType function, PtrTag callTag,
-    ResultType result, Arguments... arguments)
-{
-    return slowPathCall(
-        from, jit, function, callTag, NeedToSpill, ExceptionCheckRequirement::CheckNeeded, result, arguments...);
+        from, jit, function, spillMode, requirement, result, arguments...);
 }
 
 template<typename JumpType, typename FunctionType, typename ResultType, typename... Arguments>
@@ -224,7 +203,7 @@
     ResultType result, Arguments... arguments)
 {
     return slowPathCall(
-        from, jit, function, CFunctionPtrTag, NeedToSpill, ExceptionCheckRequirement::CheckNeeded, result, arguments...);
+        from, jit, function, NeedToSpill, ExceptionCheckRequirement::CheckNeeded, result, arguments...);
 }
 
 template<typename JumpType, typename DestinationType, typename SourceType, unsigned numberOfAssignments>
diff --git a/Source/JavaScriptCore/dfg/DFGSpeculativeJIT.cpp b/Source/JavaScriptCore/dfg/DFGSpeculativeJIT.cpp
index fa96fd2..df69227 100644
--- a/Source/JavaScriptCore/dfg/DFGSpeculativeJIT.cpp
+++ b/Source/JavaScriptCore/dfg/DFGSpeculativeJIT.cpp
@@ -3932,8 +3932,6 @@
 
     bool shouldEmitProfiling = false;
     bool generatedInline = mathIC->generateInline(m_jit, *addICGenerationState, shouldEmitProfiling);
-    PtrTag mathICTag = ptrTag(MathICPtrTag, mathIC->instruction());
-
     if (generatedInline) {
         ASSERT(!addICGenerationState->slowPathJumps.empty());
 
@@ -3962,9 +3960,9 @@
             }
 
             if (addICGenerationState->shouldSlowPathRepatch)
-                addICGenerationState->slowPathCall = callOperation(bitwise_cast<J_JITOperation_EJJMic>(repatchingFunction), mathICTag, resultRegs, innerLeftRegs, innerRightRegs, TrustedImmPtr(mathIC));
+                addICGenerationState->slowPathCall = callOperation(bitwise_cast<J_JITOperation_EJJMic>(repatchingFunction), resultRegs, innerLeftRegs, innerRightRegs, TrustedImmPtr(mathIC));
             else
-                addICGenerationState->slowPathCall = callOperation(nonRepatchingFunction, mathICTag, resultRegs, innerLeftRegs, innerRightRegs);
+                addICGenerationState->slowPathCall = callOperation(nonRepatchingFunction, resultRegs, innerLeftRegs, innerRightRegs);
 
             silentFill(savePlans);
             m_jit.exceptionCheck();
@@ -3993,7 +3991,7 @@
         }
 
         flushRegisters();
-        callOperation(nonRepatchingFunction, mathICTag, resultRegs, leftRegs, rightRegs);
+        callOperation(nonRepatchingFunction, resultRegs, leftRegs, rightRegs);
         m_jit.exceptionCheck();
     }
 
@@ -4638,8 +4636,6 @@
 
     bool shouldEmitProfiling = false;
     bool generatedInline = mathIC->generateInline(m_jit, *icGenerationState, shouldEmitProfiling);
-    PtrTag mathICTag = ptrTag(MathICPtrTag, mathIC->instruction());
-
     if (generatedInline) {
         ASSERT(!icGenerationState->slowPathJumps.empty());
 
@@ -4658,9 +4654,9 @@
             silentSpill(savePlans);
 
             if (icGenerationState->shouldSlowPathRepatch)
-                icGenerationState->slowPathCall = callOperation(bitwise_cast<J_JITOperation_EJMic>(repatchingFunction), mathICTag, resultRegs, childRegs, TrustedImmPtr(mathIC));
+                icGenerationState->slowPathCall = callOperation(bitwise_cast<J_JITOperation_EJMic>(repatchingFunction), resultRegs, childRegs, TrustedImmPtr(mathIC));
             else
-                icGenerationState->slowPathCall = callOperation(nonRepatchingFunction, mathICTag, resultRegs, childRegs);
+                icGenerationState->slowPathCall = callOperation(nonRepatchingFunction, resultRegs, childRegs);
 
             silentFill(savePlans);
             m_jit.exceptionCheck();
@@ -4681,7 +4677,7 @@
         });
     } else {
         flushRegisters();
-        callOperation(nonRepatchingFunction, mathICTag, resultRegs, childRegs);
+        callOperation(nonRepatchingFunction, resultRegs, childRegs);
         m_jit.exceptionCheck();
     }
 
@@ -9000,6 +8996,7 @@
     JSValueRegs resultRegs = result.regs();
 
     flushRegisters();
+    assertIsTaggedWith(reinterpret_cast<void*>(signature->unsafeFunction), CFunctionPtrTag);
     unsigned argumentCountIncludingThis = signature->argumentCount + 1;
     switch (argumentCountIncludingThis) {
     case 1:
@@ -9024,7 +9021,7 @@
 {
     DOMJIT::CallDOMGetterSnippet* snippet = node->callDOMGetterData()->snippet;
     if (!snippet) {
-        auto* getter = node->callDOMGetterData()->customAccessorGetter;
+        FunctionPtr<OperationPtrTag> getter = node->callDOMGetterData()->customAccessorGetter;
         SpeculateCellOperand base(this, node->child1());
         JSValueRegsTemporary result(this);
 
@@ -9035,7 +9032,7 @@
         m_jit.setupArguments<J_JITOperation_EJI>(CCallHelpers::CellValue(baseGPR), identifierUID(node->callDOMGetterData()->identifierNumber));
         m_jit.storePtr(GPRInfo::callFrameRegister, &m_jit.vm()->topCallFrame);
         m_jit.emitStoreCodeOrigin(m_currentNode->origin.semantic);
-        m_jit.appendCall(getter);
+        m_jit.appendCall(getter.retagged<CFunctionPtrTag>());
         m_jit.setupResults(resultRegs);
 
         m_jit.exceptionCheck();
@@ -10222,8 +10219,7 @@
 #if USE(JSVALUE64)
     m_jit.xor64(poisonScratch, scratch);
 #endif
-    PtrTag tag = ptrTag(SwitchTablePtrTag, &table);
-    m_jit.jump(scratch, tag);
+    m_jit.jump(scratch, JSSwitchPtrTag);
     data->didUseJumpTable = true;
 }
 
@@ -10249,8 +10245,6 @@
 
         value.use();
 
-        SimpleJumpTable& table = m_jit.codeBlock()->switchJumpTable(data->switchTableIndex);
-        PtrTag tag = ptrTag(SwitchTablePtrTag, &table);
         auto notInt32 = m_jit.branchIfNotInt32(valueRegs);
         emitSwitchIntJump(data, valueRegs.payloadGPR(), scratch, scratch2);
         notInt32.link(&m_jit);
@@ -10259,7 +10253,7 @@
         callOperation(operationFindSwitchImmTargetForDouble, scratch, valueRegs, data->switchTableIndex);
         silentFillAllRegisters();
 
-        m_jit.jump(scratch, tag);
+        m_jit.jump(scratch, JSSwitchPtrTag);
         noResult(node, UseChildrenCalledExplicitly);
         break;
     }
@@ -10516,16 +10510,13 @@
         }
         totalLength += string->length();
     }
-    
-    auto* codeBlock = m_jit.codeBlock();
+
     if (!canDoBinarySwitch || totalLength > Options::maximumBinaryStringSwitchTotalLength()) {
-        StringJumpTable& table = codeBlock->stringSwitchJumpTable(data->switchTableIndex);
-        PtrTag tag = ptrTag(SwitchTablePtrTag, &table);
         flushRegisters();
         callOperation(
             operationSwitchString, string, static_cast<size_t>(data->switchTableIndex), string);
         m_jit.exceptionCheck();
-        m_jit.jump(string, tag);
+        m_jit.jump(string, JSSwitchPtrTag);
         return;
     }
     
@@ -10558,15 +10549,12 @@
     emitBinarySwitchStringRecurse(
         data, cases, 0, 0, cases.size(), string, lengthGPR, tempGPR, 0, false);
     
-    StringJumpTable& table = codeBlock->stringSwitchJumpTable(data->switchTableIndex);
-    PtrTag tag = ptrTag(SwitchTablePtrTag, &table);
-
     slowCases.link(&m_jit);
     silentSpillAllRegisters(string);
     callOperation(operationSwitchString, string, static_cast<size_t>(data->switchTableIndex), string);
     silentFillAllRegisters();
     m_jit.exceptionCheck();
-    m_jit.jump(string, tag);
+    m_jit.jump(string, JSSwitchPtrTag);
 }
 
 void SpeculativeJIT::emitSwitchString(Node* node, SwitchData* data)
@@ -12713,7 +12701,7 @@
     }
     }
 
-    addSlowPathGenerator(slowPathCall(slowCases, this, operationHasIndexedPropertyByInt, HasPropertyPtrTag, resultGPR, baseGPR, indexGPR, static_cast<int32_t>(node->internalMethodType())));
+    addSlowPathGenerator(slowPathCall(slowCases, this, operationHasIndexedPropertyByInt, resultGPR, baseGPR, indexGPR, static_cast<int32_t>(node->internalMethodType())));
 
     unblessedBooleanResult(resultGPR, node);
 }
@@ -12781,7 +12769,7 @@
 
     done.link(&m_jit);
 
-    addSlowPathGenerator(slowPathCall(slowPath, this, operationGetByValCell, GetPropertyPtrTag, resultRegs, baseGPR, CCallHelpers::CellValue(propertyGPR)));
+    addSlowPathGenerator(slowPathCall(slowPath, this, operationGetByValCell, resultRegs, baseGPR, CCallHelpers::CellValue(propertyGPR)));
 
     jsValueResult(resultRegs, node);
 #endif
@@ -12889,7 +12877,7 @@
     slowCases.append(gen.slowPathJump());
 
     auto slowPath = slowPathCall(
-        slowCases, this, gen.slowPathFunction(), PutPropertyPtrTag, NoResult, gen.stubInfo(), valueRegs,
+        slowCases, this, gen.slowPathFunction(), NoResult, gen.stubInfo(), valueRegs,
         CCallHelpers::CellValue(baseGPR), identifierUID(identifierNumber));
 
     m_jit.addPutById(gen, slowPath.get());
diff --git a/Source/JavaScriptCore/dfg/DFGSpeculativeJIT.h b/Source/JavaScriptCore/dfg/DFGSpeculativeJIT.h
index 0ccc5a3..bbf174c 100644
--- a/Source/JavaScriptCore/dfg/DFGSpeculativeJIT.h
+++ b/Source/JavaScriptCore/dfg/DFGSpeculativeJIT.h
@@ -931,30 +931,10 @@
     std::enable_if_t<
         FunctionTraits<OperationType>::hasResult,
     JITCompiler::Call>
-    callOperation(OperationType operation, PtrTag tag, ResultRegType result, Args... args)
-    {
-        m_jit.setupArguments<OperationType>(args...);
-        return appendCallSetResult(operation, tag, result);
-    }
-
-    template<typename OperationType, typename ResultRegType, typename... Args>
-    std::enable_if_t<
-        FunctionTraits<OperationType>::hasResult,
-    JITCompiler::Call>
     callOperation(OperationType operation, ResultRegType result, Args... args)
     {
-        return callOperation(operation, CFunctionPtrTag, result, args...);
-    }
-
-    template<typename OperationType, typename Arg, typename... Args>
-    std::enable_if_t<
-        !FunctionTraits<OperationType>::hasResult
-        && !std::is_same<Arg, NoResultTag>::value,
-    JITCompiler::Call>
-    callOperation(OperationType operation, PtrTag tag, Arg arg, Args... args)
-    {
-        m_jit.setupArguments<OperationType>(arg, args...);
-        return appendCall(operation, tag);
+        m_jit.setupArguments<OperationType>(args...);
+        return appendCallSetResult(operation, result);
     }
 
     template<typename OperationType, typename Arg, typename... Args>
@@ -964,17 +944,8 @@
     JITCompiler::Call>
     callOperation(OperationType operation, Arg arg, Args... args)
     {
-        return callOperation(operation, CFunctionPtrTag, arg, args...);
-    }
-
-    template<typename OperationType, typename... Args>
-    std::enable_if_t<
-        !FunctionTraits<OperationType>::hasResult,
-    JITCompiler::Call>
-    callOperation(OperationType operation, PtrTag tag, NoResultTag, Args... args)
-    {
-        m_jit.setupArguments<OperationType>(args...);
-        return appendCall(operation, tag);
+        m_jit.setupArguments<OperationType>(arg, args...);
+        return appendCall(operation);
     }
 
     template<typename OperationType, typename... Args>
@@ -983,17 +954,8 @@
     JITCompiler::Call>
     callOperation(OperationType operation, NoResultTag, Args... args)
     {
-        return callOperation(operation, CFunctionPtrTag, NoResult, args...);
-    }
-
-    template<typename OperationType>
-    std::enable_if_t<
-        !FunctionTraits<OperationType>::hasResult,
-    JITCompiler::Call>
-    callOperation(OperationType operation, PtrTag tag)
-    {
-        m_jit.setupArguments<OperationType>();
-        return appendCall(operation, tag);
+        m_jit.setupArguments<OperationType>(args...);
+        return appendCall(operation);
     }
 
     template<typename OperationType>
@@ -1002,7 +964,8 @@
     JITCompiler::Call>
     callOperation(OperationType operation)
     {
-        return callOperation(operation, CFunctionPtrTag);
+        m_jit.setupArguments<OperationType>();
+        return appendCall(operation);
     }
 
 #undef FIRST_ARGUMENT_TYPE
@@ -1039,62 +1002,56 @@
 #endif
 
     // These methods add call instructions, optionally setting results, and optionally rolling back the call frame on an exception.
-    JITCompiler::Call appendCall(const FunctionPtr function, PtrTag tag = CFunctionPtrTag)
+    JITCompiler::Call appendCall(const FunctionPtr<CFunctionPtrTag> function)
     {
         prepareForExternalCall();
         m_jit.emitStoreCodeOrigin(m_currentNode->origin.semantic);
-        return m_jit.appendCall(function, tag);
+        return m_jit.appendCall(function);
     }
-    JITCompiler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr function)
+
+    JITCompiler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr<CFunctionPtrTag> function)
     {
         JITCompiler::Call call = appendCall(function);
         m_jit.exceptionCheckWithCallFrameRollback();
         return call;
     }
-    JITCompiler::Call appendCallWithCallFrameRollbackOnExceptionSetResult(const FunctionPtr function, GPRReg result)
+
+    JITCompiler::Call appendCallWithCallFrameRollbackOnExceptionSetResult(const FunctionPtr<CFunctionPtrTag> function, GPRReg result)
     {
         JITCompiler::Call call = appendCallWithCallFrameRollbackOnException(function);
         if ((result != InvalidGPRReg) && (result != GPRInfo::returnValueGPR))
             m_jit.move(GPRInfo::returnValueGPR, result);
         return call;
     }
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, PtrTag tag, GPRReg result)
+
+    JITCompiler::Call appendCallSetResult(const FunctionPtr<CFunctionPtrTag> function, GPRReg result)
     {
-        JITCompiler::Call call = appendCall(function, tag);
+        JITCompiler::Call call = appendCall(function);
         if (result != InvalidGPRReg)
             m_jit.move(GPRInfo::returnValueGPR, result);
         return call;
     }
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, GPRReg result)
+
+    JITCompiler::Call appendCallSetResult(const FunctionPtr<CFunctionPtrTag> function, GPRReg result1, GPRReg result2)
     {
-        return appendCallSetResult(function, CFunctionPtrTag, result);
-    }
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, PtrTag tag, GPRReg result1, GPRReg result2)
-    {
-        JITCompiler::Call call = appendCall(function, tag);
+        JITCompiler::Call call = appendCall(function);
         m_jit.setupResults(result1, result2);
         return call;
     }
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, GPRReg result1, GPRReg result2)
-    {
-        return appendCallSetResult(function, CFunctionPtrTag, result1, result2);
-    }
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, PtrTag tag, JSValueRegs resultRegs)
+
+    JITCompiler::Call appendCallSetResult(const FunctionPtr<CFunctionPtrTag> function, JSValueRegs resultRegs)
     {
 #if USE(JSVALUE64)
-        return appendCallSetResult(function, tag, resultRegs.gpr());
+        return appendCallSetResult(function, resultRegs.gpr());
 #else
-        return appendCallSetResult(function, tag, resultRegs.payloadGPR(), resultRegs.tagGPR());
+        return appendCallSetResult(function, resultRegs.payloadGPR(), resultRegs.tagGPR());
 #endif
     }
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, JSValueRegs resultRegs)
-    {
-        return appendCallSetResult(function, CFunctionPtrTag, resultRegs);
-    }
+
 #if CPU(X86)
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, PtrTag tag, FPRReg result)
+    JITCompiler::Call appendCallSetResult(const FunctionPtr<CFunctionPtrTag> function, FPRReg result)
     {
-        JITCompiler::Call call = appendCall(function, tag);
+        JITCompiler::Call call = appendCall(function);
         if (result != InvalidFPRReg) {
             m_jit.assembler().fstpl(0, JITCompiler::stackPointerRegister);
             m_jit.loadDouble(JITCompiler::stackPointerRegister, result);
@@ -1102,26 +1059,22 @@
         return call;
     }
 #elif CPU(ARM) && !CPU(ARM_HARDFP)
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, PtrTag tag, FPRReg result)
+    JITCompiler::Call appendCallSetResult(const FunctionPtr<CFunctionPtrTag> function, FPRReg result)
     {
-        JITCompiler::Call call = appendCall(function, tag);
+        JITCompiler::Call call = appendCall(function);
         if (result != InvalidFPRReg)
             m_jit.assembler().vmov(result, GPRInfo::returnValueGPR, GPRInfo::returnValueGPR2);
         return call;
     }
 #else // CPU(X86_64) || (CPU(ARM) && CPU(ARM_HARDFP)) || CPU(ARM64) || CPU(MIPS)
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, PtrTag tag, FPRReg result)
+    JITCompiler::Call appendCallSetResult(const FunctionPtr<CFunctionPtrTag> function, FPRReg result)
     {
-        JITCompiler::Call call = appendCall(function, tag);
+        JITCompiler::Call call = appendCall(function);
         if (result != InvalidFPRReg)
             m_jit.moveDouble(FPRInfo::returnValueFPR, result);
         return call;
     }
 #endif
-    JITCompiler::Call appendCallSetResult(const FunctionPtr function, FPRReg result)
-    {
-        return appendCallSetResult(function, CFunctionPtrTag, result);
-    }
 
     void branchDouble(JITCompiler::DoubleCondition cond, FPRReg left, FPRReg right, BasicBlock* destination)
     {
diff --git a/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp b/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp
index 3d511e4..5c980e4 100644
--- a/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp
+++ b/Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp
@@ -179,7 +179,7 @@
     slowCases.append(gen.slowPathJump());
 
     auto slowPath = slowPathCall(
-        slowCases, this, appropriateOptimizingGetByIdFunction(type), GetPropertyPtrTag,
+        slowCases, this, appropriateOptimizingGetByIdFunction(type),
         spillMode, ExceptionCheckRequirement::CheckNeeded,
         resultGPR, gen.stubInfo(), baseGPR, identifierUID(identifierNumber));
     
@@ -207,7 +207,7 @@
     slowCases.append(gen.slowPathJump());
     
     auto slowPath = slowPathCall(
-        slowCases, this, operationGetByIdWithThisOptimize, GetPropertyPtrTag,
+        slowCases, this, operationGetByIdWithThisOptimize,
         DontSpill, ExceptionCheckRequirement::CheckNeeded,
         resultGPR, gen.stubInfo(), baseGPR, thisGPR, identifierUID(identifierNumber));
     
@@ -2506,7 +2506,7 @@
             
             addSlowPathGenerator(
                 slowPathCall(
-                    slowCases, this, operationGetByValObjectInt, GetPropertyPtrTag,
+                    slowCases, this, operationGetByValObjectInt,
                     result.gpr(), baseReg, propertyReg));
             
             jsValueResult(resultReg, node);
@@ -2562,7 +2562,7 @@
             
             addSlowPathGenerator(
                 slowPathCall(
-                    slowCases, this, operationGetByValObjectInt, GetPropertyPtrTag,
+                    slowCases, this, operationGetByValObjectInt,
                     result.gpr(), baseReg, propertyReg));
             
             jsValueResult(resultReg, node);
@@ -2614,7 +2614,7 @@
     
             addSlowPathGenerator(
                 slowPathCall(
-                    slowCases, this, operationGetByValObjectInt, GetPropertyPtrTag,
+                    slowCases, this, operationGetByValObjectInt,
                     result.gpr(), baseReg, propertyReg));
             
             jsValueResult(resultReg, node);
@@ -2773,7 +2773,7 @@
                     m_jit.codeBlock()->isStrictMode()
                         ? (node->op() == PutByValDirect ? operationPutByValDirectBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsStrict)
                         : (node->op() == PutByValDirect ? operationPutByValDirectBeyondArrayBoundsNonStrict : operationPutByValBeyondArrayBoundsNonStrict),
-                    PutPropertyPtrTag, NoResult, baseReg, propertyReg, valueReg));
+                    NoResult, baseReg, propertyReg, valueReg));
             }
 
             noResult(node, UseChildrenCalledExplicitly);
@@ -2857,7 +2857,7 @@
                     m_jit.codeBlock()->isStrictMode()
                         ? (node->op() == PutByValDirect ? operationPutByValDirectBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsStrict)
                         : (node->op() == PutByValDirect ? operationPutByValDirectBeyondArrayBoundsNonStrict : operationPutByValBeyondArrayBoundsNonStrict),
-                    PutPropertyPtrTag, NoResult, baseReg, propertyReg, valueReg));
+                    NoResult, baseReg, propertyReg, valueReg));
             }
 
             noResult(node, UseChildrenCalledExplicitly);
diff --git a/Source/JavaScriptCore/dfg/DFGThunks.cpp b/Source/JavaScriptCore/dfg/DFGThunks.cpp
index f3c6824..78871bb 100644
--- a/Source/JavaScriptCore/dfg/DFGThunks.cpp
+++ b/Source/JavaScriptCore/dfg/DFGThunks.cpp
@@ -40,16 +40,15 @@
 
 namespace JSC { namespace DFG {
 
-MacroAssemblerCodeRef osrExitThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> osrExitThunkGenerator(VM* vm)
 {
     MacroAssembler jit;
     jit.probe(OSRExit::executeOSRExit, vm);
-    PtrTag osrExitThunkTag = ptrTag(DFGOSRExitPtrTag, vm);
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, osrExitThunkTag, "DFG OSR exit thunk");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "DFG OSR exit thunk");
 }
 
-MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> osrExitGenerationThunkGenerator(VM* vm)
 {
     MacroAssembler jit;
 
@@ -83,8 +82,7 @@
     jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
 #endif
 
-    PtrTag callTag = ptrTag(DFGOperationPtrTag, nextPtrTagID());
-    MacroAssembler::Call functionCall = jit.call(callTag);
+    MacroAssembler::Call functionCall = jit.call(OperationPtrTag);
 
     jit.move(MacroAssembler::TrustedImmPtr(scratchBuffer->addressOfActiveLength()), GPRInfo::regT0);
     jit.storePtr(MacroAssembler::TrustedImmPtr(nullptr), MacroAssembler::Address(GPRInfo::regT0));
@@ -101,17 +99,16 @@
 #endif
     }
 
-    jit.jump(MacroAssembler::AbsoluteAddress(&vm->osrExitJumpDestination), ptrTag(DFGOSRExitPtrTag, vm));
+    jit.jump(MacroAssembler::AbsoluteAddress(&vm->osrExitJumpDestination), OSRExitPtrTag);
 
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
     
-    patchBuffer.link(functionCall, FunctionPtr(OSRExit::compileOSRExit, callTag));
+    patchBuffer.link(functionCall, FunctionPtr<OperationPtrTag>(OSRExit::compileOSRExit));
 
-    PtrTag osrExitThunkTag = ptrTag(DFGOSRExitPtrTag, vm);
-    return FINALIZE_CODE(patchBuffer, osrExitThunkTag, "DFG OSR exit generation thunk");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "DFG OSR exit generation thunk");
 }
 
-MacroAssemblerCodeRef osrEntryThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> osrEntryThunkGenerator(VM* vm)
 {
     AssemblyHelpers jit(nullptr);
 
@@ -149,9 +146,8 @@
 
     jit.jump(GPRInfo::regT1, GPRInfo::callFrameRegister);
 
-    PtrTag osrEntryThunkTag = ptrTag(DFGOSREntryPtrTag, vm);
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, osrEntryThunkTag, "DFG OSR entry thunk");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "DFG OSR entry thunk");
 }
 
 } } // namespace JSC::DFG
diff --git a/Source/JavaScriptCore/dfg/DFGThunks.h b/Source/JavaScriptCore/dfg/DFGThunks.h
index 03cb34d..a4d560e 100644
--- a/Source/JavaScriptCore/dfg/DFGThunks.h
+++ b/Source/JavaScriptCore/dfg/DFGThunks.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -35,9 +35,9 @@
 
 namespace DFG {
 
-MacroAssemblerCodeRef osrExitThunkGenerator(VM*);
-MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM*);
-MacroAssemblerCodeRef osrEntryThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> osrExitThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> osrExitGenerationThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> osrEntryThunkGenerator(VM*);
 
 } } // namespace JSC::DFG
 
diff --git a/Source/JavaScriptCore/disassembler/ARM64Disassembler.cpp b/Source/JavaScriptCore/disassembler/ARM64Disassembler.cpp
index d775889..093b3fa 100644
--- a/Source/JavaScriptCore/disassembler/ARM64Disassembler.cpp
+++ b/Source/JavaScriptCore/disassembler/ARM64Disassembler.cpp
@@ -33,11 +33,11 @@
 
 namespace JSC {
 
-bool tryToDisassemble(const MacroAssemblerCodePtr& codePtr, size_t size, const char* prefix, PrintStream& out)
+bool tryToDisassemble(const MacroAssemblerCodePtr<DisassemblyPtrTag>& codePtr, size_t size, const char* prefix, PrintStream& out)
 {
     A64DOpcode arm64Opcode;
 
-    uint32_t* currentPC = removeCodePtrTag<uint32_t*>(codePtr.executableAddress());
+    uint32_t* currentPC = codePtr.untaggedExecutableAddress<uint32_t*>();
     size_t byteCount = size;
 
     while (byteCount) {
diff --git a/Source/JavaScriptCore/disassembler/ARMv7Disassembler.cpp b/Source/JavaScriptCore/disassembler/ARMv7Disassembler.cpp
index d218f7b..6be31c8 100644
--- a/Source/JavaScriptCore/disassembler/ARMv7Disassembler.cpp
+++ b/Source/JavaScriptCore/disassembler/ARMv7Disassembler.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -33,11 +33,11 @@
 
 namespace JSC {
 
-bool tryToDisassemble(const MacroAssemblerCodePtr& codePtr, size_t size, const char* prefix, PrintStream& out)
+bool tryToDisassemble(const MacroAssemblerCodePtr<DisassemblyPtrTag>& codePtr, size_t size, const char* prefix, PrintStream& out)
 {
     ARMv7DOpcode armOpcode;
 
-    uint16_t* currentPC = reinterpret_cast<uint16_t*>(reinterpret_cast<uintptr_t>(codePtr.executableAddress())&~1);
+    uint16_t* currentPC = reinterpret_cast<uint16_t*>(reinterpret_cast<uintptr_t>(codePtr.untaggedExecutableAddress())&~1);
     uint16_t* endPC = currentPC + (size / sizeof(uint16_t));
 
     while (currentPC < endPC) {
diff --git a/Source/JavaScriptCore/disassembler/Disassembler.cpp b/Source/JavaScriptCore/disassembler/Disassembler.cpp
index 27b4fd1..19b115c 100644
--- a/Source/JavaScriptCore/disassembler/Disassembler.cpp
+++ b/Source/JavaScriptCore/disassembler/Disassembler.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -37,12 +37,12 @@
 
 namespace JSC {
 
-void disassemble(const MacroAssemblerCodePtr& codePtr, size_t size, const char* prefix, PrintStream& out)
+void disassemble(const MacroAssemblerCodePtr<DisassemblyPtrTag>& codePtr, size_t size, const char* prefix, PrintStream& out)
 {
     if (tryToDisassemble(codePtr, size, prefix, out))
         return;
     
-    out.printf("%sdisassembly not available for range %p...%p\n", prefix, codePtr.executableAddress(), codePtr.executableAddress<char*>() + size);
+    out.printf("%sdisassembly not available for range %p...%p\n", prefix, codePtr.untaggedExecutableAddress(), codePtr.untaggedExecutableAddress<char*>() + size);
 }
 
 namespace {
@@ -64,7 +64,7 @@
     }
     
     char* header { nullptr };
-    MacroAssemblerCodeRef codeRef;
+    MacroAssemblerCodeRef<DisassemblyPtrTag> codeRef;
     size_t size { 0 };
     const char* prefix { nullptr };
 };
@@ -128,7 +128,7 @@
 } // anonymous namespace
 
 void disassembleAsynchronously(
-    const CString& header, const MacroAssemblerCodeRef& codeRef, size_t size, const char* prefix)
+    const CString& header, const MacroAssemblerCodeRef<DisassemblyPtrTag>& codeRef, size_t size, const char* prefix)
 {
     std::unique_ptr<DisassemblyTask> task = std::make_unique<DisassemblyTask>();
     task->header = strdup(header.data()); // Yuck! We need this because CString does racy refcounting.
diff --git a/Source/JavaScriptCore/disassembler/Disassembler.h b/Source/JavaScriptCore/disassembler/Disassembler.h
index 9317449..dd45e66 100644
--- a/Source/JavaScriptCore/disassembler/Disassembler.h
+++ b/Source/JavaScriptCore/disassembler/Disassembler.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2013, 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,19 +26,20 @@
 #pragma once
 
 #include "JSExportMacros.h"
+#include "PtrTag.h"
 #include <functional>
 #include <wtf/PrintStream.h>
 #include <wtf/text/CString.h>
 
 namespace JSC {
 
-class MacroAssemblerCodePtr;
-class MacroAssemblerCodeRef;
+template<PtrTag> class MacroAssemblerCodePtr;
+template<PtrTag> class MacroAssemblerCodeRef;
 
 #if ENABLE(DISASSEMBLER)
-bool tryToDisassemble(const MacroAssemblerCodePtr&, size_t, const char* prefix, PrintStream&);
+bool tryToDisassemble(const MacroAssemblerCodePtr<DisassemblyPtrTag>&, size_t, const char* prefix, PrintStream&);
 #else
-inline bool tryToDisassemble(const MacroAssemblerCodePtr&, size_t, const char*, PrintStream&)
+inline bool tryToDisassemble(const MacroAssemblerCodePtr<DisassemblyPtrTag>&, size_t, const char*, PrintStream&)
 {
     return false;
 }
@@ -46,12 +47,12 @@
 
 // Prints either the disassembly, or a line of text indicating that disassembly failed and
 // the range of machine code addresses.
-void disassemble(const MacroAssemblerCodePtr&, size_t, const char* prefix, PrintStream& out);
+void disassemble(const MacroAssemblerCodePtr<DisassemblyPtrTag>&, size_t, const char* prefix, PrintStream& out);
 
 // Asynchronous disassembly. This happens on another thread, and calls the provided
 // callback when the disassembly is done.
 void disassembleAsynchronously(
-    const CString& header, const MacroAssemblerCodeRef&, size_t, const char* prefix);
+    const CString& header, const MacroAssemblerCodeRef<DisassemblyPtrTag>&, size_t, const char* prefix);
 
 JS_EXPORT_PRIVATE void waitForAsynchronousDisassembly();
 
diff --git a/Source/JavaScriptCore/disassembler/UDis86Disassembler.cpp b/Source/JavaScriptCore/disassembler/UDis86Disassembler.cpp
index 26abe2d..1ff8155 100644
--- a/Source/JavaScriptCore/disassembler/UDis86Disassembler.cpp
+++ b/Source/JavaScriptCore/disassembler/UDis86Disassembler.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -33,17 +33,17 @@
 
 namespace JSC {
 
-bool tryToDisassembleWithUDis86(const MacroAssemblerCodePtr& codePtr, size_t size, const char* prefix, PrintStream& out)
+bool tryToDisassembleWithUDis86(const MacroAssemblerCodePtr<DisassemblyPtrTag>& codePtr, size_t size, const char* prefix, PrintStream& out)
 {
     ud_t disassembler;
     ud_init(&disassembler);
-    ud_set_input_buffer(&disassembler, codePtr.executableAddress<unsigned char*>(), size);
+    ud_set_input_buffer(&disassembler, codePtr.untaggedExecutableAddress<unsigned char*>(), size);
 #if CPU(X86_64)
     ud_set_mode(&disassembler, 64);
 #else
     ud_set_mode(&disassembler, 32);
 #endif
-    ud_set_pc(&disassembler, codePtr.executableAddress<uintptr_t>());
+    ud_set_pc(&disassembler, codePtr.untaggedExecutableAddress<uintptr_t>());
     ud_set_syntax(&disassembler, UD_SYN_ATT);
     
     uint64_t currentPC = disassembler.pc;
diff --git a/Source/JavaScriptCore/disassembler/UDis86Disassembler.h b/Source/JavaScriptCore/disassembler/UDis86Disassembler.h
index 8de53ea..7f89069 100644
--- a/Source/JavaScriptCore/disassembler/UDis86Disassembler.h
+++ b/Source/JavaScriptCore/disassembler/UDis86Disassembler.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -31,11 +31,11 @@
 
 #if USE(UDIS86)
 
-bool tryToDisassembleWithUDis86(const MacroAssemblerCodePtr& codePtr, size_t size, const char* prefix, PrintStream& out);
+bool tryToDisassembleWithUDis86(const MacroAssemblerCodePtr<DisassemblyPtrTag>&, size_t, const char* prefix, PrintStream& out);
 
 #else // USE(UDIS86)
 
-inline bool tryToDisassembleWithUDis86(const MacroAssemblerCodePtr&, size_t, const char*, PrintStream&) { return false; }
+inline bool tryToDisassembleWithUDis86(const MacroAssemblerCodePtr<DisassemblyPtrTag>&, size_t, const char*, PrintStream&) { return false; }
 
 #endif // USE(UDIS86)
 
diff --git a/Source/JavaScriptCore/disassembler/X86Disassembler.cpp b/Source/JavaScriptCore/disassembler/X86Disassembler.cpp
index 247b227..ef6ca82 100644
--- a/Source/JavaScriptCore/disassembler/X86Disassembler.cpp
+++ b/Source/JavaScriptCore/disassembler/X86Disassembler.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -34,7 +34,7 @@
 
 namespace JSC {
 
-bool tryToDisassemble(const MacroAssemblerCodePtr& codePtr, size_t size, const char* prefix, PrintStream& out)
+bool tryToDisassemble(const MacroAssemblerCodePtr<DisassemblyPtrTag>& codePtr, size_t size, const char* prefix, PrintStream& out)
 {
     return tryToDisassembleWithUDis86(codePtr, size, prefix, out);
 }
diff --git a/Source/JavaScriptCore/ftl/FTLCompile.cpp b/Source/JavaScriptCore/ftl/FTLCompile.cpp
index 00f92ef..a55e5b6 100644
--- a/Source/JavaScriptCore/ftl/FTLCompile.cpp
+++ b/Source/JavaScriptCore/ftl/FTLCompile.cpp
@@ -134,12 +134,11 @@
     jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
     jit.move(MacroAssembler::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
     jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
-    PtrTag callTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-    CCallHelpers::Call call = jit.call(callTag);
+    CCallHelpers::Call call = jit.call(OperationPtrTag);
     jit.jumpToExceptionHandler(vm);
     jit.addLinkTask(
         [=] (LinkBuffer& linkBuffer) {
-            linkBuffer.link(call, FunctionPtr(lookupExceptionHandler, callTag));
+            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(lookupExceptionHandler));
         });
 
     state.finalizer->b3CodeLinkBuffer = std::make_unique<LinkBuffer>(jit, codeBlock, JITCompilationCanFail);
@@ -153,9 +152,8 @@
     if (vm.shouldBuilderPCToCodeOriginMapping())
         codeBlock->setPCToCodeOriginMap(std::make_unique<PCToCodeOriginMap>(PCToCodeOriginMapBuilder(vm, WTFMove(originMap)), *state.finalizer->b3CodeLinkBuffer));
 
-    PtrTag entryTag = ptrTag(FTLCodePtrTag, codeBlock);
-    CodeLocationLabel label = state.finalizer->b3CodeLinkBuffer->locationOf(state.proc->entrypointLabel(0), entryTag);
-    state.generatedFunction = label.executableAddress<GeneratedFunction>();
+    CodeLocationLabel<JSEntryPtrTag> label = state.finalizer->b3CodeLinkBuffer->locationOf<JSEntryPtrTag>(state.proc->entrypointLabel(0));
+    state.generatedFunction = label;
     state.jitCode->initializeB3Byproducts(state.proc->releaseByproducts());
 
     for (auto pair : state.graph.m_entrypointIndexToCatchBytecodeOffset) {
@@ -163,7 +161,7 @@
         unsigned entrypointIndex = pair.key;
         Vector<FlushFormat> argumentFormats = state.graph.m_argumentFormats[entrypointIndex];
         state.jitCode->common.appendCatchEntrypoint(
-            catchBytecodeOffset, state.finalizer->b3CodeLinkBuffer->locationOf(state.proc->entrypointLabel(entrypointIndex), ExceptionHandlerPtrTag).executableAddress(), WTFMove(argumentFormats));
+            catchBytecodeOffset, state.finalizer->b3CodeLinkBuffer->locationOf<ExceptionHandlerPtrTag>(state.proc->entrypointLabel(entrypointIndex)), WTFMove(argumentFormats));
     }
     state.jitCode->common.finalizeCatchEntrypoints();
 
diff --git a/Source/JavaScriptCore/ftl/FTLExceptionTarget.cpp b/Source/JavaScriptCore/ftl/FTLExceptionTarget.cpp
index a8a502e..0d4f523 100644
--- a/Source/JavaScriptCore/ftl/FTLExceptionTarget.cpp
+++ b/Source/JavaScriptCore/ftl/FTLExceptionTarget.cpp
@@ -36,11 +36,11 @@
 {
 }
 
-CodeLocationLabel ExceptionTarget::label(LinkBuffer& linkBuffer, PtrTag handlerTag)
+CodeLocationLabel<ExceptionHandlerPtrTag> ExceptionTarget::label(LinkBuffer& linkBuffer)
 {
     if (m_isDefaultHandler)
-        return linkBuffer.locationOf(*m_defaultHandler, handlerTag);
-    return linkBuffer.locationOf(m_handle->label, handlerTag);
+        return linkBuffer.locationOf<ExceptionHandlerPtrTag>(*m_defaultHandler);
+    return linkBuffer.locationOf<ExceptionHandlerPtrTag>(m_handle->label);
 }
 
 Box<CCallHelpers::JumpList> ExceptionTarget::jumps(CCallHelpers& jit)
@@ -50,13 +50,13 @@
         Box<CCallHelpers::Label> defaultHandler = m_defaultHandler;
         jit.addLinkTask(
             [=] (LinkBuffer& linkBuffer) {
-                linkBuffer.link(*result, linkBuffer.locationOf(*defaultHandler, ExceptionHandlerPtrTag));
+                linkBuffer.link(*result, linkBuffer.locationOf<ExceptionHandlerPtrTag>(*defaultHandler));
             });
     } else {
         RefPtr<OSRExitHandle> handle = m_handle;
         jit.addLinkTask(
             [=] (LinkBuffer& linkBuffer) {
-                linkBuffer.link(*result, linkBuffer.locationOf(handle->label, DFGOSRExitPtrTag));
+                linkBuffer.link(*result, linkBuffer.locationOf<OSRExitPtrTag>(handle->label));
             });
     }
     return result;
diff --git a/Source/JavaScriptCore/ftl/FTLExceptionTarget.h b/Source/JavaScriptCore/ftl/FTLExceptionTarget.h
index 5bf95f5..dd21437 100644
--- a/Source/JavaScriptCore/ftl/FTLExceptionTarget.h
+++ b/Source/JavaScriptCore/ftl/FTLExceptionTarget.h
@@ -43,7 +43,7 @@
     ~ExceptionTarget();
 
     // It's OK to call this during linking, but not any sooner.
-    CodeLocationLabel label(LinkBuffer&, PtrTag handlerTag);
+    CodeLocationLabel<ExceptionHandlerPtrTag> label(LinkBuffer&);
 
     // Or, you can get a JumpList at any time. Anything you add to this JumpList will be linked to
     // the target's label.
diff --git a/Source/JavaScriptCore/ftl/FTLGeneratedFunction.h b/Source/JavaScriptCore/ftl/FTLGeneratedFunction.h
index f6fba28..01de3980 100644
--- a/Source/JavaScriptCore/ftl/FTLGeneratedFunction.h
+++ b/Source/JavaScriptCore/ftl/FTLGeneratedFunction.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -28,10 +28,12 @@
 #if ENABLE(FTL_JIT)
 
 #include "CallFrame.h"
+#include "MacroAssemblerCodeRef.h"
 
 namespace JSC { namespace FTL {
 
-typedef EncodedJSValue (*GeneratedFunction)(ExecState*);
+// Points to a function with the prototype: EncodedJSValue (*)(ExecState*).
+using GeneratedFunction = CodeLocationLabel<JSEntryPtrTag>;
 
 } } // namespace JSC::FTL
 
diff --git a/Source/JavaScriptCore/ftl/FTLJITCode.cpp b/Source/JavaScriptCore/ftl/FTLJITCode.cpp
index a9e8987..931bf12 100644
--- a/Source/JavaScriptCore/ftl/FTLJITCode.cpp
+++ b/Source/JavaScriptCore/ftl/FTLJITCode.cpp
@@ -51,7 +51,7 @@
     }
 }
 
-void JITCode::initializeB3Code(CodeRef b3Code)
+void JITCode::initializeB3Code(CodeRef<JSEntryPtrTag> b3Code)
 {
     m_b3Code = b3Code;
 }
@@ -61,17 +61,17 @@
     m_b3Byproducts = WTFMove(byproducts);
 }
 
-void JITCode::initializeAddressForCall(CodePtr address)
+void JITCode::initializeAddressForCall(CodePtr<JSEntryPtrTag> address)
 {
     m_addressForCall = address;
 }
 
-void JITCode::initializeArityCheckEntrypoint(CodeRef entrypoint)
+void JITCode::initializeArityCheckEntrypoint(CodeRef<JSEntryPtrTag> entrypoint)
 {
     m_arityCheckEntrypoint = entrypoint;
 }
 
-JITCode::CodePtr JITCode::addressForCall(ArityCheckMode arityCheck)
+JITCode::CodePtr<JSEntryPtrTag> JITCode::addressForCall(ArityCheckMode arityCheck)
 {
     switch (arityCheck) {
     case ArityCheckNotRequired:
@@ -80,18 +80,16 @@
         return m_arityCheckEntrypoint.code();
     }
     RELEASE_ASSERT_NOT_REACHED();
-    return CodePtr();
+    return CodePtr<JSEntryPtrTag>();
 }
 
 void* JITCode::executableAddressAtOffset(size_t offset)
 {
-    return m_addressForCall.executableAddress<char*>() + offset;
-    assertIsTaggedWith(m_addressForCall.executableAddress(), CodePtrTag);
     if (!offset)
         return m_addressForCall.executableAddress();
 
-    char* executableAddress = untagCodePtr<char*>(m_addressForCall.executableAddress(), CodePtrTag);
-    return tagCodePtr(executableAddress + offset, CodePtrTag);
+    char* executableAddress = m_addressForCall.untaggedExecutableAddress<char*>();
+    return tagCodePtr<JSEntryPtrTag>(executableAddress + offset);
 }
 
 void* JITCode::dataAddressAtOffset(size_t)
diff --git a/Source/JavaScriptCore/ftl/FTLJITCode.h b/Source/JavaScriptCore/ftl/FTLJITCode.h
index 2c2809e..2f9866f 100644
--- a/Source/JavaScriptCore/ftl/FTLJITCode.h
+++ b/Source/JavaScriptCore/ftl/FTLJITCode.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -44,17 +44,17 @@
     JITCode();
     ~JITCode();
 
-    CodePtr addressForCall(ArityCheckMode) override;
+    CodePtr<JSEntryPtrTag> addressForCall(ArityCheckMode) override;
     void* executableAddressAtOffset(size_t offset) override;
     void* dataAddressAtOffset(size_t offset) override;
     unsigned offsetOf(void* pointerIntoCode) override;
     size_t size() override;
     bool contains(void*) override;
 
-    void initializeB3Code(CodeRef);
+    void initializeB3Code(CodeRef<JSEntryPtrTag>);
     void initializeB3Byproducts(std::unique_ptr<B3::OpaqueByproducts>);
-    void initializeAddressForCall(CodePtr);
-    void initializeArityCheckEntrypoint(CodeRef);
+    void initializeAddressForCall(CodePtr<JSEntryPtrTag>);
+    void initializeArityCheckEntrypoint(CodeRef<JSEntryPtrTag>);
     
     void validateReferences(const TrackedReferences&) override;
 
@@ -62,7 +62,7 @@
 
     std::optional<CodeOrigin> findPC(CodeBlock*, void* pc) override;
 
-    CodeRef b3Code() const { return m_b3Code; }
+    CodeRef<JSEntryPtrTag> b3Code() const { return m_b3Code; }
     
     JITCode* ftl() override;
     DFG::CommonData* dfgCommon() override;
@@ -74,10 +74,10 @@
     Vector<std::unique_ptr<LazySlowPath>> lazySlowPaths;
     
 private:
-    CodePtr m_addressForCall;
-    CodeRef m_b3Code;
+    CodePtr<JSEntryPtrTag> m_addressForCall;
+    CodeRef<JSEntryPtrTag> m_b3Code;
     std::unique_ptr<B3::OpaqueByproducts> m_b3Byproducts;
-    CodeRef m_arityCheckEntrypoint;
+    CodeRef<JSEntryPtrTag> m_arityCheckEntrypoint;
 };
 
 } } // namespace JSC::FTL
diff --git a/Source/JavaScriptCore/ftl/FTLJITFinalizer.cpp b/Source/JavaScriptCore/ftl/FTLJITFinalizer.cpp
index 6637fd6..0919f0b 100644
--- a/Source/JavaScriptCore/ftl/FTLJITFinalizer.cpp
+++ b/Source/JavaScriptCore/ftl/FTLJITFinalizer.cpp
@@ -73,14 +73,14 @@
 {
     bool dumpDisassembly = shouldDumpDisassembly() || Options::asyncDisassembly();
     
-    MacroAssemblerCodeRef b3CodeRef =
-        FINALIZE_CODE_IF(dumpDisassembly, *b3CodeLinkBuffer, CodePtrTag,
+    MacroAssemblerCodeRef<JSEntryPtrTag> b3CodeRef =
+        FINALIZE_CODE_IF(dumpDisassembly, *b3CodeLinkBuffer, JSEntryPtrTag,
             "FTL B3 code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::FTLJIT)).data());
 
-    MacroAssemblerCodeRef arityCheckCodeRef = entrypointLinkBuffer
-        ? FINALIZE_CODE_IF(dumpDisassembly, *entrypointLinkBuffer, CodePtrTag,
+    MacroAssemblerCodeRef<JSEntryPtrTag> arityCheckCodeRef = entrypointLinkBuffer
+        ? FINALIZE_CODE_IF(dumpDisassembly, *entrypointLinkBuffer, JSEntryPtrTag,
             "FTL entrypoint thunk for %s with B3 generated code at %p", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::FTLJIT)).data(), function)
-        : MacroAssemblerCodeRef::createSelfManagedCodeRef(b3CodeRef.code());
+        : MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(b3CodeRef.code());
 
     jitCode->initializeB3Code(b3CodeRef);
     jitCode->initializeArityCheckEntrypoint(arityCheckCodeRef);
diff --git a/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp b/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp
index 1d12130..a5d3d4f 100644
--- a/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp
+++ b/Source/JavaScriptCore/ftl/FTLLazySlowPath.cpp
@@ -38,8 +38,8 @@
 }
 
 void LazySlowPath::initialize(
-    CodeLocationJump patchableJump, CodeLocationLabel done,
-    CodeLocationLabel exceptionTarget,
+    CodeLocationJump<JSInternalPtrTag> patchableJump, CodeLocationLabel<JSEntryPtrTag> done,
+    CodeLocationLabel<ExceptionHandlerPtrTag> exceptionTarget,
     const RegisterSet& usedRegisters, CallSiteIndex callSiteIndex, RefPtr<Generator> generator
     )
 {
@@ -63,14 +63,13 @@
 
     m_generator->run(jit, params);
 
-    PtrTag slowPathTag = ptrTag(FTLLazySlowPathPtrTag, bitwise_cast<PtrTag>(this));
     LinkBuffer linkBuffer(jit, codeBlock, JITCompilationMustSucceed);
-    linkBuffer.link(params.doneJumps, m_done.retagged(slowPathTag, NearCodePtrTag));
+    linkBuffer.link(params.doneJumps, m_done);
     if (m_exceptionTarget)
-        linkBuffer.link(exceptionJumps, m_exceptionTarget.retagged(slowPathTag, NearCodePtrTag));
-    m_stub = FINALIZE_CODE_FOR(codeBlock, linkBuffer, slowPathTag, "Lazy slow path call stub");
+        linkBuffer.link(exceptionJumps, m_exceptionTarget);
+    m_stub = FINALIZE_CODE_FOR(codeBlock, linkBuffer, JITStubRoutinePtrTag, "Lazy slow path call stub");
 
-    MacroAssembler::repatchJump(m_patchableJump.retagged(slowPathTag, NearCodePtrTag), CodeLocationLabel(m_stub.retaggedCode(slowPathTag, NearCodePtrTag)));
+    MacroAssembler::repatchJump(m_patchableJump, CodeLocationLabel<JITStubRoutinePtrTag>(m_stub.code()));
 }
 
 } } // namespace JSC::FTL
diff --git a/Source/JavaScriptCore/ftl/FTLLazySlowPath.h b/Source/JavaScriptCore/ftl/FTLLazySlowPath.h
index 0325ee6..767ef12 100644
--- a/Source/JavaScriptCore/ftl/FTLLazySlowPath.h
+++ b/Source/JavaScriptCore/ftl/FTLLazySlowPath.h
@@ -69,27 +69,29 @@
     ~LazySlowPath();
 
     void initialize(
-        CodeLocationJump patchableJump, CodeLocationLabel done,
-        CodeLocationLabel exceptionTarget, const RegisterSet& usedRegisters,
+        CodeLocationJump<JSInternalPtrTag> patchableJump, CodeLocationLabel<JSEntryPtrTag> done,
+        CodeLocationLabel<ExceptionHandlerPtrTag> exceptionTarget, const RegisterSet& usedRegisters,
         CallSiteIndex, RefPtr<Generator>
         );
 
-    CodeLocationJump patchableJump() const { return m_patchableJump; }
-    CodeLocationLabel done() const { return m_done; }
+    CodeLocationJump<JSInternalPtrTag> patchableJump() const { return m_patchableJump; }
+    CodeLocationLabel<JSEntryPtrTag> done() const { return m_done; }
     const RegisterSet& usedRegisters() const { return m_usedRegisters; }
     CallSiteIndex callSiteIndex() const { return m_callSiteIndex; }
 
     void generate(CodeBlock*);
 
-    MacroAssemblerCodeRef stub() const { return m_stub; }
+    MacroAssemblerCodeRef<JITStubRoutinePtrTag> stub() const { return m_stub; }
 
 private:
-    CodeLocationJump m_patchableJump;
-    CodeLocationLabel m_done;
-    CodeLocationLabel m_exceptionTarget;
+    CodeLocationJump<JSInternalPtrTag> m_patchableJump;
+    // FIXME: This should be tagged with JSInternalPtrTag instead of JSEntryPtrTag.
+    // https://bugs.webkit.org/show_bug.cgi?id=184712
+    CodeLocationLabel<JSEntryPtrTag> m_done;
+    CodeLocationLabel<ExceptionHandlerPtrTag> m_exceptionTarget;
     RegisterSet m_usedRegisters;
     CallSiteIndex m_callSiteIndex;
-    MacroAssemblerCodeRef m_stub;
+    MacroAssemblerCodeRef<JITStubRoutinePtrTag> m_stub;
     RefPtr<Generator> m_generator;
 };
 
diff --git a/Source/JavaScriptCore/ftl/FTLLazySlowPathCall.h b/Source/JavaScriptCore/ftl/FTLLazySlowPathCall.h
index b768c4ff..bfb2ace 100644
--- a/Source/JavaScriptCore/ftl/FTLLazySlowPathCall.h
+++ b/Source/JavaScriptCore/ftl/FTLLazySlowPathCall.h
@@ -38,7 +38,7 @@
 
 template<typename ResultType, typename... ArgumentTypes>
 RefPtr<LazySlowPath::Generator> createLazyCallGenerator(
-    VM& vm, FunctionPtr function, ResultType result, ArgumentTypes... arguments)
+    VM& vm, FunctionPtr<CFunctionPtrTag> function, ResultType result, ArgumentTypes... arguments)
 {
     return LazySlowPath::createGenerator(
         [=, &vm] (CCallHelpers& jit, LazySlowPath::GenerationParams& params) {
diff --git a/Source/JavaScriptCore/ftl/FTLLink.cpp b/Source/JavaScriptCore/ftl/FTLLink.cpp
index 6aac19a..e36db5cf 100644
--- a/Source/JavaScriptCore/ftl/FTLLink.cpp
+++ b/Source/JavaScriptCore/ftl/FTLLink.cpp
@@ -140,15 +140,13 @@
             jit.emitFunctionPrologue();
             jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
             jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
-            PtrTag callTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-            CCallHelpers::Call callArityCheck = jit.call(callTag);
+            CCallHelpers::Call callArityCheck = jit.call(OperationPtrTag);
 
             auto noException = jit.branch32(CCallHelpers::GreaterThanOrEqual, GPRInfo::returnValueGPR, CCallHelpers::TrustedImm32(0));
             jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm.topEntryFrame);
             jit.move(CCallHelpers::TrustedImmPtr(&vm), GPRInfo::argumentGPR0);
             jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
-            PtrTag lookupTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-            CCallHelpers::Call callLookupExceptionHandlerFromCallerFrame = jit.call(lookupTag);
+            CCallHelpers::Call callLookupExceptionHandlerFromCallerFrame = jit.call(OperationPtrTag);
             jit.jumpToExceptionHandler(vm);
             noException.link(&jit);
 
@@ -172,14 +170,13 @@
                 state.allocationFailed = true;
                 return;
             }
-            linkBuffer->link(callArityCheck, FunctionPtr(codeBlock->m_isConstructor ? operationConstructArityCheck : operationCallArityCheck, callTag));
-            linkBuffer->link(callLookupExceptionHandlerFromCallerFrame, FunctionPtr(lookupExceptionHandlerFromCallerFrame, lookupTag));
-            linkBuffer->link(callArityFixup, FunctionPtr(vm.getCTIStub(arityFixupGenerator).retaggedCode(ptrTag(ArityFixupPtrTag, &vm), NearCodePtrTag)));
-            linkBuffer->link(mainPathJumps, CodeLocationLabel(bitwise_cast<void*>(state.generatedFunction)));
+            linkBuffer->link(callArityCheck, FunctionPtr<OperationPtrTag>(codeBlock->m_isConstructor ? operationConstructArityCheck : operationCallArityCheck));
+            linkBuffer->link(callLookupExceptionHandlerFromCallerFrame, FunctionPtr<OperationPtrTag>(lookupExceptionHandlerFromCallerFrame));
+            linkBuffer->link(callArityFixup, FunctionPtr<JITThunkPtrTag>(vm.getCTIStub(arityFixupGenerator).code()));
+            linkBuffer->link(mainPathJumps, state.generatedFunction);
         }
-        
-        PtrTag entryTag = ptrTag(FTLCodePtrTag, codeBlock);
-        state.jitCode->initializeAddressForCall(MacroAssemblerCodePtr(retagCodePtr<void*>(state.generatedFunction, entryTag, CodePtrTag)));
+
+        state.jitCode->initializeAddressForCall(state.generatedFunction);
         break;
     }
         
@@ -198,9 +195,9 @@
             state.allocationFailed = true;
             return;
         }
-        linkBuffer->link(mainPathJump, CodeLocationLabel(bitwise_cast<void*>(state.generatedFunction)));
+        linkBuffer->link(mainPathJump, state.generatedFunction);
 
-        state.jitCode->initializeAddressForCall(linkBuffer->locationOf(start, CodePtrTag));
+        state.jitCode->initializeAddressForCall(linkBuffer->locationOf<JSEntryPtrTag>(start));
         break;
     }
         
diff --git a/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp b/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp
index 1e20afb..6975f7a 100644
--- a/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp
+++ b/Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp
@@ -286,19 +286,17 @@
 
                     jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
                     jit.move(CCallHelpers::TrustedImmPtr(jit.codeBlock()), GPRInfo::argumentGPR1);
-                    PtrTag throwTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-                    CCallHelpers::Call throwCall = jit.call(throwTag);
+                    CCallHelpers::Call throwCall = jit.call(OperationPtrTag);
 
                     jit.move(CCallHelpers::TrustedImmPtr(vm), GPRInfo::argumentGPR0);
                     jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
-                    PtrTag lookupTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-                    CCallHelpers::Call lookupExceptionHandlerCall = jit.call(lookupTag);
+                    CCallHelpers::Call lookupExceptionHandlerCall = jit.call(OperationPtrTag);
                     jit.jumpToExceptionHandler(*vm);
 
                     jit.addLinkTask(
                         [=] (LinkBuffer& linkBuffer) {
-                            linkBuffer.link(throwCall, FunctionPtr(operationThrowStackOverflowError, throwTag));
-                            linkBuffer.link(lookupExceptionHandlerCall, FunctionPtr(lookupExceptionHandlerFromCallerFrame, lookupTag));
+                            linkBuffer.link(throwCall, FunctionPtr<OperationPtrTag>(operationThrowStackOverflowError));
+                            linkBuffer.link(lookupExceptionHandlerCall, FunctionPtr<OperationPtrTag>(lookupExceptionHandlerFromCallerFrame));
                     });
                 });
             });
@@ -365,7 +363,7 @@
                 CCallHelpers::Jump jump = jit.jump();
                 jit.addLinkTask(
                     [=] (LinkBuffer& linkBuffer) {
-                        linkBuffer.link(jump, linkBuffer.locationOf(*exceptionHandler, ExceptionHandlerPtrTag));
+                        linkBuffer.link(jump, linkBuffer.locationOf<ExceptionHandlerPtrTag>(*exceptionHandler));
                     });
             });
         m_out.unreachable();
@@ -7170,15 +7168,13 @@
 
                 jit.addLinkTask(
                     [=] (LinkBuffer& linkBuffer) {
-                        PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
-                        MacroAssemblerCodePtr linkCall =
-                            vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag);
-                        linkBuffer.link(slowCall, FunctionPtr(linkCall));
+                        MacroAssemblerCodePtr<JITThunkPtrTag> linkCall = vm->getCTIStub(linkCallThunkGenerator).code();
+                        linkBuffer.link(slowCall, FunctionPtr<JITThunkPtrTag>(linkCall));
 
                         callLinkInfo->setCallLocations(
-                            CodeLocationLabel(linkBuffer.locationOfNearCall(slowCall)),
-                            CodeLocationLabel(linkBuffer.locationOf(targetToCheck)),
-                            linkBuffer.locationOfNearCall(fastCall));
+                            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOfNearCall<JSEntryPtrTag>(slowCall)),
+                            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOf<JSEntryPtrTag>(targetToCheck)),
+                            linkBuffer.locationOfNearCall<JSEntryPtrTag>(fastCall));
                     });
             });
 
@@ -7318,9 +7314,9 @@
                     
                     jit.addLinkTask(
                         [=] (LinkBuffer& linkBuffer) {
-                            CodeLocationLabel patchableJumpLocation = linkBuffer.locationOf(patchableJump);
-                            CodeLocationNearCall callLocation = linkBuffer.locationOfNearCall(call);
-                            CodeLocationLabel slowPathLocation = linkBuffer.locationOf(slowPath, SlowPathPtrTag);
+                            CodeLocationLabel<JSEntryPtrTag> patchableJumpLocation = linkBuffer.locationOf<JSEntryPtrTag>(patchableJump);
+                            CodeLocationNearCall<JSEntryPtrTag> callLocation = linkBuffer.locationOfNearCall<JSEntryPtrTag>(call);
+                            CodeLocationLabel<JSEntryPtrTag> slowPathLocation = linkBuffer.locationOf<JSEntryPtrTag>(slowPath);
                             
                             callLinkInfo->setCallLocations(
                                 patchableJumpLocation,
@@ -7367,13 +7363,13 @@
                         
                         jit.addLinkTask(
                             [=] (LinkBuffer& linkBuffer) {
-                                CodeLocationNearCall callLocation = linkBuffer.locationOfNearCall(call);
-                                CodeLocationLabel slowPathLocation = linkBuffer.locationOf(slowPath, NearCodePtrTag);
+                                CodeLocationNearCall<JSEntryPtrTag> callLocation = linkBuffer.locationOfNearCall<JSEntryPtrTag>(call);
+                                CodeLocationLabel<JSEntryPtrTag> slowPathLocation = linkBuffer.locationOf<JSEntryPtrTag>(slowPath);
                                 
                                 linkBuffer.link(call, slowPathLocation);
                                 
                                 callLinkInfo->setCallLocations(
-                                    CodeLocationLabel(),
+                                    CodeLocationLabel<JSEntryPtrTag>(),
                                     slowPathLocation,
                                     callLocation);
                             });
@@ -7492,15 +7488,13 @@
 
                 jit.addLinkTask(
                     [=] (LinkBuffer& linkBuffer) {
-                        PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
-                        MacroAssemblerCodePtr linkCall =
-                            vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag);
-                        linkBuffer.link(slowCall, FunctionPtr(linkCall));
+                        MacroAssemblerCodePtr<JITThunkPtrTag> linkCall = vm->getCTIStub(linkCallThunkGenerator).code();
+                        linkBuffer.link(slowCall, FunctionPtr<JITThunkPtrTag>(linkCall));
 
                         callLinkInfo->setCallLocations(
-                            CodeLocationLabel(linkBuffer.locationOfNearCall(slowCall)),
-                            CodeLocationLabel(linkBuffer.locationOf(targetToCheck)),
-                            linkBuffer.locationOfNearCall(fastCall));
+                            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOfNearCall<JSEntryPtrTag>(slowCall)),
+                            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOf<JSEntryPtrTag>(targetToCheck)),
+                            linkBuffer.locationOfNearCall<JSEntryPtrTag>(fastCall));
                     });
             });
     }
@@ -7637,9 +7631,8 @@
                 };
 
                 auto callWithExceptionCheck = [&] (void* callee) {
-                    PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-                    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(callee, tag)), GPRInfo::nonPreservedNonArgumentGPR0);
-                    jit.call(GPRInfo::nonPreservedNonArgumentGPR0, tag);
+                    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(callee)), GPRInfo::nonPreservedNonArgumentGPR0);
+                    jit.call(GPRInfo::nonPreservedNonArgumentGPR0, OperationPtrTag);
                     exceptions->append(jit.emitExceptionCheck(*vm, AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
                 };
 
@@ -7793,15 +7786,13 @@
                 
                 jit.addLinkTask(
                     [=] (LinkBuffer& linkBuffer) {
-                        PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
-                        MacroAssemblerCodePtr linkCall =
-                            vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag);
-                        linkBuffer.link(slowCall, FunctionPtr(linkCall));
+                        MacroAssemblerCodePtr<JITThunkPtrTag> linkCall = vm->getCTIStub(linkCallThunkGenerator).code();
+                        linkBuffer.link(slowCall, FunctionPtr<JITThunkPtrTag>(linkCall));
                         
                         callLinkInfo->setCallLocations(
-                            CodeLocationLabel(linkBuffer.locationOfNearCall(slowCall)),
-                            CodeLocationLabel(linkBuffer.locationOf(targetToCheck)),
-                            linkBuffer.locationOfNearCall(fastCall));
+                            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOfNearCall<JSEntryPtrTag>(slowCall)),
+                            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOf<JSEntryPtrTag>(targetToCheck)),
+                            linkBuffer.locationOfNearCall<JSEntryPtrTag>(fastCall));
                     });
             });
 
@@ -7978,9 +7969,8 @@
                 RELEASE_ASSERT(!allocator.numberOfReusedRegisters());
 
                 auto callWithExceptionCheck = [&] (void* callee) {
-                    PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-                    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(callee, tag)), GPRInfo::nonPreservedNonArgumentGPR0);
-                    jit.call(GPRInfo::nonPreservedNonArgumentGPR0, tag);
+                    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(callee)), GPRInfo::nonPreservedNonArgumentGPR0);
+                    jit.call(GPRInfo::nonPreservedNonArgumentGPR0, OperationPtrTag);
                     exceptions->append(jit.emitExceptionCheck(*vm, AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
                 };
 
@@ -8078,15 +8068,13 @@
                 
                 jit.addLinkTask(
                     [=] (LinkBuffer& linkBuffer) {
-                        PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
-                        MacroAssemblerCodePtr linkCall =
-                            vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag);
-                        linkBuffer.link(slowCall, FunctionPtr(linkCall));
+                        MacroAssemblerCodePtr<JITThunkPtrTag> linkCall = vm->getCTIStub(linkCallThunkGenerator).code();
+                        linkBuffer.link(slowCall, FunctionPtr<JITThunkPtrTag>(linkCall));
                         
                         callLinkInfo->setCallLocations(
-                            CodeLocationLabel(linkBuffer.locationOfNearCall(slowCall)),
-                            CodeLocationLabel(linkBuffer.locationOf(targetToCheck)),
-                            linkBuffer.locationOfNearCall(fastCall));
+                            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOfNearCall<JSEntryPtrTag>(slowCall)),
+                            CodeLocationLabel<JSEntryPtrTag>(linkBuffer.locationOf<JSEntryPtrTag>(targetToCheck)),
+                            linkBuffer.locationOfNearCall<JSEntryPtrTag>(fastCall));
                     });
             });
 
@@ -8168,9 +8156,8 @@
                 requiredBytes = WTF::roundUpToMultipleOf(stackAlignmentBytes(), requiredBytes);
                 jit.subPtr(CCallHelpers::TrustedImm32(requiredBytes), CCallHelpers::stackPointerRegister);
                 jit.setupArguments<decltype(operationCallEval)>(GPRInfo::regT1);
-                PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-                jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(operationCallEval, tag)), GPRInfo::nonPreservedNonArgumentGPR0);
-                jit.call(GPRInfo::nonPreservedNonArgumentGPR0, tag);
+                jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationCallEval)), GPRInfo::nonPreservedNonArgumentGPR0);
+                jit.call(GPRInfo::nonPreservedNonArgumentGPR0, OperationPtrTag);
                 exceptions->append(jit.emitExceptionCheck(state->vm(), AssemblyHelpers::NormalExceptionCheck, AssemblyHelpers::FarJumpWidth));
                 
                 CCallHelpers::Jump done = jit.branchTest64(CCallHelpers::NonZero, GPRInfo::returnValueGPR);
@@ -8852,8 +8839,8 @@
                 jit.addLinkTask(
                     [=] (LinkBuffer& linkBuffer) {
                         JumpReplacement jumpReplacement(
-                            linkBuffer.locationOf(label),
-                            linkBuffer.locationOf(handle->label));
+                            linkBuffer.locationOf<JSInternalPtrTag>(label),
+                            linkBuffer.locationOf<OSRExitPtrTag>(handle->label));
                         jitCode->common.jumpReplacements.append(jumpReplacement);
                     });
             });
@@ -9697,18 +9684,18 @@
 
                                 jit.addLinkTask(
                                     [=] (LinkBuffer& linkBuffer) {
-                                        CodeLocationLabel start = linkBuffer.locationOf(jump);
+                                        CodeLocationLabel<JITStubRoutinePtrTag> start = linkBuffer.locationOf<JITStubRoutinePtrTag>(jump);
                                         stubInfo->patch.start = start;
                                         ptrdiff_t inlineSize = MacroAssembler::differenceBetweenCodePtr(
-                                            start, linkBuffer.locationOf(done));
+                                            start, linkBuffer.locationOf<JSEntryPtrTag>(done));
                                         RELEASE_ASSERT(inlineSize >= 0);
                                         stubInfo->patch.inlineSize = inlineSize;
 
                                         stubInfo->patch.deltaFromStartToSlowPathCallLocation = MacroAssembler::differenceBetweenCodePtr(
-                                            start, linkBuffer.locationOf(slowPathCall));
+                                            start, linkBuffer.locationOf<JSEntryPtrTag>(slowPathCall));
 
                                         stubInfo->patch.deltaFromStartToSlowPathStart = MacroAssembler::differenceBetweenCodePtr(
-                                            start, linkBuffer.locationOf(slowPathBegin));
+                                            start, linkBuffer.locationOf<JSEntryPtrTag>(slowPathBegin));
 
                                     });
                             });
@@ -11918,7 +11905,7 @@
                 CCallHelpers::JumpList failureCases = domJIT->generator()->run(jit, domJITParams);
 
                 jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-                    linkBuffer.link(failureCases, linkBuffer.locationOf(handle->label));
+                    linkBuffer.link(failureCases, linkBuffer.locationOf<NoPtrTag>(handle->label));
                 });
             });
         patchpoint->effects = Effects::forCheck();
@@ -11957,6 +11944,7 @@
 
         unsigned argumentCountIncludingThis = signature->argumentCount + 1;
         LValue result;
+        assertIsTaggedWith(reinterpret_cast<void*>(signature->unsafeFunction), CFunctionPtrTag);
         switch (argumentCountIncludingThis) {
         case 1:
             result = vmCall(Int64, m_out.operation(reinterpret_cast<J_JITOperation_EP>(signature->unsafeFunction)), m_callFrame, operands[0]);
@@ -11983,7 +11971,7 @@
             // Since the getter does not have code setting topCallFrame, As is the same to IC, we should set topCallFrame in caller side.
             m_out.storePtr(m_callFrame, m_out.absolute(&vm().topCallFrame));
             setJSValue(
-                vmCall(Int64, m_out.operation(m_node->callDOMGetterData()->customAccessorGetter),
+                vmCall(Int64, m_out.operation(m_node->callDOMGetterData()->customAccessorGetter.retaggedExecutableAddress<CFunctionPtrTag>()),
                     m_callFrame, lowCell(m_node->child1()), m_out.constIntPtr(m_graph.identifiers()[m_node->callDOMGetterData()->identifierNumber])));
             return;
         }
@@ -13989,26 +13977,21 @@
 
                         jit.addLinkTask(
                             [=] (LinkBuffer& linkBuffer) {
-                                PtrTag thunkTag = ptrTag(FTLLazySlowPathPtrTag, vm);
-                                linkBuffer.link(
-                                    generatorJump, CodeLocationLabel(
-                                        vm->getCTIStub(
-                                            lazySlowPathGenerationThunkGenerator).retaggedCode(thunkTag, NearCodePtrTag)));
+                                linkBuffer.link(generatorJump,
+                                    CodeLocationLabel<JITThunkPtrTag>(vm->getCTIStub(lazySlowPathGenerationThunkGenerator).code()));
                                 
                                 std::unique_ptr<LazySlowPath> lazySlowPath = std::make_unique<LazySlowPath>();
 
-                                PtrTag slowPathTag = ptrTag(FTLLazySlowPathPtrTag, bitwise_cast<PtrTag>(lazySlowPath.get()));
-                                CodeLocationJump linkedPatchableJump = CodeLocationJump(
-                                    linkBuffer.locationOf(patchableJump, slowPathTag));
+                                auto linkedPatchableJump = CodeLocationJump<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(patchableJump));
 
-                                CodeLocationLabel linkedDone = linkBuffer.locationOf(done, slowPathTag);
+                                CodeLocationLabel<JSEntryPtrTag> linkedDone = linkBuffer.locationOf<JSEntryPtrTag>(done);
 
                                 CallSiteIndex callSiteIndex =
                                     jitCode->common.addUniqueCallSiteIndex(origin);
                                     
                                 lazySlowPath->initialize(
                                         linkedPatchableJump, linkedDone,
-                                        exceptionTarget->label(linkBuffer, slowPathTag), usedRegisters,
+                                        exceptionTarget->label(linkBuffer), usedRegisters,
                                         callSiteIndex, generator);
                                     
                                 jitCode->lazySlowPaths[index] = WTFMove(lazySlowPath);
diff --git a/Source/JavaScriptCore/ftl/FTLOSRExit.cpp b/Source/JavaScriptCore/ftl/FTLOSRExit.cpp
index f537f56..328f0c8 100644
--- a/Source/JavaScriptCore/ftl/FTLOSRExit.cpp
+++ b/Source/JavaScriptCore/ftl/FTLOSRExit.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -108,7 +108,7 @@
 {
 }
 
-CodeLocationJump OSRExit::codeLocationForRepatch(CodeBlock* ftlCodeBlock) const
+CodeLocationJump<JSInternalPtrTag> OSRExit::codeLocationForRepatch(CodeBlock* ftlCodeBlock) const
 {
     UNUSED_PARAM(ftlCodeBlock);
     return m_patchableJump;
diff --git a/Source/JavaScriptCore/ftl/FTLOSRExit.h b/Source/JavaScriptCore/ftl/FTLOSRExit.h
index d17909b..7c02fb1 100644
--- a/Source/JavaScriptCore/ftl/FTLOSRExit.h
+++ b/Source/JavaScriptCore/ftl/FTLOSRExit.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013-2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -119,12 +119,12 @@
     OSRExit(OSRExitDescriptor*, ExitKind, CodeOrigin, CodeOrigin codeOriginForExitProfile, bool wasHoisted);
 
     OSRExitDescriptor* m_descriptor;
-    MacroAssemblerCodeRef m_code;
+    MacroAssemblerCodeRef<OSRExitPtrTag> m_code;
     // This tells us where to place a jump.
-    CodeLocationJump m_patchableJump;
+    CodeLocationJump<JSInternalPtrTag> m_patchableJump;
     Vector<B3::ValueRep> m_valueReps;
 
-    CodeLocationJump codeLocationForRepatch(CodeBlock* ftlCodeBlock) const;
+    CodeLocationJump<JSInternalPtrTag> codeLocationForRepatch(CodeBlock* ftlCodeBlock) const;
     void considerAddingAsFrequentExitSite(CodeBlock* profiledCodeBlock)
     {
         OSRExitBase::considerAddingAsFrequentExitSite(profiledCodeBlock, ExitFromFTL);
diff --git a/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp b/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp
index 7b43dfa..9922a6e 100644
--- a/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp
+++ b/Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp
@@ -176,7 +176,7 @@
 }
 
 static void compileStub(
-    unsigned exitID, JITCode* jitCode, OSRExit& exit, VM* vm, CodeBlock* codeBlock, PtrTag exitSiteTag)
+    unsigned exitID, JITCode* jitCode, OSRExit& exit, VM* vm, CodeBlock* codeBlock)
 {
     // This code requires framePointerRegister is the same as callFrameRegister
     static_assert(MacroAssembler::framePointerRegister == GPRInfo::callFrameRegister, "MacroAssembler::framePointerRegister and GPRInfo::callFrameRegister must be the same");
@@ -338,9 +338,8 @@
             jit.setupArguments<decltype(operationMaterializeObjectInOSR)>(
                 CCallHelpers::TrustedImmPtr(materialization),
                 CCallHelpers::TrustedImmPtr(materializationArguments));
-            PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-            jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(operationMaterializeObjectInOSR, tag)), GPRInfo::nonArgGPR0);
-            jit.call(GPRInfo::nonArgGPR0, tag);
+            jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationMaterializeObjectInOSR)), GPRInfo::nonArgGPR0);
+            jit.call(GPRInfo::nonArgGPR0, OperationPtrTag);
             jit.storePtr(GPRInfo::returnValueGPR, materializationToPointer.get(materialization));
 
             // Let everyone know that we're done.
@@ -367,9 +366,8 @@
             CCallHelpers::TrustedImmPtr(materialization),
             CCallHelpers::TrustedImmPtr(materializationToPointer.get(materialization)),
             CCallHelpers::TrustedImmPtr(materializationArguments));
-        PtrTag tag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-        jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(operationPopulateObjectInOSR, tag)), GPRInfo::nonArgGPR0);
-        jit.call(GPRInfo::nonArgGPR0, tag);
+        jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationPopulateObjectInOSR)), GPRInfo::nonArgGPR0);
+        jit.call(GPRInfo::nonArgGPR0, OperationPtrTag);
     }
 
     // Save all state from wherever the exit data tells us it was, into the appropriate place in
@@ -496,7 +494,7 @@
     LinkBuffer patchBuffer(jit, codeBlock);
     exit.m_code = FINALIZE_CODE_IF(
         shouldDumpDisassembly() || Options::verboseOSR() || Options::verboseFTLOSRExit(),
-        patchBuffer, exitSiteTag,
+        patchBuffer, OSRExitPtrTag,
         "FTL OSR exit #%u (%s, %s) from %s, with operands = %s",
             exitID, toCString(exit.m_codeOrigin).data(),
             exitKindToString(exit.m_kind), toCString(*codeBlock).data(),
@@ -544,13 +542,12 @@
 
     prepareCodeOriginForOSRExit(exec, exit.m_codeOrigin);
 
-    PtrTag thunkTag = ptrTag(FTLOSRExitPtrTag, &exit);
-    compileStub(exitID, jitCode, exit, &vm, codeBlock, thunkTag);
+    compileStub(exitID, jitCode, exit, &vm, codeBlock);
 
     MacroAssembler::repatchJump(
-        exit.codeLocationForRepatch(codeBlock), CodeLocationLabel(exit.m_code.retaggedCode(thunkTag, NearCodePtrTag)));
+        exit.codeLocationForRepatch(codeBlock), CodeLocationLabel<OSRExitPtrTag>(exit.m_code.code()));
     
-    return exit.m_code.retaggedCode(thunkTag, bitwise_cast<PtrTag>(exec)).executableAddress();
+    return exit.m_code.code().executableAddress();
 }
 
 } } // namespace JSC::FTL
diff --git a/Source/JavaScriptCore/ftl/FTLOSRExitHandle.cpp b/Source/JavaScriptCore/ftl/FTLOSRExitHandle.cpp
index 2d2401b..59d22ec 100644
--- a/Source/JavaScriptCore/ftl/FTLOSRExitHandle.cpp
+++ b/Source/JavaScriptCore/ftl/FTLOSRExitHandle.cpp
@@ -47,14 +47,13 @@
     VM& vm = state.vm();
     jit.addLinkTask(
         [self, jump, myLabel, compilation, &vm] (LinkBuffer& linkBuffer) {
-            self->exit.m_patchableJump = CodeLocationJump(linkBuffer.locationOf(jump));
+            self->exit.m_patchableJump = CodeLocationJump<JSInternalPtrTag>(linkBuffer.locationOf<JSInternalPtrTag>(jump));
 
-            PtrTag thunkTag = ptrTag(FTLOSRExitPtrTag, &vm);
             linkBuffer.link(
                 jump.m_jump,
-                CodeLocationLabel(vm.getCTIStub(osrExitGenerationThunkGenerator).retaggedCode(thunkTag, NearCodePtrTag)));
+                CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(osrExitGenerationThunkGenerator).code()));
             if (compilation)
-                compilation->addOSRExitSite({ linkBuffer.locationOf(myLabel).executableAddress() });
+                compilation->addOSRExitSite({ linkBuffer.locationOf<JSInternalPtrTag>(myLabel) });
         });
 }
 
diff --git a/Source/JavaScriptCore/ftl/FTLOperations.cpp b/Source/JavaScriptCore/ftl/FTLOperations.cpp
index 47aae79..7508bb0 100644
--- a/Source/JavaScriptCore/ftl/FTLOperations.cpp
+++ b/Source/JavaScriptCore/ftl/FTLOperations.cpp
@@ -581,8 +581,7 @@
     LazySlowPath& lazySlowPath = *jitCode->lazySlowPaths[index];
     lazySlowPath.generate(codeBlock);
 
-    PtrTag slowPathTag = ptrTag(FTLLazySlowPathPtrTag, bitwise_cast<PtrTag>(&lazySlowPath));
-    return lazySlowPath.stub().retaggedCode(slowPathTag, bitwise_cast<PtrTag>(exec)).executableAddress();
+    return lazySlowPath.stub().code().executableAddress();
 }
 
 } } // namespace JSC::FTL
diff --git a/Source/JavaScriptCore/ftl/FTLPatchpointExceptionHandle.cpp b/Source/JavaScriptCore/ftl/FTLPatchpointExceptionHandle.cpp
index e6c4c25..7294b1b 100644
--- a/Source/JavaScriptCore/ftl/FTLPatchpointExceptionHandle.cpp
+++ b/Source/JavaScriptCore/ftl/FTLPatchpointExceptionHandle.cpp
@@ -91,7 +91,7 @@
                     HandlerInfo newHandler = handler;
                     newHandler.start = callSiteIndex.bits();
                     newHandler.end = callSiteIndex.bits() + 1;
-                    newHandler.nativeCode = linkBuffer.locationOf(handle->label, ExceptionHandlerPtrTag);
+                    newHandler.nativeCode = linkBuffer.locationOf<ExceptionHandlerPtrTag>(handle->label);
                     codeBlock->appendExceptionHandler(newHandler);
                 });
         });
diff --git a/Source/JavaScriptCore/ftl/FTLSlowPathCall.cpp b/Source/JavaScriptCore/ftl/FTLSlowPathCall.cpp
index db1df95..74d29a3 100644
--- a/Source/JavaScriptCore/ftl/FTLSlowPathCall.cpp
+++ b/Source/JavaScriptCore/ftl/FTLSlowPathCall.cpp
@@ -113,25 +113,22 @@
     m_jit.addPtr(CCallHelpers::TrustedImm32(m_stackBytesNeeded), CCallHelpers::stackPointerRegister);
 }
 
-SlowPathCallKey SlowPathCallContext::keyWithTarget(void* callTarget) const
+SlowPathCallKey SlowPathCallContext::keyWithTarget(FunctionPtr<CFunctionPtrTag> callTarget) const
 {
     return SlowPathCallKey(m_thunkSaveSet, callTarget, m_argumentRegisters, m_offset);
 }
 
-SlowPathCall SlowPathCallContext::makeCall(VM& vm, FunctionPtr callTarget)
+SlowPathCall SlowPathCallContext::makeCall(VM& vm, FunctionPtr<CFunctionPtrTag> callTarget)
 {
-    void* executableAddress = callTarget.executableAddress();
-    assertIsCFunctionPtr(executableAddress);
-    SlowPathCallKey key = keyWithTarget(executableAddress);
-    PtrTag callTag = key.callPtrTag();
-    SlowPathCall result = SlowPathCall(m_jit.call(callTag), key);
+    SlowPathCallKey key = keyWithTarget(callTarget);
+    SlowPathCall result = SlowPathCall(m_jit.call(OperationPtrTag), key);
 
     m_jit.addLinkTask(
         [result, &vm] (LinkBuffer& linkBuffer) {
-            MacroAssemblerCodeRef thunk =
+            MacroAssemblerCodeRef<JITThunkPtrTag> thunk =
                 vm.ftlThunks->getSlowPathCallThunk(result.key());
 
-            linkBuffer.link(result.call(), CodeLocationLabel(thunk.code()));
+            linkBuffer.link(result.call(), CodeLocationLabel<OperationPtrTag>(thunk.retaggedCode<OperationPtrTag>()));
         });
     
     return result;
diff --git a/Source/JavaScriptCore/ftl/FTLSlowPathCall.h b/Source/JavaScriptCore/ftl/FTLSlowPathCall.h
index 5fb0705..fe7e0ca 100644
--- a/Source/JavaScriptCore/ftl/FTLSlowPathCall.h
+++ b/Source/JavaScriptCore/ftl/FTLSlowPathCall.h
@@ -59,10 +59,10 @@
 
     // NOTE: The call that this returns is already going to be linked by the JIT using addLinkTask(),
     // so there is no need for you to link it yourself.
-    SlowPathCall makeCall(VM&, FunctionPtr callTarget);
+    SlowPathCall makeCall(VM&, FunctionPtr<CFunctionPtrTag> callTarget);
 
 private:
-    SlowPathCallKey keyWithTarget(void* callTarget) const;
+    SlowPathCallKey keyWithTarget(FunctionPtr<CFunctionPtrTag> callTarget) const;
     
     RegisterSet m_argumentRegisters;
     RegisterSet m_callingConventionRegisters;
@@ -78,7 +78,7 @@
 template<typename... ArgumentTypes>
 SlowPathCall callOperation(
     VM& vm, const RegisterSet& usedRegisters, CCallHelpers& jit, CCallHelpers::JumpList* exceptionTarget,
-    FunctionPtr function, GPRReg resultGPR, ArgumentTypes... arguments)
+    FunctionPtr<CFunctionPtrTag> function, GPRReg resultGPR, ArgumentTypes... arguments)
 {
     SlowPathCall call;
     {
@@ -94,7 +94,7 @@
 template<typename... ArgumentTypes>
 SlowPathCall callOperation(
     VM& vm, const RegisterSet& usedRegisters, CCallHelpers& jit, CallSiteIndex callSiteIndex,
-    CCallHelpers::JumpList* exceptionTarget, FunctionPtr function, GPRReg resultGPR,
+    CCallHelpers::JumpList* exceptionTarget, FunctionPtr<CFunctionPtrTag> function, GPRReg resultGPR,
     ArgumentTypes... arguments)
 {
     if (callSiteIndex) {
@@ -110,7 +110,7 @@
 template<typename... ArgumentTypes>
 SlowPathCall callOperation(
     State& state, const RegisterSet& usedRegisters, CCallHelpers& jit, CodeOrigin codeOrigin,
-    CCallHelpers::JumpList* exceptionTarget, FunctionPtr function, GPRReg result, ArgumentTypes... arguments)
+    CCallHelpers::JumpList* exceptionTarget, FunctionPtr<CFunctionPtrTag> function, GPRReg result, ArgumentTypes... arguments)
 {
     return callOperation(
         state.vm(), usedRegisters, jit, callSiteIndexForCodeOrigin(state, codeOrigin), exceptionTarget, function,
diff --git a/Source/JavaScriptCore/ftl/FTLSlowPathCallKey.cpp b/Source/JavaScriptCore/ftl/FTLSlowPathCallKey.cpp
index 4cc835d..2d306c6 100644
--- a/Source/JavaScriptCore/ftl/FTLSlowPathCallKey.cpp
+++ b/Source/JavaScriptCore/ftl/FTLSlowPathCallKey.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -32,7 +32,7 @@
 
 void SlowPathCallKey::dump(PrintStream& out) const
 {
-    out.print("<usedRegisters = ", m_usedRegisters, ", offset = ", m_offset, ", callTarget = ", RawPointer(m_callTarget), ", argumentRegisters = ", m_argumentRegisters, ">");
+    out.print("<usedRegisters = ", m_usedRegisters, ", offset = ", m_offset, ", callTarget = ", RawPointer(m_callTarget.executableAddress()), ", argumentRegisters = ", m_argumentRegisters, ">");
 }
 
 } } // namespace JSC::FTL
diff --git a/Source/JavaScriptCore/ftl/FTLSlowPathCallKey.h b/Source/JavaScriptCore/ftl/FTLSlowPathCallKey.h
index 67c6d63..88630b4 100644
--- a/Source/JavaScriptCore/ftl/FTLSlowPathCallKey.h
+++ b/Source/JavaScriptCore/ftl/FTLSlowPathCallKey.h
@@ -27,7 +27,7 @@
 
 #if ENABLE(FTL_JIT)
 
-#include "PtrTag.h"
+#include "MacroAssemblerCodeRef.h"
 #include "RegisterSet.h"
 
 namespace JSC { namespace FTL {
@@ -45,29 +45,27 @@
 class SlowPathCallKey {
 public:
     SlowPathCallKey()
-        : m_callTarget(0)
-        , m_offset(0)
+        : m_offset(0)
     {
     }
     
     SlowPathCallKey(
-        const RegisterSet& set, void* callTarget, const RegisterSet& argumentRegisters,
+        const RegisterSet& set, FunctionPtr<CFunctionPtrTag> callTarget, const RegisterSet& argumentRegisters,
         ptrdiff_t offset)
         : m_usedRegisters(set)
-        , m_callTarget(callTarget)
+        , m_callTarget(callTarget.retagged<OperationPtrTag>())
         , m_argumentRegisters(argumentRegisters)
         , m_offset(offset)
     {
     }
     
     const RegisterSet& usedRegisters() const { return m_usedRegisters; }
-    void* callTarget() const { return m_callTarget; }
+    FunctionPtr<OperationPtrTag> callTarget() const { return m_callTarget; }
     const RegisterSet& argumentRegisters() const { return m_argumentRegisters; }
     ptrdiff_t offset() const { return m_offset; }
     
-    SlowPathCallKey withCallTarget(void* callTarget)
+    SlowPathCallKey withCallTarget(FunctionPtr<CFunctionPtrTag> callTarget)
     {
-        assertIsTaggedWith(callTarget, CFunctionPtrTag);
         return SlowPathCallKey(usedRegisters(), callTarget, argumentRegisters(), offset());
     }
     
@@ -78,14 +76,12 @@
     
     SlowPathCallKey(EmptyValueTag)
         : m_usedRegisters(RegisterSet::EmptyValue)
-        , m_callTarget(0)
         , m_offset(0)
     {
     }
     
     SlowPathCallKey(DeletedValueTag)
         : m_usedRegisters(RegisterSet::DeletedValue)
-        , m_callTarget(0)
         , m_offset(0)
     {
     }
@@ -101,19 +97,12 @@
     }
     unsigned hash() const
     {
-        return m_usedRegisters.hash() + PtrHash<void*>::hash(m_callTarget) + m_offset;
-    }
-
-    PtrTag callPtrTag() const
-    {
-        // We should only include factors which are invariant for the same slow path site.
-        // m_callTarget can vary and should be excluded.
-        return ptrTag(FTLSlowPathPtrTag, m_usedRegisters.hash(), m_offset);
+        return m_usedRegisters.hash() + PtrHash<void*>::hash(m_callTarget.executableAddress()) + m_offset;
     }
 
 private:
     RegisterSet m_usedRegisters;
-    void* m_callTarget;
+    FunctionPtr<OperationPtrTag> m_callTarget;
     RegisterSet m_argumentRegisters;
     ptrdiff_t m_offset;
 };
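
The SlowPathCallKey hunk above shows the core pattern of this patch in miniature: the raw void* call target plus a separately computed ptrTag() is replaced by a FunctionPtr templatized on its PtrTag, and moving between tags becomes an explicit retagged<NewTag>() conversion. The sketch below is a simplified, self-contained analogue of that idea, assuming no-op tagging helpers; TaggedFunctionPtr, tagPointer, and retagPointer are illustrative stand-ins, not JSC's actual MacroAssemblerCodeRef.h definitions.

    // Illustrative sketch only: a tag-templatized function-pointer wrapper in the
    // spirit of FunctionPtr<tag>. tagPointer/retagPointer are no-op stand-ins for
    // the real tagging helpers (which would sign/authenticate on arm64e).
    #include <cstdint>
    #include <cstdio>

    enum PtrTag : uintptr_t { CFunctionPtrTag = 1, OperationPtrTag = 2 };

    template<PtrTag tag> void* tagPointer(void* p) { return p; }
    template<PtrTag oldTag, PtrTag newTag> void* retagPointer(void* p) { return p; }

    template<PtrTag tag>
    class TaggedFunctionPtr {
    public:
        TaggedFunctionPtr() = default;
        explicit TaggedFunctionPtr(void* rawFunction)
            : m_ptr(tagPointer<tag>(rawFunction)) { }

        // Converting to another tag is an explicit, compile-time-visible step,
        // mirroring the retagged<NewTag>() calls in the patch.
        template<PtrTag newTag>
        TaggedFunctionPtr<newTag> retagged() const
        {
            TaggedFunctionPtr<newTag> result;
            result.m_ptr = retagPointer<tag, newTag>(m_ptr);
            return result;
        }

        void* executableAddress() const { return m_ptr; }

    private:
        template<PtrTag> friend class TaggedFunctionPtr;
        void* m_ptr { nullptr };
    };

    static void slowPathOperation() { std::puts("slow path"); }

    int main()
    {
        // A C function enters the system tagged as a C function pointer...
        TaggedFunctionPtr<CFunctionPtrTag> callee(reinterpret_cast<void*>(&slowPathOperation));
        // ...and a key-like consumer stores it retagged for operation calls, much as
        // SlowPathCallKey's constructor does with callTarget.retagged<OperationPtrTag>().
        TaggedFunctionPtr<OperationPtrTag> stored = callee.retagged<OperationPtrTag>();
        std::printf("stored target: %p\n", stored.executableAddress());
        return 0;
    }
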
diff --git a/Source/JavaScriptCore/ftl/FTLState.cpp b/Source/JavaScriptCore/ftl/FTLState.cpp
index 2b90aa4..bb2ff3c 100644
--- a/Source/JavaScriptCore/ftl/FTLState.cpp
+++ b/Source/JavaScriptCore/ftl/FTLState.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -42,7 +42,6 @@
 
 State::State(Graph& graph)
     : graph(graph)
-    , generatedFunction(0)
 {
     switch (graph.m_plan.mode) {
     case FTLMode: {
diff --git a/Source/JavaScriptCore/ftl/FTLThunks.cpp b/Source/JavaScriptCore/ftl/FTLThunks.cpp
index 3c000b2..0c6a676 100644
--- a/Source/JavaScriptCore/ftl/FTLThunks.cpp
+++ b/Source/JavaScriptCore/ftl/FTLThunks.cpp
@@ -46,8 +46,8 @@
     NotNeeded 
 };
 
-static MacroAssemblerCodeRef genericGenerationThunkGenerator(
-    VM* vm, FunctionPtr generationFunction, PtrTag resultThunkTag, const char* name, unsigned extraPopsToRestore, FrameAndStackAdjustmentRequirement frameAndStackAdjustmentRequirement)
+static MacroAssemblerCodeRef<JITThunkPtrTag> genericGenerationThunkGenerator(
+    VM* vm, FunctionPtr<CFunctionPtrTag> generationFunction, PtrTag resultTag, const char* name, unsigned extraPopsToRestore, FrameAndStackAdjustmentRequirement frameAndStackAdjustmentRequirement)
 {
     AssemblyHelpers jit(nullptr);
 
@@ -86,9 +86,8 @@
     jit.peek(
         GPRInfo::argumentGPR1,
         (stackMisalignment - MacroAssembler::pushToSaveByteOffset()) / sizeof(void*));
-    PtrTag generatorCallTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-    MacroAssembler::Call functionCall = jit.call(generatorCallTag);
-    
+    MacroAssembler::Call functionCall = jit.call(OperationPtrTag);
+
     // At this point we want to make a tail call to what was returned to us in the
     // returnValueGPR. But at the same time as we do this, we must restore all registers.
     // The way we will accomplish this is by arranging to have the tail call target in the
@@ -117,30 +116,30 @@
     restoreAllRegisters(jit, buffer);
 
 #if CPU(ARM64) && USE(POINTER_PROFILING)
-    jit.untagPtr(AssemblyHelpers::linkRegister, GPRInfo::callFrameRegister);
+    jit.untagPtr(AssemblyHelpers::linkRegister, resultTag);
     jit.tagReturnAddress();
+#else
+    UNUSED_PARAM(resultTag);
 #endif
     jit.ret();
     
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    patchBuffer.link(functionCall, FunctionPtr(generationFunction, generatorCallTag));
-    return FINALIZE_CODE(patchBuffer, resultThunkTag, "%s", name);
+    patchBuffer.link(functionCall, generationFunction.retagged<OperationPtrTag>());
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "%s", name);
 }
 
-MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> osrExitGenerationThunkGenerator(VM* vm)
 {
     unsigned extraPopsToRestore = 0;
-    PtrTag thunkTag = ptrTag(FTLOSRExitPtrTag, vm);
     return genericGenerationThunkGenerator(
-        vm, FunctionPtr(compileFTLOSRExit), thunkTag, "FTL OSR exit generation thunk", extraPopsToRestore, FrameAndStackAdjustmentRequirement::Needed);
+        vm, compileFTLOSRExit, OSRExitPtrTag, "FTL OSR exit generation thunk", extraPopsToRestore, FrameAndStackAdjustmentRequirement::Needed);
 }
 
-MacroAssemblerCodeRef lazySlowPathGenerationThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> lazySlowPathGenerationThunkGenerator(VM* vm)
 {
     unsigned extraPopsToRestore = 1;
-    PtrTag thunkTag = ptrTag(FTLLazySlowPathPtrTag, vm);
     return genericGenerationThunkGenerator(
-        vm, FunctionPtr(compileFTLLazySlowPath), thunkTag, "FTL lazy slow path generation thunk", extraPopsToRestore, FrameAndStackAdjustmentRequirement::NotNeeded);
+        vm, compileFTLLazySlowPath, JITStubRoutinePtrTag, "FTL lazy slow path generation thunk", extraPopsToRestore, FrameAndStackAdjustmentRequirement::NotNeeded);
 }
 
 static void registerClobberCheck(AssemblyHelpers& jit, RegisterSet dontClobber)
@@ -171,7 +170,7 @@
     }
 }
 
-MacroAssemblerCodeRef slowPathCallThunkGenerator(const SlowPathCallKey& key)
+MacroAssemblerCodeRef<JITThunkPtrTag> slowPathCallThunkGenerator(const SlowPathCallKey& key)
 {
     AssemblyHelpers jit(nullptr);
     jit.tagReturnAddress();
@@ -204,9 +203,8 @@
     jit.storePtr(GPRInfo::nonArgGPR0, AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, key.offset()));
     
     registerClobberCheck(jit, key.argumentRegisters());
-    
-    PtrTag callTag = ptrTag(FTLOperationPtrTag, nextPtrTagID());
-    AssemblyHelpers::Call call = jit.call(callTag);
+
+    AssemblyHelpers::Call call = jit.call(OperationPtrTag);
 
     jit.loadPtr(AssemblyHelpers::Address(MacroAssembler::stackPointerRegister, key.offset()), GPRInfo::nonPreservedNonReturnGPR);
     jit.restoreReturnAddressBeforeReturn(GPRInfo::nonPreservedNonReturnGPR);
@@ -232,8 +230,8 @@
     jit.ret();
 
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    patchBuffer.link(call, FunctionPtr(key.callTarget(), callTag));
-    return FINALIZE_CODE(patchBuffer, key.callPtrTag(), "FTL slow path call thunk for %s", toCString(key).data());
+    patchBuffer.link(call, key.callTarget());
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "FTL slow path call thunk for %s", toCString(key).data());
 }
 
 } } // namespace JSC::FTL
diff --git a/Source/JavaScriptCore/ftl/FTLThunks.h b/Source/JavaScriptCore/ftl/FTLThunks.h
index 16024f3..95b9dfe 100644
--- a/Source/JavaScriptCore/ftl/FTLThunks.h
+++ b/Source/JavaScriptCore/ftl/FTLThunks.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -38,36 +38,36 @@
 
 namespace FTL {
 
-MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM*);
-MacroAssemblerCodeRef lazySlowPathGenerationThunkGenerator(VM*);
-MacroAssemblerCodeRef slowPathCallThunkGenerator(const SlowPathCallKey&);
+MacroAssemblerCodeRef<JITThunkPtrTag> osrExitGenerationThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> lazySlowPathGenerationThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> slowPathCallThunkGenerator(const SlowPathCallKey&);
 
 template<typename KeyTypeArgument>
 struct ThunkMap {
     typedef KeyTypeArgument KeyType;
-    typedef HashMap<KeyType, MacroAssemblerCodeRef> ToThunkMap;
-    typedef HashMap<MacroAssemblerCodePtr, KeyType> FromThunkMap;
+    typedef HashMap<KeyType, MacroAssemblerCodeRef<JITThunkPtrTag>> ToThunkMap;
+    typedef HashMap<MacroAssemblerCodePtr<JITThunkPtrTag>, KeyType> FromThunkMap;
     
     ToThunkMap m_toThunk;
     FromThunkMap m_fromThunk;
 };
 
 template<typename MapType, typename GeneratorType>
-MacroAssemblerCodeRef generateIfNecessary(
+MacroAssemblerCodeRef<JITThunkPtrTag> generateIfNecessary(
     MapType& map, const typename MapType::KeyType& key, GeneratorType generator)
 {
     typename MapType::ToThunkMap::iterator iter = map.m_toThunk.find(key);
     if (iter != map.m_toThunk.end())
         return iter->value;
-    
-    MacroAssemblerCodeRef result = generator(key);
+
+    MacroAssemblerCodeRef<JITThunkPtrTag> result = generator(key);
     map.m_toThunk.add(key, result);
     map.m_fromThunk.add(result.code(), key);
     return result;
 }
 
 template<typename MapType>
-typename MapType::KeyType keyForThunk(MapType& map, MacroAssemblerCodePtr ptr)
+typename MapType::KeyType keyForThunk(MapType& map, MacroAssemblerCodePtr<JITThunkPtrTag> ptr)
 {
     typename MapType::FromThunkMap::iterator iter = map.m_fromThunk.find(ptr);
     RELEASE_ASSERT(iter != map.m_fromThunk.end());
@@ -79,13 +79,13 @@
     WTF_MAKE_NONCOPYABLE(Thunks);
 public:
     Thunks() = default;
-    MacroAssemblerCodeRef getSlowPathCallThunk(const SlowPathCallKey& key)
+    MacroAssemblerCodeRef<JITThunkPtrTag> getSlowPathCallThunk(const SlowPathCallKey& key)
     {
         return generateIfNecessary(
             m_slowPathCallThunks, key, slowPathCallThunkGenerator);
     }
-    
-    SlowPathCallKey keyForSlowPathCallThunk(MacroAssemblerCodePtr ptr)
+
+    SlowPathCallKey keyForSlowPathCallThunk(MacroAssemblerCodePtr<JITThunkPtrTag> ptr)
     {
         return keyForThunk(m_slowPathCallThunks, ptr);
     }
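
The ThunkMap and generateIfNecessary changes above only tighten the types; the caching scheme itself is unchanged: a thunk is generated once per key and recorded in both a key-to-code map and a code-to-key map. A rough standalone analogue using std::unordered_map follows; ThunkCache and its string-valued "thunks" are illustrative stand-ins for the real MacroAssemblerCodeRef<JITThunkPtrTag>-valued maps.

    // Illustrative analogue of ThunkMap/generateIfNecessary/keyForThunk: memoize a
    // generated artifact per key and keep a reverse map from artifact to key.
    #include <cassert>
    #include <cstdio>
    #include <string>
    #include <unordered_map>

    struct ThunkCache {
        std::unordered_map<int, std::string> toThunk;   // key -> generated thunk
        std::unordered_map<std::string, int> fromThunk; // generated thunk -> key

        template<typename Generator>
        const std::string& generateIfNecessary(int key, Generator generator)
        {
            auto it = toThunk.find(key);
            if (it != toThunk.end())
                return it->second;
            auto added = toThunk.emplace(key, generator(key)).first;
            fromThunk.emplace(added->second, key);
            return added->second;
        }

        int keyForThunk(const std::string& thunk) const
        {
            auto it = fromThunk.find(thunk);
            assert(it != fromThunk.end());
            return it->second;
        }
    };

    int main()
    {
        ThunkCache cache;
        auto generator = [](int key) { return "thunk#" + std::to_string(key); };
        const std::string& first = cache.generateIfNecessary(7, generator);
        const std::string& again = cache.generateIfNecessary(7, generator); // cache hit
        std::printf("%s %s key=%d\n", first.c_str(), again.c_str(), cache.keyForThunk(first));
        return 0;
    }
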
diff --git a/Source/JavaScriptCore/interpreter/InterpreterInlines.h b/Source/JavaScriptCore/interpreter/InterpreterInlines.h
index 655b516..319d121 100644
--- a/Source/JavaScriptCore/interpreter/InterpreterInlines.h
+++ b/Source/JavaScriptCore/interpreter/InterpreterInlines.h
@@ -47,7 +47,7 @@
     // The OpcodeID is embedded in the int32_t word preceding the location of
     // the LLInt code for the opcode (see the EMBED_OPCODE_ID_IF_NEEDED macro
     // in LowLevelInterpreter.cpp).
-    MacroAssemblerCodePtr codePtr(removeCodePtrTag<void*>(opcode));
+    auto codePtr = MacroAssemblerCodePtr<BytecodePtrTag>::createFromExecutableAddress(opcode);
     int32_t* opcodeIDAddress = codePtr.dataLocation<int32_t*>() - 1;
     OpcodeID opcodeID = static_cast<OpcodeID>(*opcodeIDAddress);
     ASSERT(opcodeID < NUMBER_OF_BYTECODE_IDS);
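
The InterpreterInlines.h hunk above relies on the layout described in its comment: the OpcodeID is embedded in the int32_t immediately preceding each LLInt opcode entry, so the lookup steps back one word from the (now BytecodePtrTag-tagged) code pointer. Below is a small self-contained sketch of that back-one-word read, using a plain byte buffer in place of real LLInt code.

    // Illustrative sketch of the "id word precedes the entry" layout: given only a
    // pointer to an entry, the identifying int32_t is read from the word before it.
    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    int main()
    {
        // Lay out [int32_t id][entry bytes...] in one buffer.
        std::vector<unsigned char> buffer(sizeof(int32_t) + 16, 0);
        const int32_t embeddedID = 42;
        std::memcpy(buffer.data(), &embeddedID, sizeof(embeddedID));

        // The consumer is handed only the entry address...
        const unsigned char* entry = buffer.data() + sizeof(int32_t);

        // ...and steps back one int32_t to recover the id, as
        // "codePtr.dataLocation<int32_t*>() - 1" does in the patch.
        int32_t recovered;
        std::memcpy(&recovered, entry - sizeof(int32_t), sizeof(recovered));
        std::printf("recovered opcode id = %d\n", recovered);
        return 0;
    }
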
diff --git a/Source/JavaScriptCore/jit/AssemblyHelpers.cpp b/Source/JavaScriptCore/jit/AssemblyHelpers.cpp
index 77ca1d7..29ad53e 100644
--- a/Source/JavaScriptCore/jit/AssemblyHelpers.cpp
+++ b/Source/JavaScriptCore/jit/AssemblyHelpers.cpp
@@ -337,9 +337,8 @@
 #else
     move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
 #endif
-    PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-    move(TrustedImmPtr(tagCFunctionPtr(operationExceptionFuzz, tag)), GPRInfo::nonPreservedNonReturnGPR);
-    call(GPRInfo::nonPreservedNonReturnGPR, tag);
+    move(TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationExceptionFuzz)), GPRInfo::nonPreservedNonReturnGPR);
+    call(GPRInfo::nonPreservedNonReturnGPR, OperationPtrTag);
 
     for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
         move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
@@ -713,10 +712,9 @@
     Call call = nearCall();
     addLinkTask(
         [=, &vm] (LinkBuffer& linkBuffer) {
-            PtrTag linkTag = ptrTag(LinkVirtualCallPtrTag, &vm);
-            MacroAssemblerCodeRef virtualThunk = virtualThunkFor(&vm, *info);
+            MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(&vm, *info);
             info->setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
-            linkBuffer.link(call, CodeLocationLabel(virtualThunk.retaggedCode(linkTag, NearCodePtrTag)));
+            linkBuffer.link(call, CodeLocationLabel<JITStubRoutinePtrTag>(virtualThunk.code()));
         });
 }
 
@@ -933,9 +931,8 @@
 #else
 #error "JIT not supported on this platform."
 #endif
-    PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-    move(TrustedImmPtr(tagCFunctionPtr(function, tag)), scratch);
-    call(scratch, tag);
+    move(TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(function)), scratch);
+    call(scratch, OperationPtrTag);
 
     move(TrustedImmPtr(scratchBuffer->addressOfActiveLength()), GPRInfo::regT0);
     storePtr(TrustedImmPtr(nullptr), GPRInfo::regT0);
diff --git a/Source/JavaScriptCore/jit/CCallHelpers.cpp b/Source/JavaScriptCore/jit/CCallHelpers.cpp
index f43e443..74d740f 100644
--- a/Source/JavaScriptCore/jit/CCallHelpers.cpp
+++ b/Source/JavaScriptCore/jit/CCallHelpers.cpp
@@ -59,9 +59,8 @@
     loadPtr(Address(scratch1NonArgGPR), shadowPacket);
     Jump ok = branchPtr(Below, shadowPacket, TrustedImmPtr(vm.shadowChicken().logEnd()));
     setupArguments<decltype(operationProcessShadowChickenLog)>();
-    PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-    move(TrustedImmPtr(tagCFunctionPtr(operationProcessShadowChickenLog, tag)), scratch1NonArgGPR);
-    call(scratch1NonArgGPR, tag);
+    move(TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationProcessShadowChickenLog)), scratch1NonArgGPR);
+    call(scratch1NonArgGPR, OperationPtrTag);
     move(TrustedImmPtr(vm.shadowChicken().addressOfLogCursor()), scratch1NonArgGPR);
     loadPtr(Address(scratch1NonArgGPR), shadowPacket);
     ok.link(this);
diff --git a/Source/JavaScriptCore/jit/ExecutableAllocator.cpp b/Source/JavaScriptCore/jit/ExecutableAllocator.cpp
index 0575aa5..cf06e40 100644
--- a/Source/JavaScriptCore/jit/ExecutableAllocator.cpp
+++ b/Source/JavaScriptCore/jit/ExecutableAllocator.cpp
@@ -206,7 +206,7 @@
             return;
 
         // Assemble a thunk that will serve as the means for writing into the JIT region.
-        MacroAssemblerCodeRef writeThunk = jitWriteThunkGenerator(reinterpret_cast<void*>(writableAddr), stubBase, stubSize);
+        MacroAssemblerCodeRef<JITThunkPtrTag> writeThunk = jitWriteThunkGenerator(reinterpret_cast<void*>(writableAddr), stubBase, stubSize);
 
         int result = 0;
 
@@ -231,7 +231,7 @@
     }
 
 #if CPU(ARM64) && USE(EXECUTE_ONLY_JIT_WRITE_FUNCTION)
-    MacroAssemblerCodeRef jitWriteThunkGenerator(void* writableAddr, void* stubBase, size_t stubSize)
+    MacroAssemblerCodeRef<JITThunkPtrTag> jitWriteThunkGenerator(void* writableAddr, void* stubBase, size_t stubSize)
     {
         using namespace ARM64Registers;
         using TrustedImm32 = MacroAssembler::TrustedImm32;
@@ -299,8 +299,7 @@
         // to appear in the console or anywhere in memory, via the PrintStream buffer.
         // The second is we can't guarantee that the code is readable when using the
         // asyncDisassembly option as our caller will set our pages execute only.
-        PtrTag tag = ptrTag(JITWriteThunkPtrTag, &jitWriteSeparateHeapsFunction);
-        return linkBuffer.finalizeCodeWithoutDisassembly(tag);
+        return linkBuffer.finalizeCodeWithoutDisassembly<JITThunkPtrTag>();
     }
 #else // CPU(ARM64) && USE(EXECUTE_ONLY_JIT_WRITE_FUNCTION)
     static void genericWriteToJITRegion(off_t offset, const void* data, size_t dataSize)
@@ -308,15 +307,18 @@
         memcpy((void*)(startOfFixedWritableMemoryPool + offset), data, dataSize);
     }
 
-    MacroAssemblerCodeRef jitWriteThunkGenerator(void* address, void*, size_t)
+    MacroAssemblerCodeRef<JITThunkPtrTag> jitWriteThunkGenerator(void* address, void*, size_t)
     {
         startOfFixedWritableMemoryPool = reinterpret_cast<uintptr_t>(address);
-        uintptr_t function = (uintptr_t)((void*)&genericWriteToJITRegion);
+        void* function = reinterpret_cast<void*>(&genericWriteToJITRegion);
 #if CPU(ARM_THUMB2)
         // Handle thumb offset
-        function -= 1;
+        uintptr_t functionAsInt = reinterpret_cast<uintptr_t>(function);
+        functionAsInt -= 1;
+        function = reinterpret_cast<void*>(functionAsInt);
 #endif
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(MacroAssemblerCodePtr((void*)function));
+        auto codePtr = MacroAssemblerCodePtr<JITThunkPtrTag>(tagCFunctionPtr<JITThunkPtrTag>(function));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(codePtr);
     }
 #endif
 
diff --git a/Source/JavaScriptCore/jit/ExecutableAllocator.h b/Source/JavaScriptCore/jit/ExecutableAllocator.h
index 9585d31..9516ddb 100644
--- a/Source/JavaScriptCore/jit/ExecutableAllocator.h
+++ b/Source/JavaScriptCore/jit/ExecutableAllocator.h
@@ -91,8 +91,7 @@
             // Use execute-only write thunk for writes inside the JIT region. This is a variant of
             // memcpy that takes an offset into the JIT region as its destination (first) parameter.
             off_t offset = (off_t)((uintptr_t)dst - startOfFixedExecutableMemoryPool);
-            PtrTag tag = ptrTag(JITWriteThunkPtrTag, &jitWriteSeparateHeapsFunction);
-            retagCodePtr(jitWriteSeparateHeapsFunction, tag, CFunctionPtrTag)(offset, src, n);
+            retagCodePtr<JITThunkPtrTag, CFunctionPtrTag>(jitWriteSeparateHeapsFunction)(offset, src, n);
             return dst;
         }
     }
diff --git a/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.cpp b/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.cpp
index bab6de1..2aa17f4 100644
--- a/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.cpp
+++ b/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -41,7 +41,7 @@
 namespace JSC {
 
 GCAwareJITStubRoutine::GCAwareJITStubRoutine(
-    const MacroAssemblerCodeRef& code, VM& vm)
+    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& code, VM& vm)
     : JITStubRoutine(code)
     , m_mayBeExecuting(false)
     , m_isJettisoned(false)
@@ -82,7 +82,7 @@
 }
 
 MarkingGCAwareJITStubRoutine::MarkingGCAwareJITStubRoutine(
-    const MacroAssemblerCodeRef& code, VM& vm, const JSCell* owner,
+    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& code, VM& vm, const JSCell* owner,
     const Vector<JSCell*>& cells)
     : GCAwareJITStubRoutine(code, vm)
     , m_cells(cells.size())
@@ -103,7 +103,7 @@
 
 
 GCAwareJITStubRoutineWithExceptionHandler::GCAwareJITStubRoutineWithExceptionHandler(
-    const MacroAssemblerCodeRef& code, VM& vm,  const JSCell* owner, const Vector<JSCell*>& cells,
+    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& code, VM& vm,  const JSCell* owner, const Vector<JSCell*>& cells,
     CodeBlock* codeBlockForExceptionHandlers, CallSiteIndex exceptionHandlerCallSiteIndex)
     : MarkingGCAwareJITStubRoutine(code, vm, owner, cells)
     , m_codeBlockWithExceptionHandler(codeBlockForExceptionHandlers)
@@ -133,7 +133,7 @@
 
 
 Ref<JITStubRoutine> createJITStubRoutine(
-    const MacroAssemblerCodeRef& code,
+    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& code,
     VM& vm,
     const JSCell* owner,
     bool makesCalls,
diff --git a/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.h b/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.h
index 9e73ae5..f5d0f61 100644
--- a/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.h
+++ b/Source/JavaScriptCore/jit/GCAwareJITStubRoutine.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2014, 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -49,7 +49,7 @@
 // list which does not get reclaimed all at once).
 class GCAwareJITStubRoutine : public JITStubRoutine {
 public:
-    GCAwareJITStubRoutine(const MacroAssemblerCodeRef&, VM&);
+    GCAwareJITStubRoutine(const MacroAssemblerCodeRef<JITStubRoutinePtrTag>&, VM&);
     virtual ~GCAwareJITStubRoutine();
     
     void markRequiredObjects(SlotVisitor& visitor)
@@ -76,7 +76,7 @@
 class MarkingGCAwareJITStubRoutine : public GCAwareJITStubRoutine {
 public:
     MarkingGCAwareJITStubRoutine(
-        const MacroAssemblerCodeRef&, VM&, const JSCell* owner, const Vector<JSCell*>&);
+        const MacroAssemblerCodeRef<JITStubRoutinePtrTag>&, VM&, const JSCell* owner, const Vector<JSCell*>&);
     virtual ~MarkingGCAwareJITStubRoutine();
     
 protected:
@@ -94,7 +94,7 @@
 public:
     typedef GCAwareJITStubRoutine Base;
 
-    GCAwareJITStubRoutineWithExceptionHandler(const MacroAssemblerCodeRef&, VM&, const JSCell* owner, const Vector<JSCell*>&, CodeBlock*, CallSiteIndex);
+    GCAwareJITStubRoutineWithExceptionHandler(const MacroAssemblerCodeRef<JITStubRoutinePtrTag>&, VM&, const JSCell* owner, const Vector<JSCell*>&, CodeBlock*, CallSiteIndex);
 
     void aboutToDie() override;
     void observeZeroRefCount() override;
@@ -111,7 +111,7 @@
 // after the first call to C++ or JS.
 // 
 // Ref<JITStubRoutine> createJITStubRoutine(
-//    const MacroAssemblerCodeRef& code,
+//    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& code,
 //    VM& vm,
 //    const JSCell* owner,
 //    bool makesCalls,
@@ -124,7 +124,7 @@
 // way.
 
 Ref<JITStubRoutine> createJITStubRoutine(
-    const MacroAssemblerCodeRef&, VM&, const JSCell* owner, bool makesCalls,
+    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>&, VM&, const JSCell* owner, bool makesCalls,
     const Vector<JSCell*>& = { }, 
     CodeBlock* codeBlockForExceptionHandlers = nullptr, CallSiteIndex exceptionHandlingCallSiteIndex = CallSiteIndex(std::numeric_limits<unsigned>::max()));
 
diff --git a/Source/JavaScriptCore/jit/JIT.cpp b/Source/JavaScriptCore/jit/JIT.cpp
index 188bfdd..b74b92e 100644
--- a/Source/JavaScriptCore/jit/JIT.cpp
+++ b/Source/JavaScriptCore/jit/JIT.cpp
@@ -67,11 +67,11 @@
 Seconds totalFTLDFGCompileTime;
 Seconds totalFTLB3CompileTime;
 
-void ctiPatchCallByReturnAddress(ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction)
+void ctiPatchCallByReturnAddress(ReturnAddressPtr returnAddress, FunctionPtr<CFunctionPtrTag> newCalleeFunction)
 {
     MacroAssembler::repatchCall(
-        CodeLocationCall(MacroAssemblerCodePtr(returnAddress)),
-        newCalleeFunction);
+        CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)),
+        newCalleeFunction.retagged<OperationPtrTag>());
 }
 
 JIT::JIT(VM* vm, CodeBlock* codeBlock, unsigned loopOSREntryBytecodeOffset)
@@ -738,7 +738,7 @@
             addPtr(TrustedImm32(maxFrameExtentForSlowPathCall), stackPointerRegister);
         branchTest32(Zero, returnValueGPR).linkTo(beginLabel, this);
         move(returnValueGPR, GPRInfo::argumentGPR0);
-        emitNakedCall(m_vm->getCTIStub(arityFixupGenerator).retaggedCode(ptrTag(ArityFixupPtrTag, m_vm), NearCodePtrTag));
+        emitNakedCall(m_vm->getCTIStub(arityFixupGenerator).retaggedCode<NoPtrTag>());
 
 #if !ASSERT_DISABLED
         m_bytecodeOffset = std::numeric_limits<unsigned>::max(); // Reset this, in order to guard its use with ASSERTs.
@@ -788,27 +788,25 @@
             ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());
 
             auto* simpleJumpTable = record.jumpTable.simpleJumpTable;
-            PtrTag tag = ptrTag(SwitchTablePtrTag, simpleJumpTable);
-            simpleJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset], tag);
+            simpleJumpTable->ctiDefault = patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + record.defaultOffset]);
 
             for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
                 unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
                 simpleJumpTable->ctiOffsets[j] = offset
-                    ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset], tag)
+                    ? patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + offset])
                     : simpleJumpTable->ctiDefault;
             }
         } else {
             ASSERT(record.type == SwitchRecord::String);
 
             auto* stringJumpTable = record.jumpTable.stringJumpTable;
-            PtrTag tag = ptrTag(SwitchTablePtrTag, stringJumpTable);
             stringJumpTable->ctiDefault =
-                patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset], tag);
+                patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + record.defaultOffset]);
 
             for (auto& location : stringJumpTable->offsetTable.values()) {
                 unsigned offset = location.branchOffset;
                 location.ctiOffset = offset
-                    ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset], tag)
+                    ? patchBuffer.locationOf<JSSwitchPtrTag>(m_labels[bytecodeOffset + offset])
                     : stringJumpTable->ctiDefault;
             }
         }
@@ -816,7 +814,8 @@
 
     for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
         HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
-        handler.nativeCode = patchBuffer.locationOf(m_labels[handler.target], ExceptionHandlerPtrTag);
+        // FIXME: <rdar://problem/39433318>.
+        handler.nativeCode = patchBuffer.locationOf<ExceptionHandlerPtrTag>(m_labels[handler.target]);
     }
 
     for (auto& record : m_calls) {
@@ -832,18 +831,18 @@
         m_putByIds[i].finalize(patchBuffer);
 
     if (m_byValCompilationInfo.size()) {
-        CodeLocationLabel exceptionHandler = patchBuffer.locationOf(m_exceptionHandler, ExceptionHandlerPtrTag);
+        CodeLocationLabel<ExceptionHandlerPtrTag> exceptionHandler = patchBuffer.locationOf<ExceptionHandlerPtrTag>(m_exceptionHandler);
 
         for (const auto& byValCompilationInfo : m_byValCompilationInfo) {
             PatchableJump patchableNotIndexJump = byValCompilationInfo.notIndexJump;
-            CodeLocationJump notIndexJump = CodeLocationJump();
+            auto notIndexJump = CodeLocationJump<JSEntryPtrTag>();
             if (Jump(patchableNotIndexJump).isSet())
-                notIndexJump = CodeLocationJump(patchBuffer.locationOf(patchableNotIndexJump));
-            CodeLocationJump badTypeJump = CodeLocationJump(patchBuffer.locationOf(byValCompilationInfo.badTypeJump));
-            CodeLocationLabel doneTarget = patchBuffer.locationOf(byValCompilationInfo.doneTarget);
-            CodeLocationLabel nextHotPathTarget = patchBuffer.locationOf(byValCompilationInfo.nextHotPathTarget);
-            CodeLocationLabel slowPathTarget = patchBuffer.locationOf(byValCompilationInfo.slowPathTarget);
-            CodeLocationCall returnAddress = patchBuffer.locationOf(byValCompilationInfo.returnAddress);
+                notIndexJump = CodeLocationJump<JSEntryPtrTag>(patchBuffer.locationOf<JSEntryPtrTag>(patchableNotIndexJump));
+            auto badTypeJump = CodeLocationJump<JSEntryPtrTag>(patchBuffer.locationOf<JSEntryPtrTag>(byValCompilationInfo.badTypeJump));
+            CodeLocationLabel<NoPtrTag> doneTarget = patchBuffer.locationOf<NoPtrTag>(byValCompilationInfo.doneTarget);
+            CodeLocationLabel<NoPtrTag> nextHotPathTarget = patchBuffer.locationOf<NoPtrTag>(byValCompilationInfo.nextHotPathTarget);
+            CodeLocationLabel<NoPtrTag> slowPathTarget = patchBuffer.locationOf<NoPtrTag>(byValCompilationInfo.slowPathTarget);
+            CodeLocationCall<NoPtrTag> returnAddress = patchBuffer.locationOf<NoPtrTag>(byValCompilationInfo.returnAddress);
 
             *byValCompilationInfo.byValInfo = ByValInfo(
                 byValCompilationInfo.bytecodeIndex,
@@ -861,22 +860,20 @@
     for (auto& compilationInfo : m_callCompilationInfo) {
         CallLinkInfo& info = *compilationInfo.callLinkInfo;
         info.setCallLocations(
-            CodeLocationLabel(patchBuffer.locationOfNearCall(compilationInfo.callReturnLocation)),
-            CodeLocationLabel(patchBuffer.locationOf(compilationInfo.hotPathBegin)),
-            patchBuffer.locationOfNearCall(compilationInfo.hotPathOther));
+            CodeLocationLabel<JSEntryPtrTag>(patchBuffer.locationOfNearCall<JSEntryPtrTag>(compilationInfo.callReturnLocation)),
+            CodeLocationLabel<JSEntryPtrTag>(patchBuffer.locationOf<JSEntryPtrTag>(compilationInfo.hotPathBegin)),
+            patchBuffer.locationOfNearCall<JSEntryPtrTag>(compilationInfo.hotPathOther));
     }
 
     JITCodeMap jitCodeMap;
     for (unsigned bytecodeOffset = 0; bytecodeOffset < m_labels.size(); ++bytecodeOffset) {
-        if (m_labels[bytecodeOffset].isSet()) {
-            PtrTag tag = ptrTag(CodePtrTag, m_codeBlock, bytecodeOffset);
-            jitCodeMap.append(bytecodeOffset, patchBuffer.locationOf(m_labels[bytecodeOffset], tag));
-        }
+        if (m_labels[bytecodeOffset].isSet())
+            jitCodeMap.append(bytecodeOffset, patchBuffer.locationOf<JSEntryPtrTag>(m_labels[bytecodeOffset]));
     }
     jitCodeMap.finish();
     m_codeBlock->setJITCodeMap(WTFMove(jitCodeMap));
 
-    MacroAssemblerCodePtr withArityCheck = patchBuffer.locationOf(m_arityCheck, CodePtrTag);
+    MacroAssemblerCodePtr<JSEntryPtrTag> withArityCheck = patchBuffer.locationOf<JSEntryPtrTag>(m_arityCheck);
 
     if (Options::dumpDisassembly()) {
         m_disassembler->dump(patchBuffer);
@@ -891,8 +888,8 @@
     if (m_pcToCodeOriginMapBuilder.didBuildMapping())
         m_codeBlock->setPCToCodeOriginMap(std::make_unique<PCToCodeOriginMap>(WTFMove(m_pcToCodeOriginMapBuilder), patchBuffer));
     
-    CodeRef result = FINALIZE_CODE(
-        patchBuffer, CodePtrTag,
+    CodeRef<JSEntryPtrTag> result = FINALIZE_CODE(
+        patchBuffer, JSEntryPtrTag,
         "Baseline JIT code for %s", toCString(CodeBlockWithJITType(m_codeBlock, JITCode::BaselineJIT)).data());
     
     m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT->add(
@@ -933,8 +930,7 @@
         poke(GPRInfo::argumentGPR0);
         poke(GPRInfo::argumentGPR1, 1);
 #endif
-        PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-        m_calls.append(CallRecord(call(tag), std::numeric_limits<unsigned>::max(), FunctionPtr(lookupExceptionHandlerFromCallerFrame, tag)));
+        m_calls.append(CallRecord(call(OperationPtrTag), std::numeric_limits<unsigned>::max(), FunctionPtr<OperationPtrTag>(lookupExceptionHandlerFromCallerFrame)));
         jumpToExceptionHandler(*vm());
     }
 
@@ -953,8 +949,7 @@
         poke(GPRInfo::argumentGPR0);
         poke(GPRInfo::argumentGPR1, 1);
 #endif
-        PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-        m_calls.append(CallRecord(call(tag), std::numeric_limits<unsigned>::max(), FunctionPtr(lookupExceptionHandler, tag)));
+        m_calls.append(CallRecord(call(OperationPtrTag), std::numeric_limits<unsigned>::max(), FunctionPtr<OperationPtrTag>(lookupExceptionHandler)));
         jumpToExceptionHandler(*vm());
     }
 }
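
The pattern repeated through this file is that LinkBuffer::locationOf<Tag>() now takes the tag as a template argument, so the returned CodeLocation carries the tag in its type instead of as a runtime PtrTag value. A small mock of that shape (not the real LinkBuffer or label types):

    enum PtrTag { JSSwitchPtrTag, ExceptionHandlerPtrTag };

    template<PtrTag tag>
    struct CodeLocationLabel {
        explicit CodeLocationLabel(void* location = nullptr) : m_location(location) { }
        void* m_location;
    };

    struct Label { void* address; };

    struct MockLinkBuffer {
        template<PtrTag tag>
        CodeLocationLabel<tag> locationOf(Label label) { return CodeLocationLabel<tag>(label.address); }
    };

    int main()
    {
        MockLinkBuffer patchBuffer;
        Label defaultLabel { nullptr };
        // The tag travels with the type: assigning this to a location declared with a
        // different tag fails to compile instead of relying on a runtime tag value.
        CodeLocationLabel<JSSwitchPtrTag> ctiDefault = patchBuffer.locationOf<JSSwitchPtrTag>(defaultLabel);
        return ctiDefault.m_location == nullptr ? 0 : 1;
    }
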
diff --git a/Source/JavaScriptCore/jit/JIT.h b/Source/JavaScriptCore/jit/JIT.h
index 8fdbb3b..93c5d44 100644
--- a/Source/JavaScriptCore/jit/JIT.h
+++ b/Source/JavaScriptCore/jit/JIT.h
@@ -70,13 +70,13 @@
     struct CallRecord {
         MacroAssembler::Call from;
         unsigned bytecodeOffset;
-        FunctionPtr callee;
+        FunctionPtr<OperationPtrTag> callee;
 
         CallRecord()
         {
         }
 
-        CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, FunctionPtr callee)
+        CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, FunctionPtr<OperationPtrTag> callee)
             : from(from)
             , bytecodeOffset(bytecodeOffset)
             , callee(callee)
@@ -174,7 +174,7 @@
         CallLinkInfo* callLinkInfo;
     };
 
-    void ctiPatchCallByReturnAddress(ReturnAddressPtr, FunctionPtr newCalleeFunction);
+    void ctiPatchCallByReturnAddress(ReturnAddressPtr, FunctionPtr<CFunctionPtrTag> newCalleeFunction);
 
     class JIT_CLASS_ALIGNMENT JIT : private JSInterfaceJIT {
         friend class JITSlowPathCall;
@@ -267,18 +267,18 @@
         void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
 
         // Add a call out from JIT code, without an exception check.
-        Call appendCall(const FunctionPtr function, PtrTag tag)
+        Call appendCall(const FunctionPtr<CFunctionPtrTag> function)
         {
-            Call functionCall = call(tag);
-            m_calls.append(CallRecord(functionCall, m_bytecodeOffset, FunctionPtr(function, tag)));
+            Call functionCall = call(OperationPtrTag);
+            m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.retagged<OperationPtrTag>()));
             return functionCall;
         }
 
 #if OS(WINDOWS) && CPU(X86_64)
-        Call appendCallWithSlowPathReturnType(const FunctionPtr function, PtrTag tag)
+        Call appendCallWithSlowPathReturnType(const FunctionPtr<CFunctionPtrTag> function, PtrTag tag)
         {
             Call functionCall = callWithSlowPathReturnType(tag);
-            m_calls.append(CallRecord(functionCall, m_bytecodeOffset, FunctionPtr(function, tag)));
+            m_calls.append(CallRecord(functionCall, m_bytecodeOffset, function.retagged<OperationPtrTag>()));
             return functionCall;
         }
 #endif
@@ -716,39 +716,23 @@
             linkAllSlowCasesForBytecodeOffset(m_slowCases, iter, m_bytecodeOffset);
         }
 
-        MacroAssembler::Call appendCallWithExceptionCheck(const FunctionPtr, PtrTag);
+        MacroAssembler::Call appendCallWithExceptionCheck(const FunctionPtr<CFunctionPtrTag>);
 #if OS(WINDOWS) && CPU(X86_64)
-        MacroAssembler::Call appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr, PtrTag);
+        MacroAssembler::Call appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr<CFunctionPtrTag>);
 #endif
-        MacroAssembler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr, PtrTag);
-        MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr, PtrTag, int);
-        MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr, PtrTag, int);
+        MacroAssembler::Call appendCallWithCallFrameRollbackOnException(const FunctionPtr<CFunctionPtrTag>);
+        MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr<CFunctionPtrTag>, int);
+        MacroAssembler::Call appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr<CFunctionPtrTag>, int);
         
         template<typename OperationType, typename... Args>
         std::enable_if_t<FunctionTraits<OperationType>::hasResult, MacroAssembler::Call>
-        callOperation(OperationType operation, PtrTag tag, int result, Args... args)
-        {
-            setupArguments<OperationType>(args...);
-            return appendCallWithExceptionCheckSetJSValueResult(operation, tag, result);
-        }
-
-        template<typename OperationType, typename... Args>
-        std::enable_if_t<FunctionTraits<OperationType>::hasResult, MacroAssembler::Call>
         callOperation(OperationType operation, int result, Args... args)
         {
-            PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-            return callOperation(operation, tag, result, args...);
+            setupArguments<OperationType>(args...);
+            return appendCallWithExceptionCheckSetJSValueResult(operation, result);
         }
 
 #if OS(WINDOWS) && CPU(X86_64)
-        template<typename OperationType, typename... Args>
-        std::enable_if_t<std::is_same<typename FunctionTraits<OperationType>::ResultType, SlowPathReturnType>::value, MacroAssembler::Call>
-        callOperation(OperationType operation, PtrTag tag, Args... args)
-        {
-            setupArguments<OperationType>(args...);
-            return appendCallWithExceptionCheckAndSlowPathReturnType(operation, tag);
-        }
-
         template<typename Type>
         static constexpr bool is64BitType() { return sizeof(Type) <= 8; }
 
@@ -757,87 +741,51 @@
 
         template<typename OperationType, typename... Args>
         std::enable_if_t<!std::is_same<typename FunctionTraits<OperationType>::ResultType, SlowPathReturnType>::value, MacroAssembler::Call>
-        callOperation(OperationType operation, PtrTag tag, Args... args)
+        callOperation(OperationType operation, Args... args)
         {
             static_assert(is64BitType<typename FunctionTraits<OperationType>::ResultType>(), "Win64 cannot use standard call when return type is larger than 64 bits.");
             setupArguments<OperationType>(args...);
-            return appendCallWithExceptionCheck(operation, tag);
+            return appendCallWithExceptionCheck(operation);
         }
 #else // OS(WINDOWS) && CPU(X86_64)
         template<typename OperationType, typename... Args>
-        MacroAssembler::Call callOperation(OperationType operation, PtrTag tag, Args... args)
-        {
-            setupArguments<OperationType>(args...);
-            return appendCallWithExceptionCheck(operation, tag);
-        }
-#endif // OS(WINDOWS) && CPU(X86_64)
-
-        template<typename OperationType, typename... Args>
         MacroAssembler::Call callOperation(OperationType operation, Args... args)
         {
-            PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-            return callOperation(operation, tag, args...);
-        }
-
-        template<typename OperationType, typename... Args>
-        std::enable_if_t<FunctionTraits<OperationType>::hasResult, MacroAssembler::Call>
-        callOperationWithProfile(OperationType operation, PtrTag tag, int result, Args... args)
-        {
             setupArguments<OperationType>(args...);
-            return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, tag, result);
+            return appendCallWithExceptionCheck(operation);
         }
+#endif // OS(WINDOWS) && CPU(X86_64)
 
         template<typename OperationType, typename... Args>
         std::enable_if_t<FunctionTraits<OperationType>::hasResult, MacroAssembler::Call>
         callOperationWithProfile(OperationType operation, int result, Args... args)
         {
-            PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-            return callOperationWithProfile(operation, tag, result, args...);
-        }
-
-        template<typename OperationType, typename... Args>
-        MacroAssembler::Call callOperationWithResult(OperationType operation, PtrTag tag, JSValueRegs resultRegs, Args... args)
-        {
             setupArguments<OperationType>(args...);
-            auto result = appendCallWithExceptionCheck(operation, tag);
-            setupResults(resultRegs);
-            return result;
+            return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, result);
         }
 
         template<typename OperationType, typename... Args>
         MacroAssembler::Call callOperationWithResult(OperationType operation, JSValueRegs resultRegs, Args... args)
         {
-            PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-            return callOperationWithResult(operation, tag, resultRegs, args...);
-        }
-
-        template<typename OperationType, typename... Args>
-        MacroAssembler::Call callOperationNoExceptionCheck(OperationType operation, PtrTag tag, Args... args)
-        {
             setupArguments<OperationType>(args...);
-            updateTopCallFrame();
-            return appendCall(operation, tag);
+            auto result = appendCallWithExceptionCheck(operation);
+            setupResults(resultRegs);
+            return result;
         }
 
         template<typename OperationType, typename... Args>
         MacroAssembler::Call callOperationNoExceptionCheck(OperationType operation, Args... args)
         {
-            PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-            return callOperationNoExceptionCheck(operation, tag, args...);
-        }
-
-        template<typename OperationType, typename... Args>
-        MacroAssembler::Call callOperationWithCallFrameRollbackOnException(OperationType operation, PtrTag tag, Args... args)
-        {
             setupArguments<OperationType>(args...);
-            return appendCallWithCallFrameRollbackOnException(operation, tag);
+            updateTopCallFrame();
+            return appendCall(operation);
         }
 
         template<typename OperationType, typename... Args>
         MacroAssembler::Call callOperationWithCallFrameRollbackOnException(OperationType operation, Args... args)
         {
-            PtrTag tag = ptrTag(JITOperationPtrTag, nextPtrTagID());
-            return callOperationWithCallFrameRollbackOnException(operation, tag, args...);
+            setupArguments<OperationType>(args...);
+            return appendCallWithCallFrameRollbackOnException(operation);
         }
 
         template<typename SnippetGenerator>
@@ -849,8 +797,8 @@
 
         void updateTopCallFrame();
 
-        Call emitNakedCall(CodePtr function = CodePtr());
-        Call emitNakedTailCall(CodePtr function = CodePtr());
+        Call emitNakedCall(CodePtr<NoPtrTag> function = CodePtr<NoPtrTag>());
+        Call emitNakedTailCall(CodePtr<NoPtrTag> function = CodePtr<NoPtrTag>());
 
         // Loads the character value of a single character string into dst.
         void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
@@ -936,7 +884,7 @@
 
         std::unique_ptr<JITDisassembler> m_disassembler;
         RefPtr<Profiler::Compilation> m_compilation;
-        static CodeRef stringGetByValStubGenerator(VM*);
+        static CodeRef<JITThunkPtrTag> stringGetByValStubGenerator(VM*);
 
         PCToCodeOriginMapBuilder m_pcToCodeOriginMapBuilder;
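
With the PtrTag arguments gone, appendCall() and the callOperation() family accept FunctionPtr<CFunctionPtrTag> and retag to OperationPtrTag at the boundary. A self-contained sketch of that retag-at-the-boundary idea, using a mock FunctionPtr that only carries the tag in its type and does no re-signing:

    enum PtrTag { CFunctionPtrTag, OperationPtrTag };

    template<PtrTag tag>
    class FunctionPtr {
    public:
        FunctionPtr() = default;
        explicit FunctionPtr(void* value) : m_value(value) { }

        template<PtrTag newTag>
        FunctionPtr<newTag> retagged() const { return FunctionPtr<newTag>(m_value); }

        void* value() const { return m_value; }
    private:
        void* m_value { nullptr };
    };

    struct CallRecord {
        FunctionPtr<OperationPtrTag> callee; // the record only accepts operation-tagged pointers
    };

    static CallRecord appendCall(FunctionPtr<CFunctionPtrTag> function)
    {
        // The caller hands in a C-function-tagged pointer; the record stores it
        // retagged for the operation calling convention.
        return CallRecord { function.retagged<OperationPtrTag>() };
    }

    static void someOperation() { }

    int main()
    {
        CallRecord record = appendCall(FunctionPtr<CFunctionPtrTag>(reinterpret_cast<void*>(&someOperation)));
        return record.callee.value() ? 0 : 1;
    }
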
 
diff --git a/Source/JavaScriptCore/jit/JITArithmetic.cpp b/Source/JavaScriptCore/jit/JITArithmetic.cpp
index 13d4271..3761c9b 100644
--- a/Source/JavaScriptCore/jit/JITArithmetic.cpp
+++ b/Source/JavaScriptCore/jit/JITArithmetic.cpp
@@ -705,12 +705,11 @@
 
     bool generatedInlineCode = mathIC->generateInline(*this, mathICGenerationState);
     if (!generatedInlineCode) {
-        PtrTag tag = ptrTag(MathICPtrTag, currentInstruction);
         ArithProfile* arithProfile = mathIC->arithProfile();
         if (arithProfile && shouldEmitProfiling())
-            callOperationWithResult(profiledFunction, tag, resultRegs, srcRegs, arithProfile);
+            callOperationWithResult(profiledFunction, resultRegs, srcRegs, arithProfile);
         else
-            callOperationWithResult(nonProfiledFunction, tag, resultRegs, srcRegs);
+            callOperationWithResult(nonProfiledFunction, resultRegs, srcRegs);
     } else
         addSlowCase(mathICGenerationState.slowPathJumps);
 
@@ -779,12 +778,11 @@
             emitGetVirtualRegister(op1, leftRegs);
         else if (rightOperand.isConst())
             emitGetVirtualRegister(op2, rightRegs);
-        PtrTag tag = ptrTag(MathICPtrTag, currentInstruction);
         ArithProfile* arithProfile = mathIC->arithProfile();
         if (arithProfile && shouldEmitProfiling())
-            callOperationWithResult(profiledFunction, tag, resultRegs, leftRegs, rightRegs, arithProfile);
+            callOperationWithResult(profiledFunction, resultRegs, leftRegs, rightRegs, arithProfile);
         else
-            callOperationWithResult(nonProfiledFunction, tag, resultRegs, leftRegs, rightRegs);
+            callOperationWithResult(nonProfiledFunction, resultRegs, leftRegs, rightRegs);
     } else
         addSlowCase(mathICGenerationState.slowPathJumps);
 
@@ -819,15 +817,14 @@
     auto slowPathStart = label();
 #endif
 
-    PtrTag tag = ptrTag(MathICPtrTag, currentInstruction);
     ArithProfile* arithProfile = mathIC->arithProfile();
     if (arithProfile && shouldEmitProfiling()) {
         if (mathICGenerationState.shouldSlowPathRepatch)
-            mathICGenerationState.slowPathCall = callOperationWithResult(reinterpret_cast<J_JITOperation_EJMic>(profiledRepatchFunction), tag, resultRegs, srcRegs, TrustedImmPtr(mathIC));
+            mathICGenerationState.slowPathCall = callOperationWithResult(reinterpret_cast<J_JITOperation_EJMic>(profiledRepatchFunction), resultRegs, srcRegs, TrustedImmPtr(mathIC));
         else
-            mathICGenerationState.slowPathCall = callOperationWithResult(profiledFunction, tag, resultRegs, srcRegs, arithProfile);
+            mathICGenerationState.slowPathCall = callOperationWithResult(profiledFunction, resultRegs, srcRegs, arithProfile);
     } else
-        mathICGenerationState.slowPathCall = callOperationWithResult(reinterpret_cast<J_JITOperation_EJMic>(repatchFunction), tag, resultRegs, srcRegs, TrustedImmPtr(mathIC));
+        mathICGenerationState.slowPathCall = callOperationWithResult(reinterpret_cast<J_JITOperation_EJMic>(repatchFunction), resultRegs, srcRegs, TrustedImmPtr(mathIC));
 
 #if ENABLE(MATH_IC_STATS)
     auto slowPathEnd = label();
@@ -886,15 +883,14 @@
     auto slowPathStart = label();
 #endif
 
-    PtrTag callTag = ptrTag(MathICPtrTag, currentInstruction);
     ArithProfile* arithProfile = mathIC->arithProfile();
     if (arithProfile && shouldEmitProfiling()) {
         if (mathICGenerationState.shouldSlowPathRepatch)
-            mathICGenerationState.slowPathCall = callOperationWithResult(bitwise_cast<J_JITOperation_EJJMic>(profiledRepatchFunction), callTag, resultRegs, leftRegs, rightRegs, TrustedImmPtr(mathIC));
+            mathICGenerationState.slowPathCall = callOperationWithResult(bitwise_cast<J_JITOperation_EJJMic>(profiledRepatchFunction), resultRegs, leftRegs, rightRegs, TrustedImmPtr(mathIC));
         else
-            mathICGenerationState.slowPathCall = callOperationWithResult(profiledFunction, callTag, resultRegs, leftRegs, rightRegs, arithProfile);
+            mathICGenerationState.slowPathCall = callOperationWithResult(profiledFunction, resultRegs, leftRegs, rightRegs, arithProfile);
     } else
-        mathICGenerationState.slowPathCall = callOperationWithResult(bitwise_cast<J_JITOperation_EJJMic>(repatchFunction), callTag, resultRegs, leftRegs, rightRegs, TrustedImmPtr(mathIC));
+        mathICGenerationState.slowPathCall = callOperationWithResult(bitwise_cast<J_JITOperation_EJJMic>(repatchFunction), resultRegs, leftRegs, rightRegs, TrustedImmPtr(mathIC));
 
 #if ENABLE(MATH_IC_STATS)
     auto slowPathEnd = label();
diff --git a/Source/JavaScriptCore/jit/JITCall.cpp b/Source/JavaScriptCore/jit/JITCall.cpp
index bf1054b..1daa7bc 100644
--- a/Source/JavaScriptCore/jit/JITCall.cpp
+++ b/Source/JavaScriptCore/jit/JITCall.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008, 2013-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -250,9 +250,8 @@
 
     move(TrustedImmPtr(m_callCompilationInfo[callLinkInfoIndex].callLinkInfo), regT2);
 
-    PtrTag linkTag = ptrTag(LinkCallPtrTag, m_vm);
     m_callCompilationInfo[callLinkInfoIndex].callReturnLocation =
-        emitNakedCall(m_vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag));
+        emitNakedCall(m_vm->getCTIStub(linkCallThunkGenerator).retaggedCode<NoPtrTag>());
 
     if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs) {
         abortWithReason(JITDidReturnFromTailCall);
diff --git a/Source/JavaScriptCore/jit/JITCall32_64.cpp b/Source/JavaScriptCore/jit/JITCall32_64.cpp
index dd7e112..fdc4217 100644
--- a/Source/JavaScriptCore/jit/JITCall32_64.cpp
+++ b/Source/JavaScriptCore/jit/JITCall32_64.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008, 2013-2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -318,7 +318,7 @@
     if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs)
         emitRestoreCalleeSaves();
 
-    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(linkCallThunkGenerator).code());
+    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_vm->getCTIStub(linkCallThunkGenerator).retaggedCode<NoPtrTag>());
 
     if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs) {
         abortWithReason(JITDidReturnFromTailCall);
diff --git a/Source/JavaScriptCore/jit/JITCode.cpp b/Source/JavaScriptCore/jit/JITCode.cpp
index aa48dd5..c4c9b07 100644
--- a/Source/JavaScriptCore/jit/JITCode.cpp
+++ b/Source/JavaScriptCore/jit/JITCode.cpp
@@ -105,7 +105,7 @@
 {
 }
 
-JITCodeWithCodeRef::JITCodeWithCodeRef(CodeRef ref, JITType jitType)
+JITCodeWithCodeRef::JITCodeWithCodeRef(CodeRef<JSEntryPtrTag> ref, JITType jitType)
     : JITCode(jitType)
     , m_ref(ref)
 {
@@ -121,12 +121,12 @@
 void* JITCodeWithCodeRef::executableAddressAtOffset(size_t offset)
 {
     RELEASE_ASSERT(m_ref);
-    assertIsTaggedWith(m_ref.code().executableAddress(), CodePtrTag);
+    assertIsTaggedWith(m_ref.code().executableAddress(), JSEntryPtrTag);
     if (!offset)
         return m_ref.code().executableAddress();
 
-    char* executableAddress = untagCodePtr<char*>(m_ref.code().executableAddress(), CodePtrTag);
-    return tagCodePtr(executableAddress + offset, CodePtrTag);
+    char* executableAddress = untagCodePtr<char*, JSEntryPtrTag>(m_ref.code().executableAddress());
+    return tagCodePtr<JSEntryPtrTag>(executableAddress + offset);
 }
 
 void* JITCodeWithCodeRef::dataAddressAtOffset(size_t offset)
@@ -161,7 +161,7 @@
 {
 }
 
-DirectJITCode::DirectJITCode(JITCode::CodeRef ref, JITCode::CodePtr withArityCheck, JITType jitType)
+DirectJITCode::DirectJITCode(JITCode::CodeRef<JSEntryPtrTag> ref, JITCode::CodePtr<JSEntryPtrTag> withArityCheck, JITType jitType)
     : JITCodeWithCodeRef(ref, jitType)
     , m_withArityCheck(withArityCheck)
 {
@@ -173,7 +173,7 @@
 {
 }
 
-void DirectJITCode::initializeCodeRef(JITCode::CodeRef ref, JITCode::CodePtr withArityCheck)
+void DirectJITCode::initializeCodeRef(JITCode::CodeRef<JSEntryPtrTag> ref, JITCode::CodePtr<JSEntryPtrTag> withArityCheck)
 {
     RELEASE_ASSERT(!m_ref);
     m_ref = ref;
@@ -182,7 +182,7 @@
     ASSERT(m_withArityCheck);
 }
 
-JITCode::CodePtr DirectJITCode::addressForCall(ArityCheckMode arity)
+JITCode::CodePtr<JSEntryPtrTag> DirectJITCode::addressForCall(ArityCheckMode arity)
 {
     switch (arity) {
     case ArityCheckNotRequired:
@@ -193,7 +193,7 @@
         return m_withArityCheck;
     }
     RELEASE_ASSERT_NOT_REACHED();
-    return CodePtr();
+    return CodePtr<JSEntryPtrTag>();
 }
 
 NativeJITCode::NativeJITCode(JITType jitType)
@@ -201,7 +201,7 @@
 {
 }
 
-NativeJITCode::NativeJITCode(CodeRef ref, JITType jitType)
+NativeJITCode::NativeJITCode(CodeRef<JSEntryPtrTag> ref, JITType jitType)
     : JITCodeWithCodeRef(ref, jitType)
 {
 }
@@ -210,13 +210,13 @@
 {
 }
 
-void NativeJITCode::initializeCodeRef(CodeRef ref)
+void NativeJITCode::initializeCodeRef(CodeRef<JSEntryPtrTag> ref)
 {
     ASSERT(!m_ref);
     m_ref = ref;
 }
 
-JITCode::CodePtr NativeJITCode::addressForCall(ArityCheckMode arity)
+JITCode::CodePtr<JSEntryPtrTag> NativeJITCode::addressForCall(ArityCheckMode arity)
 {
     RELEASE_ASSERT(m_ref);
     switch (arity) {
@@ -226,7 +226,7 @@
         return m_ref.code();
     }
     RELEASE_ASSERT_NOT_REACHED();
-    return CodePtr();
+    return CodePtr<JSEntryPtrTag>();
 }
 
 #if ENABLE(JIT)
diff --git a/Source/JavaScriptCore/jit/JITCode.h b/Source/JavaScriptCore/jit/JITCode.h
index af4e343..66c5845 100644
--- a/Source/JavaScriptCore/jit/JITCode.h
+++ b/Source/JavaScriptCore/jit/JITCode.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008, 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -50,8 +50,8 @@
 
 class JITCode : public ThreadSafeRefCounted<JITCode> {
 public:
-    typedef MacroAssemblerCodeRef CodeRef;
-    typedef MacroAssemblerCodePtr CodePtr;
+    template<PtrTag tag> using CodePtr = MacroAssemblerCodePtr<tag>;
+    template<PtrTag tag> using CodeRef = MacroAssemblerCodeRef<tag>;
 
     enum JITType : uint8_t {
         None,
@@ -172,7 +172,7 @@
         return jitCode->jitType();
     }
     
-    virtual CodePtr addressForCall(ArityCheckMode) = 0;
+    virtual CodePtr<JSEntryPtrTag> addressForCall(ArityCheckMode) = 0;
     virtual void* executableAddressAtOffset(size_t offset) = 0;
     void* executableAddress() { return executableAddressAtOffset(0); }
     virtual void* dataAddressAtOffset(size_t offset) = 0;
@@ -205,7 +205,7 @@
 class JITCodeWithCodeRef : public JITCode {
 protected:
     JITCodeWithCodeRef(JITType);
-    JITCodeWithCodeRef(CodeRef, JITType);
+    JITCodeWithCodeRef(CodeRef<JSEntryPtrTag>, JITType);
 
 public:
     virtual ~JITCodeWithCodeRef();
@@ -217,32 +217,32 @@
     bool contains(void*) override;
 
 protected:
-    CodeRef m_ref;
+    CodeRef<JSEntryPtrTag> m_ref;
 };
 
 class DirectJITCode : public JITCodeWithCodeRef {
 public:
     DirectJITCode(JITType);
-    DirectJITCode(CodeRef, CodePtr withArityCheck, JITType);
+    DirectJITCode(CodeRef<JSEntryPtrTag>, CodePtr<JSEntryPtrTag> withArityCheck, JITType);
     virtual ~DirectJITCode();
     
-    void initializeCodeRef(CodeRef, CodePtr withArityCheck);
+    void initializeCodeRef(CodeRef<JSEntryPtrTag>, CodePtr<JSEntryPtrTag> withArityCheck);
 
-    CodePtr addressForCall(ArityCheckMode) override;
+    CodePtr<JSEntryPtrTag> addressForCall(ArityCheckMode) override;
 
 private:
-    CodePtr m_withArityCheck;
+    CodePtr<JSEntryPtrTag> m_withArityCheck;
 };
 
 class NativeJITCode : public JITCodeWithCodeRef {
 public:
     NativeJITCode(JITType);
-    NativeJITCode(CodeRef, JITType);
+    NativeJITCode(CodeRef<JSEntryPtrTag>, JITType);
     virtual ~NativeJITCode();
     
-    void initializeCodeRef(CodeRef);
+    void initializeCodeRef(CodeRef<JSEntryPtrTag>);
 
-    CodePtr addressForCall(ArityCheckMode) override;
+    CodePtr<JSEntryPtrTag> addressForCall(ArityCheckMode) override;
 };
 
 } // namespace JSC
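
The nested CodePtr/CodeRef names become alias templates because a plain typedef cannot forward the new PtrTag parameter. A compilable illustration of the typedef-to-alias-template shape, using a mock wrapper in place of MacroAssemblerCodePtr:

    enum PtrTag { JSEntryPtrTag, JITThunkPtrTag };

    template<PtrTag tag>
    struct MockCodePtr { void* m_value = nullptr; };

    class MockJITCode {
    public:
        // Before: typedef MockCodePtr CodePtr;  (no longer possible once MockCodePtr takes a tag)
        // After: an alias template that forwards the tag to the underlying wrapper.
        template<PtrTag tag> using CodePtr = MockCodePtr<tag>;

        CodePtr<JSEntryPtrTag> addressForCall() const { return m_entry; }

    private:
        CodePtr<JSEntryPtrTag> m_entry;
    };

    int main()
    {
        MockJITCode code;
        MockJITCode::CodePtr<JSEntryPtrTag> entry = code.addressForCall();
        return entry.m_value == nullptr ? 0 : 1;
    }
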
diff --git a/Source/JavaScriptCore/jit/JITCodeMap.h b/Source/JavaScriptCore/jit/JITCodeMap.h
index ea28e57..e1308f3 100644
--- a/Source/JavaScriptCore/jit/JITCodeMap.h
+++ b/Source/JavaScriptCore/jit/JITCodeMap.h
@@ -38,28 +38,28 @@
     struct Entry {
         Entry() { }
 
-        Entry(unsigned bytecodeIndex, CodeLocationLabel codeLocation)
+        Entry(unsigned bytecodeIndex, CodeLocationLabel<JSEntryPtrTag> codeLocation)
             : m_bytecodeIndex(bytecodeIndex)
             , m_codeLocation(codeLocation)
         { }
 
         inline unsigned bytecodeIndex() const { return m_bytecodeIndex; }
-        inline CodeLocationLabel codeLocation() { return m_codeLocation; }
+        inline CodeLocationLabel<JSEntryPtrTag> codeLocation() { return m_codeLocation; }
 
     private:
         unsigned m_bytecodeIndex;
-        CodeLocationLabel m_codeLocation;
+        CodeLocationLabel<JSEntryPtrTag> m_codeLocation;
     };
 
 public:
-    void append(unsigned bytecodeIndex, CodeLocationLabel codeLocation)
+    void append(unsigned bytecodeIndex, CodeLocationLabel<JSEntryPtrTag> codeLocation)
     {
         m_entries.append({ bytecodeIndex, codeLocation });
     }
 
     void finish() { m_entries.shrinkToFit(); }
 
-    CodeLocationLabel find(unsigned bytecodeIndex) const
+    CodeLocationLabel<JSEntryPtrTag> find(unsigned bytecodeIndex) const
     {
         auto* entry =
             binarySearch<Entry, unsigned>(m_entries,
@@ -67,7 +67,7 @@
                     return entry->bytecodeIndex();
                 });
         if (!entry)
-            return CodeLocationLabel();
+            return CodeLocationLabel<JSEntryPtrTag>();
         return entry->codeLocation();
     }
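
JITCodeMap keeps its entries sorted by bytecode index and hands back a CodeLocationLabel<JSEntryPtrTag>, or a null label when there is no entry. A self-contained sketch of that lookup, using std::lower_bound in place of WTF's binarySearch and a mock label type:

    #include <algorithm>
    #include <vector>

    enum PtrTag { JSEntryPtrTag };

    template<PtrTag tag>
    struct CodeLocationLabel {
        explicit CodeLocationLabel(void* location = nullptr) : m_location(location) { }
        explicit operator bool() const { return m_location != nullptr; }
        void* m_location;
    };

    struct Entry {
        unsigned bytecodeIndex;
        CodeLocationLabel<JSEntryPtrTag> codeLocation;
    };

    static CodeLocationLabel<JSEntryPtrTag> find(const std::vector<Entry>& entries, unsigned bytecodeIndex)
    {
        auto it = std::lower_bound(entries.begin(), entries.end(), bytecodeIndex,
            [](const Entry& entry, unsigned key) { return entry.bytecodeIndex < key; });
        if (it == entries.end() || it->bytecodeIndex != bytecodeIndex)
            return CodeLocationLabel<JSEntryPtrTag>(); // null label: no code for this index
        return it->codeLocation;
    }

    int main()
    {
        int dummy = 0;
        std::vector<Entry> entries {
            { 0, CodeLocationLabel<JSEntryPtrTag>(&dummy) },
            { 7, CodeLocationLabel<JSEntryPtrTag>(&dummy) },
        };
        bool found = static_cast<bool>(find(entries, 7));
        bool missing = !find(entries, 3);
        return (found && missing) ? 0 : 1;
    }
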
 
diff --git a/Source/JavaScriptCore/jit/JITDisassembler.cpp b/Source/JavaScriptCore/jit/JITDisassembler.cpp
index 6d15330..989eca2 100644
--- a/Source/JavaScriptCore/jit/JITDisassembler.cpp
+++ b/Source/JavaScriptCore/jit/JITDisassembler.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -161,9 +161,9 @@
 
 void JITDisassembler::dumpDisassembly(PrintStream& out, LinkBuffer& linkBuffer, MacroAssembler::Label from, MacroAssembler::Label to)
 {
-    CodeLocationLabel fromLocation = linkBuffer.locationOf(from);
-    CodeLocationLabel toLocation = linkBuffer.locationOf(to);
-    disassemble(fromLocation, toLocation.executableAddress<uintptr_t>() - fromLocation.executableAddress<uintptr_t>(), "        ", out);
+    CodeLocationLabel<DisassemblyPtrTag> fromLocation = linkBuffer.locationOf<DisassemblyPtrTag>(from);
+    CodeLocationLabel<DisassemblyPtrTag> toLocation = linkBuffer.locationOf<DisassemblyPtrTag>(to);
+    disassemble(fromLocation, toLocation.dataLocation<uintptr_t>() - fromLocation.dataLocation<uintptr_t>(), "        ", out);
 }
 
 } // namespace JSC
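
The disassembly range length is now taken from dataLocation<uintptr_t>() rather than executableAddress<uintptr_t>(); one reading is that only the untagged data address is a plain byte address once code pointers carry tags. A toy illustration of the two accessors, with a single high bit standing in for the real tagging scheme:

    #include <cstdint>

    enum PtrTag : uintptr_t { DisassemblyPtrTag = 0x80000000u };

    template<PtrTag tag>
    struct CodeLocationLabel {
        explicit CodeLocationLabel(uintptr_t raw) : m_taggedBits(raw | tag) { }

        // In this mock, the "executable address" still carries the tag bit...
        template<typename T> T executableAddress() const { return static_cast<T>(m_taggedBits); }
        // ...while dataLocation strips it back to the plain address.
        template<typename T> T dataLocation() const { return static_cast<T>(m_taggedBits & ~static_cast<uintptr_t>(tag)); }

        uintptr_t m_taggedBits;
    };

    int main()
    {
        CodeLocationLabel<DisassemblyPtrTag> from(0x10000);
        CodeLocationLabel<DisassemblyPtrTag> to(0x10040);
        uintptr_t size = to.dataLocation<uintptr_t>() - from.dataLocation<uintptr_t>();
        bool tagStillPresent = (to.executableAddress<uintptr_t>() & DisassemblyPtrTag) != 0;
        return (size == 0x40 && tagStillPresent) ? 0 : 1;
    }
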
diff --git a/Source/JavaScriptCore/jit/JITExceptions.cpp b/Source/JavaScriptCore/jit/JITExceptions.cpp
index 3f4eadb..45c72ce 100644
--- a/Source/JavaScriptCore/jit/JITExceptions.cpp
+++ b/Source/JavaScriptCore/jit/JITExceptions.cpp
@@ -82,7 +82,7 @@
         catchRoutine = catchPCForInterpreter->u.pointer;
 #endif
     } else
-        catchRoutine = LLInt::getCodePtr(handleUncaughtException);
+        catchRoutine = LLInt::getExecutableAddress(handleUncaughtException);
     
     ASSERT(bitwise_cast<uintptr_t>(callFrame) < bitwise_cast<uintptr_t>(vm->topEntryFrame));
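
A plausible reading of the getCodePtr to getExecutableAddress switch above is that once the LLInt getter returns a tag-templatized code pointer, the void* catchRoutine needs an accessor that unwraps it to a raw executable address. The sketch below uses mock types and hypothetical helper names to show that shape; it is not the real LLInt interface.

    enum PtrTag { ExceptionHandlerPtrTag };

    template<PtrTag tag>
    struct CodePtr {
        explicit CodePtr(void* ptr) : m_ptr(ptr) { }
        void* executableAddress() const { return m_ptr; }
        void* m_ptr;
    };

    static void handleUncaughtExceptionStub() { }

    static CodePtr<ExceptionHandlerPtrTag> getCodePtr()
    {
        return CodePtr<ExceptionHandlerPtrTag>(reinterpret_cast<void*>(&handleUncaughtExceptionStub));
    }

    // Convenience for callers that only want a plain pointer, such as the
    // catchRoutine assignment above.
    static void* getExecutableAddress()
    {
        return getCodePtr().executableAddress();
    }

    int main()
    {
        void* catchRoutine = getExecutableAddress();
        return catchRoutine ? 0 : 1;
    }
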
 
diff --git a/Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp b/Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp
index 9dd824d..00eb978 100644
--- a/Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp
+++ b/Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -73,18 +73,18 @@
 void JITByIdGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
 {
     ASSERT(m_start.isSet());
-    CodeLocationLabel start = fastPath.locationOf(m_start);
+    CodeLocationLabel<JITStubRoutinePtrTag> start = fastPath.locationOf<JITStubRoutinePtrTag>(m_start);
     m_stubInfo->patch.start = start;
 
     int32_t inlineSize = MacroAssembler::differenceBetweenCodePtr(
-        start, fastPath.locationOf(m_done));
+        start, fastPath.locationOf<NoPtrTag>(m_done));
     ASSERT(inlineSize > 0);
     m_stubInfo->patch.inlineSize = inlineSize;
 
     m_stubInfo->patch.deltaFromStartToSlowPathCallLocation = MacroAssembler::differenceBetweenCodePtr(
-        start, slowPath.locationOf(m_slowPathCall));
+        start, slowPath.locationOf<NoPtrTag>(m_slowPathCall));
     m_stubInfo->patch.deltaFromStartToSlowPathStart = MacroAssembler::differenceBetweenCodePtr(
-        start, slowPath.locationOf(m_slowPathBegin));
+        start, slowPath.locationOf<NoPtrTag>(m_slowPathBegin));
 }
 
 void JITByIdGenerator::finalize(LinkBuffer& linkBuffer)
diff --git a/Source/JavaScriptCore/jit/JITInlines.h b/Source/JavaScriptCore/jit/JITInlines.h
index 34c42b6..1709007 100644
--- a/Source/JavaScriptCore/jit/JITInlines.h
+++ b/Source/JavaScriptCore/jit/JITInlines.h
@@ -116,21 +116,19 @@
     cont8Bit.link(this);
 }
 
-ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
+ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr<NoPtrTag> target)
 {
     ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
     Call nakedCall = nearCall();
-    assertIsNullOrTaggedWith(function.executableAddress(), NearCodePtrTag);
-    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, FunctionPtr(function)));
+    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, FunctionPtr<OperationPtrTag>(target.retagged<OperationPtrTag>())));
     return nakedCall;
 }
 
-ALWAYS_INLINE JIT::Call JIT::emitNakedTailCall(CodePtr function)
+ALWAYS_INLINE JIT::Call JIT::emitNakedTailCall(CodePtr<NoPtrTag> target)
 {
     ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
     Call nakedCall = nearTailCall();
-    assertIsNullOrTaggedWith(function.executableAddress(), NearCodePtrTag);
-    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, FunctionPtr(function)));
+    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, FunctionPtr<OperationPtrTag>(target.retagged<OperationPtrTag>())));
     return nakedCall;
 }
 
@@ -151,35 +149,35 @@
     storePtr(callFrameRegister, &m_vm->topCallFrame);
 }
 
-ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheck(const FunctionPtr function, PtrTag tag)
+ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheck(const FunctionPtr<CFunctionPtrTag> function)
 {
     updateTopCallFrame();
-    MacroAssembler::Call call = appendCall(function, tag);
+    MacroAssembler::Call call = appendCall(function);
     exceptionCheck();
     return call;
 }
 
 #if OS(WINDOWS) && CPU(X86_64)
-ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr function, PtrTag  tag)
+ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr<CFunctionPtrTag> function)
 {
     updateTopCallFrame();
-    MacroAssembler::Call call = appendCallWithSlowPathReturnType(function, tag);
+    MacroAssembler::Call call = appendCallWithSlowPathReturnType(function);
     exceptionCheck();
     return call;
 }
 #endif
 
-ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithCallFrameRollbackOnException(const FunctionPtr function, PtrTag tag)
+ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithCallFrameRollbackOnException(const FunctionPtr<CFunctionPtrTag> function)
 {
     updateTopCallFrame(); // The callee is responsible for setting topCallFrame to their caller
-    MacroAssembler::Call call = appendCall(function, tag);
+    MacroAssembler::Call call = appendCall(function);
     exceptionCheckWithCallFrameRollback();
     return call;
 }
 
-ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr function, PtrTag tag, int dst)
+ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr<CFunctionPtrTag> function, int dst)
 {
-    MacroAssembler::Call call = appendCallWithExceptionCheck(function, tag);
+    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
 #if USE(JSVALUE64)
     emitPutVirtualRegister(dst, returnValueGPR);
 #else
@@ -188,9 +186,9 @@
     return call;
 }
 
-ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr function, PtrTag tag, int dst)
+ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr<CFunctionPtrTag> function, int dst)
 {
-    MacroAssembler::Call call = appendCallWithExceptionCheck(function, tag);
+    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
     emitValueProfilingSite();
 #if USE(JSVALUE64)
     emitPutVirtualRegister(dst, returnValueGPR);
diff --git a/Source/JavaScriptCore/jit/JITMathIC.h b/Source/JavaScriptCore/jit/JITMathIC.h
index 01da86a..5602437 100644
--- a/Source/JavaScriptCore/jit/JITMathIC.h
+++ b/Source/JavaScriptCore/jit/JITMathIC.h
@@ -62,9 +62,11 @@
     {
     }
 
-    CodeLocationLabel doneLocation() { return m_inlineStart.labelAtOffset(m_inlineSize); }
-    CodeLocationLabel slowPathStartLocation() { return m_inlineStart.labelAtOffset(m_deltaFromStartToSlowPathStart); }
-    CodeLocationCall slowPathCallLocation() { return m_inlineStart.callAtOffset(m_deltaFromStartToSlowPathCallLocation); }
+    // FIXME: These should be tagged with JSInternalPtrTag instead of JSEntryTag.
+    // https://bugs.webkit.org/show_bug.cgi?id=184712
+    CodeLocationLabel<JSEntryPtrTag> doneLocation() { return m_inlineStart.labelAtOffset(m_inlineSize); }
+    CodeLocationLabel<JSEntryPtrTag> slowPathStartLocation() { return m_inlineStart.labelAtOffset(m_deltaFromStartToSlowPathStart); }
+    CodeLocationCall<JSEntryPtrTag> slowPathCallLocation() { return m_inlineStart.callAtOffset(m_deltaFromStartToSlowPathCallLocation); }
     
     bool generateInline(CCallHelpers& jit, MathICGenerationState& state, bool shouldEmitProfiling = true)
     {
@@ -129,7 +131,7 @@
         return false;
     }
 
-    void generateOutOfLine(CodeBlock* codeBlock, FunctionPtr callReplacement)
+    void generateOutOfLine(CodeBlock* codeBlock, FunctionPtr<CFunctionPtrTag> callReplacement)
     {
         auto linkJumpToOutOfLineSnippet = [&] () {
             CCallHelpers jit(codeBlock);
@@ -139,13 +141,16 @@
             RELEASE_ASSERT(jit.m_assembler.buffer().codeSize() <= static_cast<size_t>(m_inlineSize));
             LinkBuffer linkBuffer(jit, m_inlineStart.dataLocation(), jit.m_assembler.buffer().codeSize(), JITCompilationMustSucceed, needsBranchCompaction);
             RELEASE_ASSERT(linkBuffer.isValid());
-            linkBuffer.link(jump, CodeLocationLabel(m_code.code()));
-            FINALIZE_CODE(linkBuffer, NearCodePtrTag, "JITMathIC: linking constant jump to out of line stub");
+            linkBuffer.link(jump, CodeLocationLabel<JITStubRoutinePtrTag>(m_code.code()));
+            FINALIZE_CODE(linkBuffer, NoPtrTag, "JITMathIC: linking constant jump to out of line stub");
         };
 
         auto replaceCall = [&] () {
-            PtrTag callTag = ptrTag(MathICPtrTag, m_instruction);
-            ftlThunkAwareRepatchCall(codeBlock, slowPathCallLocation(), callReplacement, callTag);
+#if COMPILER(MSVC)
+            ftlThunkAwareRepatchCall(codeBlock, slowPathCallLocation().retagged<JSInternalPtrTag>(), callReplacement);
+#else
+            ftlThunkAwareRepatchCall(codeBlock, slowPathCallLocation().template retagged<JSInternalPtrTag>(), callReplacement);
+#endif
         };
 
         bool shouldEmitProfiling = !JITCode::isOptimizingJIT(codeBlock->jitType());
@@ -168,7 +173,7 @@
                     linkBuffer.link(jumpToDone, doneLocation());
 
                     m_code = FINALIZE_CODE_FOR(
-                        codeBlock, linkBuffer, NearCodePtrTag, "JITMathIC: generating out of line fast IC snippet");
+                        codeBlock, linkBuffer, JITStubRoutinePtrTag, "JITMathIC: generating out of line fast IC snippet");
 
                     if (!generationState.shouldSlowPathRepatch) {
                         // We won't need to regenerate, so we can wire the slow path call
@@ -210,7 +215,7 @@
             linkBuffer.link(slowPathJumpList, slowPathStartLocation());
 
             m_code = FINALIZE_CODE_FOR(
-                codeBlock, linkBuffer, NearCodePtrTag, "JITMathIC: generating out of line IC snippet");
+                codeBlock, linkBuffer, JITStubRoutinePtrTag, "JITMathIC: generating out of line IC snippet");
         }
 
         linkJumpToOutOfLineSnippet();
@@ -218,17 +223,17 @@
 
     void finalizeInlineCode(const MathICGenerationState& state, LinkBuffer& linkBuffer)
     {
-        CodeLocationLabel start = linkBuffer.locationOf(state.fastPathStart, NearCodePtrTag);
+        CodeLocationLabel<JSEntryPtrTag> start = linkBuffer.locationOf<JSEntryPtrTag>(state.fastPathStart);
         m_inlineStart = start;
 
         m_inlineSize = MacroAssembler::differenceBetweenCodePtr(
-            start, linkBuffer.locationOf(state.fastPathEnd, NoPtrTag));
+            start, linkBuffer.locationOf<NoPtrTag>(state.fastPathEnd));
         ASSERT(m_inlineSize > 0);
 
         m_deltaFromStartToSlowPathCallLocation = MacroAssembler::differenceBetweenCodePtr(
-            start, linkBuffer.locationOf(state.slowPathCall));
+            start, linkBuffer.locationOf<NoPtrTag>(state.slowPathCall));
         m_deltaFromStartToSlowPathStart = MacroAssembler::differenceBetweenCodePtr(
-            start, linkBuffer.locationOf(state.slowPathStart));
+            start, linkBuffer.locationOf<NoPtrTag>(state.slowPathStart));
     }
 
     ArithProfile* arithProfile() const { return m_arithProfile; }
@@ -247,8 +252,10 @@
 
     ArithProfile* m_arithProfile;
     Instruction* m_instruction;
-    MacroAssemblerCodeRef m_code;
-    CodeLocationLabel m_inlineStart;
+    MacroAssemblerCodeRef<JITStubRoutinePtrTag> m_code;
+    // FIXME: These should be tagged with JSInternalPtrTag instead of JSEntryTag.
+    // https://bugs.webkit.org/show_bug.cgi?id=184712
+    CodeLocationLabel<JSEntryPtrTag> m_inlineStart;
     int32_t m_inlineSize;
     int32_t m_deltaFromStartToSlowPathCallLocation;
     int32_t m_deltaFromStartToSlowPathStart;
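
The replaceCall lambda above spells the call with .template on non-MSVC compilers; in dependent contexts that keyword is what tells the parser that retagged names a member template. A minimal standalone example of the dependent-name case, with mock location types:

    enum PtrTag { JSEntryPtrTag, JSInternalPtrTag };

    template<PtrTag tag>
    struct CodeLocationCall {
        template<PtrTag newTag>
        CodeLocationCall<newTag> retagged() const { return CodeLocationCall<newTag>(); }
    };

    template<PtrTag tag>
    CodeLocationCall<JSInternalPtrTag> repatchTarget(CodeLocationCall<tag> call)
    {
        // "call" has a type that depends on the template parameter "tag". Without
        // ".template", "retagged" is not treated as a template name here, "<" parses
        // as less-than, and the call is ill-formed on conforming compilers.
        return call.template retagged<JSInternalPtrTag>();
    }

    int main()
    {
        CodeLocationCall<JSInternalPtrTag> result = repatchTarget(CodeLocationCall<JSEntryPtrTag>());
        (void)result;
        return 0;
    }
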
diff --git a/Source/JavaScriptCore/jit/JITOpcodes.cpp b/Source/JavaScriptCore/jit/JITOpcodes.cpp
index cca344f..3296e0b 100644
--- a/Source/JavaScriptCore/jit/JITOpcodes.cpp
+++ b/Source/JavaScriptCore/jit/JITOpcodes.cpp
@@ -642,8 +642,7 @@
         callOperation(operationTryOSREnterAtCatchAndValueProfile, m_bytecodeOffset);
     auto skipOSREntry = branchTestPtr(Zero, returnValueGPR);
     emitRestoreCalleeSaves();
-    PtrTag exceptionHandlerTag = ExceptionHandlerPtrTag;
-    jump(returnValueGPR, exceptionHandlerTag);
+    jump(returnValueGPR, ExceptionHandlerPtrTag);
     skipOSREntry.link(this);
     if (buffer && shouldEmitProfiling()) {
         buffer->forEach([&] (ValueProfileAndOperand& profile) {
@@ -681,7 +680,7 @@
 
     emitGetVirtualRegister(scrutinee, regT0);
     callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
-    jump(returnValueGPR, ptrTag(SwitchTablePtrTag, jumpTable));
+    jump(returnValueGPR, JSSwitchPtrTag);
 }
 
 void JIT::emit_op_switch_char(Instruction* currentInstruction)
@@ -697,7 +696,7 @@
 
     emitGetVirtualRegister(scrutinee, regT0);
     callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
-    jump(returnValueGPR, ptrTag(SwitchTablePtrTag, jumpTable));
+    jump(returnValueGPR, JSSwitchPtrTag);
 }
 
 void JIT::emit_op_switch_string(Instruction* currentInstruction)
@@ -712,7 +711,7 @@
 
     emitGetVirtualRegister(scrutinee, regT0);
     callOperation(operationSwitchStringWithUnknownKeyType, regT0, tableIndex);
-    jump(returnValueGPR, ptrTag(SwitchTablePtrTag, jumpTable));
+    jump(returnValueGPR, JSSwitchPtrTag);
 }
 
 void JIT::emit_op_debug(Instruction* currentInstruction)
@@ -1163,17 +1162,17 @@
 
     LinkBuffer patchBuffer(*this, m_codeBlock);
     
-    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
-    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(badType, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
     
     patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
     
     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
-        m_codeBlock, patchBuffer, NearCodePtrTag,
+        m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
         "Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());
     
-    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
-    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric, HasPropertyPtrTag));
+    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
+    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(operationHasIndexedPropertyGeneric));
 }
 
 void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
@@ -1232,7 +1231,7 @@
     
     emitGetVirtualRegister(base, regT0);
     emitGetVirtualRegister(property, regT1);
-    Call call = callOperation(operationHasIndexedPropertyDefault, HasPropertyPtrTag, dst, regT0, regT1, byValInfo);
+    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);
 
     m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
     m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
diff --git a/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp b/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
index a438662..4c2e445 100644
--- a/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
+++ b/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
@@ -1037,17 +1037,17 @@
 
     LinkBuffer patchBuffer(*this, m_codeBlock);
     
-    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
-    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(badType, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
     
     patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
 
     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
-        m_codeBlock, patchBuffer, NoPtrTag,
+        m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
         "Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());
     
-    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
-    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric, NoPtrTag));
+    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
+    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(operationHasIndexedPropertyGeneric));
 }
 
 void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
diff --git a/Source/JavaScriptCore/jit/JITOperations.cpp b/Source/JavaScriptCore/jit/JITOperations.cpp
index 166231b..5ec06b0 100644
--- a/Source/JavaScriptCore/jit/JITOperations.cpp
+++ b/Source/JavaScriptCore/jit/JITOperations.cpp
@@ -801,7 +801,7 @@
     if (tryPutByValOptimize(exec, baseValue, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
         // Don't ever try to optimize.
         byValInfo->tookSlowPath = true;
-        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationPutByValGeneric, PutPropertyPtrTag));
+        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationPutByValGeneric);
     }
     putByVal(exec, baseValue, subscript, value, byValInfo);
 }
@@ -885,7 +885,7 @@
     if (tryDirectPutByValOptimize(exec, object, subscript, byValInfo, ReturnAddressPtr(OUR_RETURN_ADDRESS)) == OptimizationResult::GiveUp) {
         // Don't ever try to optimize.
         byValInfo->tookSlowPath = true;
-        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationDirectPutByValGeneric, PutPropertyPtrTag));
+        ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationDirectPutByValGeneric);
     }
 
     directPutByVal(exec, object, subscript, value, byValInfo);
@@ -932,12 +932,11 @@
     return JSValue::encode(result);
 }
 
-static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo, PtrTag resultTag)
+static SlowPathReturnType handleHostCall(ExecState* execCallee, JSValue callee, CallLinkInfo* callLinkInfo)
 {
     ExecState* exec = execCallee->callerFrame();
     VM* vm = &exec->vm();
     auto scope = DECLARE_THROW_SCOPE(*vm);
-    PtrTag throwExceptionTag = ptrTag(ThrowExceptionPtrTag, vm);
 
     execCallee->setCodeBlock(0);
 
@@ -953,19 +952,19 @@
             vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
             if (UNLIKELY(scope.exception())) {
                 return encodeResult(
-                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode(throwExceptionTag, resultTag).executableAddress(),
+                    vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                     reinterpret_cast<void*>(KeepTheFrame));
             }
 
             return encodeResult(
-                tagCFunctionPtr<void*>(getHostCallReturnValue, resultTag),
+                tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue),
                 reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
         }
     
         ASSERT(callType == CallType::None);
         throwException(exec, scope, createNotAFunctionError(exec, callee));
         return encodeResult(
-            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode(throwExceptionTag, resultTag).executableAddress(),
+            vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
             reinterpret_cast<void*>(KeepTheFrame));
     }
 
@@ -982,17 +981,17 @@
         vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
         if (UNLIKELY(scope.exception())) {
             return encodeResult(
-                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode(throwExceptionTag, resultTag).executableAddress(),
+                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                 reinterpret_cast<void*>(KeepTheFrame));
         }
 
-        return encodeResult(tagCFunctionPtr<void*>(getHostCallReturnValue, resultTag), reinterpret_cast<void*>(KeepTheFrame));
+        return encodeResult(tagCFunctionPtr<void*, JSEntryPtrTag>(getHostCallReturnValue), reinterpret_cast<void*>(KeepTheFrame));
     }
     
     ASSERT(constructType == ConstructType::None);
     throwException(exec, scope, createNotAConstructorError(exec, callee));
     return encodeResult(
-        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode(throwExceptionTag, resultTag).executableAddress(),
+        vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
         reinterpret_cast<void*>(KeepTheFrame));
 }
 
@@ -1002,7 +1001,6 @@
     VM* vm = &exec->vm();
     auto throwScope = DECLARE_THROW_SCOPE(*vm);
 
-    PtrTag linkedTargetTag = ptrTag(LinkCallResultPtrTag, vm);
     CodeSpecializationKind kind = callLinkInfo->specializationKind();
     NativeCallFrameTracer tracer(vm, exec);
     
@@ -1012,26 +1010,26 @@
     JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
     if (!calleeAsFunctionCell) {
         if (auto* internalFunction = jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
-            MacroAssemblerCodePtr codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
+            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
             RELEASE_ASSERT(!!codePtr);
 
             if (!callLinkInfo->seenOnce())
                 callLinkInfo->setSeen();
             else
-                linkFor(execCallee, *callLinkInfo, nullptr, internalFunction, codePtr, CodePtrTag);
+                linkFor(execCallee, *callLinkInfo, nullptr, internalFunction, codePtr);
 
-            void* linkedTarget = retagCodePtr(codePtr.executableAddress(), CodePtrTag, linkedTargetTag);
+            void* linkedTarget = codePtr.executableAddress();
             return encodeResult(linkedTarget, reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
         }
         throwScope.release();
-        return handleHostCall(execCallee, calleeAsValue, callLinkInfo, linkedTargetTag);
+        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
     }
 
     JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
     JSScope* scope = callee->scopeUnchecked();
     ExecutableBase* executable = callee->executable();
 
-    MacroAssemblerCodePtr codePtr;
+    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
     CodeBlock* codeBlock = nullptr;
     if (executable->isHostFunction())
         codePtr = executable->entrypointFor(kind, MustCheckArity);
@@ -1039,8 +1037,7 @@
         FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
 
         auto handleThrowException = [&] () {
-            PtrTag throwExceptionTag = ptrTag(ThrowExceptionPtrTag, vm);
-            void* throwTarget = vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode(throwExceptionTag, linkedTargetTag).executableAddress();
+            void* throwTarget = vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress();
             return encodeResult(throwTarget, reinterpret_cast<void*>(KeepTheFrame));
         };
 
@@ -1065,9 +1062,9 @@
     if (!callLinkInfo->seenOnce())
         callLinkInfo->setSeen();
     else
-        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr, CodePtrTag);
+        linkFor(execCallee, *callLinkInfo, codeBlock, callee, codePtr);
 
-    return encodeResult(codePtr.retagged(CodePtrTag, linkedTargetTag).executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
+    return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
 }
 
 void JIT_OPERATION operationLinkDirectCall(ExecState* exec, CallLinkInfo* callLinkInfo, JSFunction* callee)
@@ -1097,10 +1094,10 @@
 
     JSScope* scope = callee->scopeUnchecked();
 
-    MacroAssemblerCodePtr codePtr;
+    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
     CodeBlock* codeBlock = nullptr;
     if (executable->isHostFunction())
-        codePtr = executable->entrypointFor(kind, MustCheckArity).retagged(CodePtrTag, NearCodePtrTag);
+        codePtr = executable->entrypointFor(kind, MustCheckArity);
     else {
         FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
 
@@ -1112,16 +1109,16 @@
             return;
         unsigned argumentStackSlots = callLinkInfo->maxNumArguments();
         if (argumentStackSlots < static_cast<size_t>(codeBlock->numParameters()))
-            codePtr = functionExecutable->entrypointFor(kind, MustCheckArity).retagged(CodePtrTag, NearCodePtrTag);
+            codePtr = functionExecutable->entrypointFor(kind, MustCheckArity);
         else
-            codePtr = functionExecutable->entrypointFor(kind, ArityCheckNotRequired).retagged(CodePtrTag, NearCodePtrTag);
+            codePtr = functionExecutable->entrypointFor(kind, ArityCheckNotRequired);
     }
     
     linkDirectFor(exec, *callLinkInfo, codeBlock, codePtr);
 }
 
 inline SlowPathReturnType virtualForWithFunction(
-    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell, PtrTag resultTag)
+    ExecState* execCallee, CallLinkInfo* callLinkInfo, JSCell*& calleeAsFunctionCell)
 {
     ExecState* exec = execCallee->callerFrame();
     VM* vm = &exec->vm();
@@ -1134,12 +1131,12 @@
     calleeAsFunctionCell = getJSFunction(calleeAsValue);
     if (UNLIKELY(!calleeAsFunctionCell)) {
         if (jsDynamicCast<InternalFunction*>(*vm, calleeAsValue)) {
-            MacroAssemblerCodePtr codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
+            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm->getCTIInternalFunctionTrampolineFor(kind);
             ASSERT(!!codePtr);
-            return encodeResult(codePtr.retagged(CodePtrTag, resultTag).executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
+            return encodeResult(codePtr.executableAddress(), reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
         }
         throwScope.release();
-        return handleHostCall(execCallee, calleeAsValue, callLinkInfo, resultTag);
+        return handleHostCall(execCallee, calleeAsValue, callLinkInfo);
     }
     
     JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
@@ -1147,12 +1144,11 @@
     ExecutableBase* executable = function->executable();
     if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
         FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
-        PtrTag throwExceptionTag = ptrTag(ThrowExceptionPtrTag, vm);
 
         if (!isCall(kind) && functionExecutable->constructAbility() == ConstructAbility::CannotConstruct) {
             throwException(exec, throwScope, createNotAConstructorError(exec, function));
             return encodeResult(
-                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode(throwExceptionTag, resultTag).executableAddress(),
+                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                 reinterpret_cast<void*>(KeepTheFrame));
         }
 
@@ -1161,23 +1157,20 @@
         EXCEPTION_ASSERT(throwScope.exception() == reinterpret_cast<Exception*>(error));
         if (error) {
             return encodeResult(
-                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode(throwExceptionTag, resultTag).executableAddress(),
+                vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).retaggedCode<JSEntryPtrTag>().executableAddress(),
                 reinterpret_cast<void*>(KeepTheFrame));
         }
     }
     return encodeResult(executable->entrypointFor(
-        kind, MustCheckArity).retagged(CodePtrTag, resultTag).executableAddress(),
+        kind, MustCheckArity).executableAddress(),
         reinterpret_cast<void*>(callLinkInfo->callMode() == CallMode::Tail ? ReuseTheFrame : KeepTheFrame));
 }
 
 SlowPathReturnType JIT_OPERATION operationLinkPolymorphicCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
 {
-    ExecState* exec = execCallee->callerFrame();
-    VM* vm = &exec->vm();
     ASSERT(callLinkInfo->specializationKind() == CodeForCall);
     JSCell* calleeAsFunctionCell;
-    PtrTag resultTag = ptrTag(LinkPolymorphicCallResultPtrTag, vm);
-    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell, resultTag);
+    SlowPathReturnType result = virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCell);
 
     linkPolymorphicCall(execCallee, *callLinkInfo, CallVariant(calleeAsFunctionCell));
     
@@ -1186,11 +1179,8 @@
 
 SlowPathReturnType JIT_OPERATION operationVirtualCall(ExecState* execCallee, CallLinkInfo* callLinkInfo)
 {
-    ExecState* exec = execCallee->callerFrame();
-    VM* vm = &exec->vm();
     JSCell* calleeAsFunctionCellIgnored;
-    PtrTag resultTag = ptrTag(LinkVirtualCallResultPtrTag, vm);
-    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored, resultTag);
+    return virtualForWithFunction(execCallee, callLinkInfo, calleeAsFunctionCellIgnored);
 }
 
 size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
@@ -1590,8 +1580,7 @@
         codeBlock->optimizeSoon();
         codeBlock->unlinkedCodeBlock()->setDidOptimize(TrueTriState);
         void* targetPC = vm.getCTIStub(DFG::osrEntryThunkGenerator).code().executableAddress();
-        PtrTag thunkTag = ptrTag(DFGOSREntryPtrTag, &vm);
-        targetPC = retagCodePtr(targetPC, thunkTag, bitwise_cast<PtrTag>(exec));
+        targetPC = retagCodePtr(targetPC, JITThunkPtrTag, bitwise_cast<PtrTag>(exec));
         return encodeResult(targetPC, dataBuffer);
     }
 
@@ -1642,10 +1631,8 @@
     switch (optimizedReplacement->jitType()) {
     case JITCode::DFGJIT:
     case JITCode::FTLJIT: {
-        void* entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
-        if (entry)
-            assertIsTaggedWith(entry, ExceptionHandlerPtrTag);
-        return static_cast<char*>(entry);
+        MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
+        return entry.executableAddress<char*>();
     }
     default:
         break;
@@ -1664,10 +1651,8 @@
     switch (optimizedReplacement->jitType()) {
     case JITCode::DFGJIT:
     case JITCode::FTLJIT: {
-        void* entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
-        if (entry)
-            assertIsTaggedWith(entry, ExceptionHandlerPtrTag);
-        return static_cast<char*>(entry);
+        MacroAssemblerCodePtr<ExceptionHandlerPtrTag> entry = DFG::prepareCatchOSREntry(exec, optimizedReplacement, bytecodeIndex);
+        return entry.executableAddress<char*>();
     }
     default:
         break;
@@ -1852,7 +1837,7 @@
         uint32_t i = subscript.asUInt32();
         if (isJSString(baseValue)) {
             if (asString(baseValue)->canGetIndex(i)) {
-                ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValString, GetPropertyPtrTag));
+                ctiPatchCallByReturnAddress(returnAddress, operationGetByValString);
                 scope.release();
                 return asString(baseValue)->getIndex(exec, i);
             }
@@ -1992,7 +1977,7 @@
     if (tryGetByValOptimize(exec, baseValue, subscript, byValInfo, returnAddress) == OptimizationResult::GiveUp) {
         // Don't ever try to optimize.
         byValInfo->tookSlowPath = true;
-        ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(operationGetByValGeneric, GetPropertyPtrTag));
+        ctiPatchCallByReturnAddress(returnAddress, operationGetByValGeneric);
     }
 
     return JSValue::encode(getByVal(exec, baseValue, subscript, byValInfo, returnAddress));
@@ -2032,7 +2017,7 @@
         if (++byValInfo->slowPathCount >= 10
             || object->structure(vm)->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
             // Don't ever try to optimize.
-            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(operationHasIndexedPropertyGeneric, HasPropertyPtrTag));
+            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), operationHasIndexedPropertyGeneric);
         }
     }
 
@@ -2093,7 +2078,7 @@
         if (!isJSString(baseValue)) {
             ASSERT(exec->bytecodeOffset());
             auto getByValFunction = byValInfo->stubRoutine ? operationGetByValGeneric : operationGetByValOptimize;
-            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), FunctionPtr(getByValFunction, GetPropertyPtrTag));
+            ctiPatchCallByReturnAddress(ReturnAddressPtr(OUR_RETURN_ADDRESS), getByValFunction);
         }
     } else {
         baseValue.requireObjectCoercible(exec);
@@ -2242,7 +2227,7 @@
             result = jumpTable.ctiForValue((*value)[0]).executableAddress();
     }
 
-    assertIsTaggedWith(result, ptrTag(SwitchTablePtrTag, &jumpTable));
+    assertIsTaggedWith(result, JSSwitchPtrTag);
     return reinterpret_cast<char*>(result);
 }
 
@@ -2261,7 +2246,7 @@
         result = jumpTable.ctiForValue(static_cast<int32_t>(key.asDouble())).executableAddress();
     else
         result = jumpTable.ctiDefault.executableAddress();
-    assertIsTaggedWith(result, ptrTag(SwitchTablePtrTag, &jumpTable));
+    assertIsTaggedWith(result, JSSwitchPtrTag);
     return reinterpret_cast<char*>(result);
 }
 
@@ -2281,7 +2266,7 @@
     } else
         result = jumpTable.ctiDefault.executableAddress();
 
-    assertIsTaggedWith(result, ptrTag(SwitchTablePtrTag, &jumpTable));
+    assertIsTaggedWith(result, JSSwitchPtrTag);
     return reinterpret_cast<char*>(result);
 }
 
diff --git a/Source/JavaScriptCore/jit/JITPropertyAccess.cpp b/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
index 21b1a19..1b97da1 100644
--- a/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
+++ b/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -50,7 +50,7 @@
 namespace JSC {
 #if USE(JSVALUE64)
 
-JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
+JIT::CodeRef<JITThunkPtrTag> JIT::stringGetByValStubGenerator(VM* vm)
 {
     JSInterfaceJIT jit(vm);
     JumpList failures;
@@ -91,7 +91,7 @@
     jit.ret();
     
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, NearCodePtrTag, "String get_by_val stub");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "String get_by_val stub");
 }
 
 void JIT::emit_op_get_by_val(Instruction* currentInstruction)
@@ -232,7 +232,7 @@
     Label coldPathBegin = label();
     gen.slowPathJump().link(this);
 
-    Call call = callOperationWithProfile(operationGetByIdOptimize, GetPropertyPtrTag, dst, gen.stubInfo(), regT0, propertyName.impl());
+    Call call = callOperationWithProfile(operationGetByIdOptimize, dst, gen.stubInfo(), regT0, propertyName.impl());
     gen.reportSlowPathCall(coldPathBegin, call);
     slowDoneCase = jump();
 
@@ -255,7 +255,7 @@
     Jump notString = branchStructure(NotEqual, 
         Address(regT0, JSCell::structureIDOffset()), 
         m_vm->stringStructure.get());
-    emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
+    emitNakedCall(CodeLocationLabel<NoPtrTag>(m_vm->getCTIStub(stringGetByValStubGenerator).retaggedCode<NoPtrTag>()));
     Jump failed = branchTest64(Zero, regT0);
     emitPutVirtualRegister(dst, regT0);
     emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
@@ -270,7 +270,7 @@
     
     emitGetVirtualRegister(base, regT0);
     emitGetVirtualRegister(property, regT1);
-    Call call = callOperation(operationGetByValOptimize, GetPropertyPtrTag, dst, regT0, regT1, byValInfo);
+    Call call = callOperation(operationGetByValOptimize, dst, regT0, regT1, byValInfo);
 
     m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
     m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
@@ -450,7 +450,7 @@
     Label coldPathBegin = label();
     gen.slowPathJump().link(this);
 
-    Call call = callOperation(gen.slowPathFunction(), PutPropertyPtrTag, gen.stubInfo(), regT1, regT0, propertyName.impl());
+    Call call = callOperation(gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, propertyName.impl());
     gen.reportSlowPathCall(coldPathBegin, call);
     doneCases.append(jump());
 
@@ -487,7 +487,7 @@
     emitGetVirtualRegister(property, regT1);
     emitGetVirtualRegister(value, regT2);
     bool isDirect = Interpreter::getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
-    Call call = callOperation(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize, PutPropertyPtrTag, regT0, regT1, regT2, byValInfo);
+    Call call = callOperation(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize, regT0, regT1, regT2, byValInfo);
 
     m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
     m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
@@ -588,7 +588,7 @@
 
     Label coldPathBegin = label();
 
-    Call call = callOperation(operationTryGetByIdOptimize, GetPropertyPtrTag, resultVReg, gen.stubInfo(), regT0, ident->impl());
+    Call call = callOperation(operationTryGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());
     
     gen.reportSlowPathCall(coldPathBegin, call);
 }
@@ -625,7 +625,7 @@
 
     Label coldPathBegin = label();
 
-    Call call = callOperationWithProfile(operationGetByIdDirectOptimize, GetPropertyPtrTag, resultVReg, gen.stubInfo(), regT0, ident->impl());
+    Call call = callOperationWithProfile(operationGetByIdDirectOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());
 
     gen.reportSlowPathCall(coldPathBegin, call);
 }
@@ -688,7 +688,7 @@
     
     Label coldPathBegin = label();
 
-    Call call = callOperationWithProfile(operationGetByIdOptimize, GetPropertyPtrTag, resultVReg, gen.stubInfo(), regT0, ident->impl());
+    Call call = callOperationWithProfile(operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());
 
     gen.reportSlowPathCall(coldPathBegin, call);
 }
@@ -704,7 +704,7 @@
     
     Label coldPathBegin = label();
 
-    Call call = callOperationWithProfile(operationGetByIdWithThisOptimize, GetPropertyPtrTag, resultVReg, gen.stubInfo(), regT0, regT1, ident->impl());
+    Call call = callOperationWithProfile(operationGetByIdWithThisOptimize, resultVReg, gen.stubInfo(), regT0, regT1, ident->impl());
 
     gen.reportSlowPathCall(coldPathBegin, call);
 }
@@ -746,7 +746,7 @@
     
     JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];
 
-    Call call = callOperation(gen.slowPathFunction(), PutPropertyPtrTag, gen.stubInfo(), regT1, regT0, ident->impl());
+    Call call = callOperation(gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, ident->impl());
 
     gen.reportSlowPathCall(coldPathBegin, call);
 }
@@ -1279,17 +1279,17 @@
 
     LinkBuffer patchBuffer(*this, m_codeBlock);
     
-    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
-    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(badType, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
     
     patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
     
     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
-        m_codeBlock, patchBuffer, NearCodePtrTag,
+        m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
         "Baseline get_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());
     
-    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
-    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric, GetPropertyPtrTag));
+    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
+    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(operationGetByValGeneric));
 }
 
 void JIT::privateCompileGetByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, const Identifier& propertyName)
@@ -1304,7 +1304,7 @@
 
     ConcurrentJSLocker locker(m_codeBlock->m_lock);
     LinkBuffer patchBuffer(*this, m_codeBlock);
-    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
     patchBuffer.link(fastDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
     patchBuffer.link(slowDoneCase, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToNextHotPath));
     if (!m_exceptionChecks.empty())
@@ -1317,12 +1317,12 @@
     gen.finalize(patchBuffer);
 
     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
-        m_codeBlock, patchBuffer, NearCodePtrTag,
+        m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
         "Baseline get_by_val with cached property name '%s' stub for %s, return point %p", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value());
     byValInfo->stubInfo = gen.stubInfo();
 
-    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
-    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationGetByValGeneric, GetPropertyPtrTag));
+    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
+    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(operationGetByValGeneric));
 }
 
 void JIT::privateCompilePutByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
@@ -1361,8 +1361,8 @@
     Jump done = jump();
 
     LinkBuffer patchBuffer(*this, m_codeBlock);
-    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
-    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(badType, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
     patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
     if (needsLinkForWriteBarrier) {
         ASSERT(removeCodePtrTag(m_calls.last().callee.executableAddress()) == removeCodePtrTag(operationWriteBarrierSlowPath));
@@ -1372,16 +1372,16 @@
     bool isDirect = Interpreter::getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
     if (!isDirect) {
         byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
-            m_codeBlock, patchBuffer, NearCodePtrTag,
+            m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
             "Baseline put_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());
         
     } else {
         byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
-            m_codeBlock, patchBuffer, NearCodePtrTag,
+            m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
             "Baseline put_by_val_direct stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value());
     }
-    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
-    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(isDirect ? operationDirectPutByValGeneric : operationPutByValGeneric, PutPropertyPtrTag));
+    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
+    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(isDirect ? operationDirectPutByValGeneric : operationPutByValGeneric));
 }
 
 void JIT::privateCompilePutByValWithCachedId(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, PutKind putKind, const Identifier& propertyName)
@@ -1395,7 +1395,7 @@
 
     ConcurrentJSLocker locker(m_codeBlock->m_lock);
     LinkBuffer patchBuffer(*this, m_codeBlock);
-    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
+    patchBuffer.link(slowCases, CodeLocationLabel<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
     patchBuffer.link(doneCases, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
     if (!m_exceptionChecks.empty())
         patchBuffer.link(m_exceptionChecks, byValInfo->exceptionHandler);
@@ -1407,12 +1407,12 @@
     gen.finalize(patchBuffer);
 
     byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
-        m_codeBlock, patchBuffer, NearCodePtrTag,
+        m_codeBlock, patchBuffer, JITStubRoutinePtrTag,
         "Baseline put_by_val%s with cached property name '%s' stub for %s, return point %p", (putKind == Direct) ? "_direct" : "", propertyName.impl()->utf8().data(), toCString(*m_codeBlock).data(), returnAddress.value());
     byValInfo->stubInfo = gen.stubInfo();
 
-    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
-    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(putKind == Direct ? operationDirectPutByValGeneric : operationPutByValGeneric, PutPropertyPtrTag));
+    MacroAssembler::repatchJump(byValInfo->notIndexJump, CodeLocationLabel<JITStubRoutinePtrTag>(byValInfo->stubRoutine->code().code()));
+    MacroAssembler::repatchCall(CodeLocationCall<NoPtrTag>(MacroAssemblerCodePtr<NoPtrTag>(returnAddress)), FunctionPtr<OperationPtrTag>(putKind == Direct ? operationDirectPutByValGeneric : operationPutByValGeneric));
 }
 
 
diff --git a/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp b/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
index 0ceb957..fda846d 100644
--- a/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
+++ b/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
@@ -128,7 +128,7 @@
     callOperation(operationDeleteByValJSResult, dst, JSValueRegs(regT1, regT0), JSValueRegs(regT3, regT2));
 }
 
-JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
+JIT::CodeRef<JITThunkPtrTag> JIT::stringGetByValStubGenerator(VM* vm)
 {
     JSInterfaceJIT jit(vm);
     JumpList failures;
@@ -167,7 +167,7 @@
     jit.ret();
     
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, NoPtrTag, "String get_by_val stub");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "String get_by_val stub");
 }
 
 void JIT::emit_op_get_by_val(Instruction* currentInstruction)
@@ -309,7 +309,7 @@
     Jump nonCell = jump();
     linkSlowCase(iter); // base array check
     Jump notString = branchStructure(NotEqual, Address(regT0, JSCell::structureIDOffset()), m_vm->stringStructure.get());
-    emitNakedCall(m_vm->getCTIStub(stringGetByValStubGenerator).code());
+    emitNakedCall(CodeLocationLabel<NoPtrTag>(m_vm->getCTIStub(stringGetByValStubGenerator).retaggedCode<NoPtrTag>()));
     Jump failed = branchTestPtr(Zero, regT0);
     emitStore(dst, regT1, regT0);
     emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
@@ -557,7 +557,7 @@
     poke(regT1, pokeOffset++);
     poke(regT0, pokeOffset++);
     poke(TrustedImmPtr(byValInfo), pokeOffset++);
-    Call call = appendCallWithExceptionCheck(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize, NoPtrTag);
+    Call call = appendCallWithExceptionCheck(isDirect ? operationDirectPutByValOptimize : operationPutByValOptimize);
 #else
     // The register selection below is chosen to reduce register swapping on ARM.
     // Swapping shouldn't happen on other platforms.
diff --git a/Source/JavaScriptCore/jit/JITStubRoutine.h b/Source/JavaScriptCore/jit/JITStubRoutine.h
index b8b3047..33d2a36 100644
--- a/Source/JavaScriptCore/jit/JITStubRoutine.h
+++ b/Source/JavaScriptCore/jit/JITStubRoutine.h
@@ -49,7 +49,7 @@
     WTF_MAKE_NONCOPYABLE(JITStubRoutine);
     WTF_MAKE_FAST_ALLOCATED;
 public:
-    JITStubRoutine(const MacroAssemblerCodeRef& code)
+    JITStubRoutine(const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& code)
         : m_code(code)
         , m_refCount(1)
     {
@@ -58,9 +58,9 @@
     // Use this if you want to pass a CodePtr to someone who insists on taking
     // a RefPtr<JITStubRoutine>.
     static Ref<JITStubRoutine> createSelfManagedRoutine(
-        MacroAssemblerCodePtr rawCodePointer)
+        MacroAssemblerCodePtr<JITStubRoutinePtrTag> rawCodePointer)
     {
-        return adoptRef(*new JITStubRoutine(MacroAssemblerCodeRef::createSelfManagedCodeRef(rawCodePointer)));
+        return adoptRef(*new JITStubRoutine(MacroAssemblerCodeRef<JITStubRoutinePtrTag>::createSelfManagedCodeRef(rawCodePointer)));
     }
     
     virtual ~JITStubRoutine();
@@ -69,11 +69,11 @@
     // MacroAssemblerCodeRef is copyable, but at the cost of reference
     // counting churn. Returning a reference is a good way of reducing
     // the churn.
-    const MacroAssemblerCodeRef& code() const { return m_code; }
+    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& code() const { return m_code; }
     
-    static MacroAssemblerCodePtr asCodePtr(Ref<JITStubRoutine>&& stubRoutine)
+    static MacroAssemblerCodePtr<JITStubRoutinePtrTag> asCodePtr(Ref<JITStubRoutine>&& stubRoutine)
     {
-        MacroAssemblerCodePtr result = stubRoutine->code().code();
+        MacroAssemblerCodePtr<JITStubRoutinePtrTag> result = stubRoutine->code().code();
         ASSERT(!!result);
         return result;
     }
@@ -109,7 +109,7 @@
 protected:
     virtual void observeZeroRefCount();
 
-    MacroAssemblerCodeRef m_code;
+    MacroAssemblerCodeRef<JITStubRoutinePtrTag> m_code;
     unsigned m_refCount;
 };
 
diff --git a/Source/JavaScriptCore/jit/JITThunks.cpp b/Source/JavaScriptCore/jit/JITThunks.cpp
index f865b0e..5f8c02b 100644
--- a/Source/JavaScriptCore/jit/JITThunks.cpp
+++ b/Source/JavaScriptCore/jit/JITThunks.cpp
@@ -45,46 +45,46 @@
 {
 }
 
-MacroAssemblerCodePtr JITThunks::ctiNativeCall(VM* vm)
+MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeCall(VM* vm)
 {
     ASSERT(VM::canUseJIT());
     return ctiStub(vm, nativeCallGenerator).code();
 }
 
-MacroAssemblerCodePtr JITThunks::ctiNativeConstruct(VM* vm)
+MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeConstruct(VM* vm)
 {
     ASSERT(VM::canUseJIT());
     return ctiStub(vm, nativeConstructGenerator).code();
 }
 
-MacroAssemblerCodePtr JITThunks::ctiNativeTailCall(VM* vm)
+MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeTailCall(VM* vm)
 {
     ASSERT(VM::canUseJIT());
     return ctiStub(vm, nativeTailCallGenerator).code();
 }
 
-MacroAssemblerCodePtr JITThunks::ctiNativeTailCallWithoutSavedTags(VM* vm)
+MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiNativeTailCallWithoutSavedTags(VM* vm)
 {
     ASSERT(VM::canUseJIT());
     return ctiStub(vm, nativeTailCallWithoutSavedTagsGenerator).code();
 }
 
-MacroAssemblerCodePtr JITThunks::ctiInternalFunctionCall(VM* vm)
+MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiInternalFunctionCall(VM* vm)
 {
     ASSERT(VM::canUseJIT());
     return ctiStub(vm, internalFunctionCallGenerator).code();
 }
 
-MacroAssemblerCodePtr JITThunks::ctiInternalFunctionConstruct(VM* vm)
+MacroAssemblerCodePtr<JITThunkPtrTag> JITThunks::ctiInternalFunctionConstruct(VM* vm)
 {
     ASSERT(VM::canUseJIT());
     return ctiStub(vm, internalFunctionConstructGenerator).code();
 }
 
-MacroAssemblerCodeRef JITThunks::ctiStub(VM* vm, ThunkGenerator generator)
+MacroAssemblerCodeRef<JITThunkPtrTag> JITThunks::ctiStub(VM* vm, ThunkGenerator generator)
 {
     LockHolder locker(m_lock);
-    CTIStubMap::AddResult entry = m_ctiStubMap.add(generator, MacroAssemblerCodeRef());
+    CTIStubMap::AddResult entry = m_ctiStubMap.add(generator, MacroAssemblerCodeRef<JITThunkPtrTag>());
     if (entry.isNewEntry) {
         // Compilation thread can only retrieve existing entries.
         ASSERT(!isCompilationThread());
@@ -93,12 +93,12 @@
     return entry.iterator->value;
 }
 
-MacroAssemblerCodeRef JITThunks::existingCTIStub(ThunkGenerator generator)
+MacroAssemblerCodeRef<JITThunkPtrTag> JITThunks::existingCTIStub(ThunkGenerator generator)
 {
     LockHolder locker(m_lock);
     CTIStubMap::iterator entry = m_ctiStubMap.find(generator);
     if (entry == m_ctiStubMap.end())
-        return MacroAssemblerCodeRef();
+        return MacroAssemblerCodeRef<JITThunkPtrTag>();
     return entry->value;
 }
 
@@ -123,12 +123,12 @@
 
     RefPtr<JITCode> forCall;
     if (generator) {
-        MacroAssemblerCodeRef entry = generator(vm);
+        MacroAssemblerCodeRef<JSEntryPtrTag> entry = generator(vm).retagged<JSEntryPtrTag>();
         forCall = adoptRef(new DirectJITCode(entry, entry.code(), JITCode::HostCallThunk));
     } else
-        forCall = adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeCall(vm)), JITCode::HostCallThunk));
+        forCall = adoptRef(new NativeJITCode(MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(ctiNativeCall(vm).retagged<JSEntryPtrTag>()), JITCode::HostCallThunk));
     
-    Ref<JITCode> forConstruct = adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct(vm)), JITCode::HostCallThunk));
+    Ref<JITCode> forConstruct = adoptRef(*new NativeJITCode(MacroAssemblerCodeRef<JSEntryPtrTag>::createSelfManagedCodeRef(ctiNativeConstruct(vm).retagged<JSEntryPtrTag>()), JITCode::HostCallThunk));
     
     NativeExecutable* nativeExecutable = NativeExecutable::create(*vm, forCall.releaseNonNull(), function, WTFMove(forConstruct), constructor, intrinsic, signature, name);
     weakAdd(*m_hostFunctionStubMap, std::make_tuple(function, constructor, name), Weak<NativeExecutable>(nativeExecutable, this));
diff --git a/Source/JavaScriptCore/jit/JITThunks.h b/Source/JavaScriptCore/jit/JITThunks.h
index 01f856e..bd72208 100644
--- a/Source/JavaScriptCore/jit/JITThunks.h
+++ b/Source/JavaScriptCore/jit/JITThunks.h
@@ -51,15 +51,15 @@
     JITThunks();
     virtual ~JITThunks();
 
-    MacroAssemblerCodePtr ctiNativeCall(VM*);
-    MacroAssemblerCodePtr ctiNativeConstruct(VM*);
-    MacroAssemblerCodePtr ctiNativeTailCall(VM*);
-    MacroAssemblerCodePtr ctiNativeTailCallWithoutSavedTags(VM*);
-    MacroAssemblerCodePtr ctiInternalFunctionCall(VM*);
-    MacroAssemblerCodePtr ctiInternalFunctionConstruct(VM*);
+    MacroAssemblerCodePtr<JITThunkPtrTag> ctiNativeCall(VM*);
+    MacroAssemblerCodePtr<JITThunkPtrTag> ctiNativeConstruct(VM*);
+    MacroAssemblerCodePtr<JITThunkPtrTag> ctiNativeTailCall(VM*);
+    MacroAssemblerCodePtr<JITThunkPtrTag> ctiNativeTailCallWithoutSavedTags(VM*);
+    MacroAssemblerCodePtr<JITThunkPtrTag> ctiInternalFunctionCall(VM*);
+    MacroAssemblerCodePtr<JITThunkPtrTag> ctiInternalFunctionConstruct(VM*);
 
-    MacroAssemblerCodeRef ctiStub(VM*, ThunkGenerator);
-    MacroAssemblerCodeRef existingCTIStub(ThunkGenerator);
+    MacroAssemblerCodeRef<JITThunkPtrTag> ctiStub(VM*, ThunkGenerator);
+    MacroAssemblerCodeRef<JITThunkPtrTag> existingCTIStub(ThunkGenerator);
 
     NativeExecutable* hostFunctionStub(VM*, TaggedNativeFunction, TaggedNativeFunction constructor, const String& name);
     NativeExecutable* hostFunctionStub(VM*, TaggedNativeFunction, TaggedNativeFunction constructor, ThunkGenerator, Intrinsic, const DOMJIT::Signature*, const String& name);
@@ -70,7 +70,7 @@
 private:
     void finalize(Handle<Unknown>, void* context) override;
     
-    typedef HashMap<ThunkGenerator, MacroAssemblerCodeRef> CTIStubMap;
+    typedef HashMap<ThunkGenerator, MacroAssemblerCodeRef<JITThunkPtrTag>> CTIStubMap;
     CTIStubMap m_ctiStubMap;
 
     typedef std::tuple<TaggedNativeFunction, TaggedNativeFunction, String> HostFunctionKey;
diff --git a/Source/JavaScriptCore/jit/PCToCodeOriginMap.cpp b/Source/JavaScriptCore/jit/PCToCodeOriginMap.cpp
index 63a46b8..12a4d35 100644
--- a/Source/JavaScriptCore/jit/PCToCodeOriginMap.cpp
+++ b/Source/JavaScriptCore/jit/PCToCodeOriginMap.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -204,20 +204,20 @@
             codeOriginCompressor.write<uintptr_t>(bitwise_cast<uintptr_t>(codeOrigin.inlineCallFrame));
     };
 
-    m_pcRangeStart = linkBuffer.locationOf(builder.m_codeRanges.first().start).dataLocation<uintptr_t>();
-    m_pcRangeEnd = linkBuffer.locationOf(builder.m_codeRanges.last().end).dataLocation<uintptr_t>();
+    m_pcRangeStart = linkBuffer.locationOf<NoPtrTag>(builder.m_codeRanges.first().start).dataLocation<uintptr_t>();
+    m_pcRangeEnd = linkBuffer.locationOf<NoPtrTag>(builder.m_codeRanges.last().end).dataLocation<uintptr_t>();
     m_pcRangeEnd -= 1;
 
     for (unsigned i = 0; i < builder.m_codeRanges.size(); i++) {
         PCToCodeOriginMapBuilder::CodeRange& codeRange = builder.m_codeRanges[i];
-        void* start = linkBuffer.locationOf(codeRange.start).dataLocation();
-        void* end = linkBuffer.locationOf(codeRange.end).dataLocation();
+        void* start = linkBuffer.locationOf<NoPtrTag>(codeRange.start).dataLocation();
+        void* end = linkBuffer.locationOf<NoPtrTag>(codeRange.end).dataLocation();
         ASSERT(m_pcRangeStart <= bitwise_cast<uintptr_t>(start));
         ASSERT(m_pcRangeEnd >= bitwise_cast<uintptr_t>(end) - 1);
         if (start == end)
             ASSERT(i == builder.m_codeRanges.size() - 1);
         if (i > 0)
-            ASSERT(linkBuffer.locationOf(builder.m_codeRanges[i - 1].end).dataLocation() == start);
+            ASSERT(linkBuffer.locationOf<NoPtrTag>(builder.m_codeRanges[i - 1].end).dataLocation() == start);
 
         buildPCTable(start);
         buildCodeOriginTable(codeRange.codeOrigin);
diff --git a/Source/JavaScriptCore/jit/PCToCodeOriginMap.h b/Source/JavaScriptCore/jit/PCToCodeOriginMap.h
index 0c6827d..b3911b0 100644
--- a/Source/JavaScriptCore/jit/PCToCodeOriginMap.h
+++ b/Source/JavaScriptCore/jit/PCToCodeOriginMap.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -74,6 +74,7 @@
     bool m_shouldBuildMapping;
 };
 
+// FIXME: <rdar://problem/39436658>
 class PCToCodeOriginMap {
     WTF_MAKE_NONCOPYABLE(PCToCodeOriginMap);
 public:
diff --git a/Source/JavaScriptCore/jit/PolymorphicCallStubRoutine.cpp b/Source/JavaScriptCore/jit/PolymorphicCallStubRoutine.cpp
index bc0f841..58f6283 100644
--- a/Source/JavaScriptCore/jit/PolymorphicCallStubRoutine.cpp
+++ b/Source/JavaScriptCore/jit/PolymorphicCallStubRoutine.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -69,7 +69,7 @@
 }
 
 PolymorphicCallStubRoutine::PolymorphicCallStubRoutine(
-    const MacroAssemblerCodeRef& codeRef, VM& vm, const JSCell* owner, ExecState* callerFrame,
+    const MacroAssemblerCodeRef<JITStubRoutinePtrTag>& codeRef, VM& vm, const JSCell* owner, ExecState* callerFrame,
     CallLinkInfo& info, const Vector<PolymorphicCallCase>& cases,
     UniqueArray<uint32_t>&& fastCounts)
     : GCAwareJITStubRoutine(codeRef, vm)
diff --git a/Source/JavaScriptCore/jit/PolymorphicCallStubRoutine.h b/Source/JavaScriptCore/jit/PolymorphicCallStubRoutine.h
index 2a39577..5647412 100644
--- a/Source/JavaScriptCore/jit/PolymorphicCallStubRoutine.h
+++ b/Source/JavaScriptCore/jit/PolymorphicCallStubRoutine.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -83,7 +83,7 @@
 class PolymorphicCallStubRoutine : public GCAwareJITStubRoutine {
 public:
     PolymorphicCallStubRoutine(
-        const MacroAssemblerCodeRef&, VM&, const JSCell* owner,
+        const MacroAssemblerCodeRef<JITStubRoutinePtrTag>&, VM&, const JSCell* owner,
         ExecState* callerFrame, CallLinkInfo&, const Vector<PolymorphicCallCase>&,
         UniqueArray<uint32_t>&& fastCounts);
     
diff --git a/Source/JavaScriptCore/jit/Repatch.cpp b/Source/JavaScriptCore/jit/Repatch.cpp
index 6617332..0d51615 100644
--- a/Source/JavaScriptCore/jit/Repatch.cpp
+++ b/Source/JavaScriptCore/jit/Repatch.cpp
@@ -66,40 +66,37 @@
 
 namespace JSC {
 
-static FunctionPtr readPutICCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
+static FunctionPtr<CFunctionPtrTag> readPutICCallTarget(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call)
 {
-    FunctionPtr target = MacroAssembler::readCallTarget(call);
+    FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
 #if ENABLE(FTL_JIT)
     if (codeBlock->jitType() == JITCode::FTLJIT) {
-        MacroAssemblerCodePtr slowPathThunk = MacroAssemblerCodePtr::createFromExecutableAddress(target.executableAddress());
-        auto* callTarget = codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(slowPathThunk).callTarget();
-        return FunctionPtr(callTarget, CFunctionPtrTag);
+        MacroAssemblerCodePtr<JITThunkPtrTag> thunk = MacroAssemblerCodePtr<OperationPtrTag>::createFromExecutableAddress(target.executableAddress()).retagged<JITThunkPtrTag>();
+        return codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(thunk).callTarget().retagged<CFunctionPtrTag>();
     }
 #else
     UNUSED_PARAM(codeBlock);
 #endif // ENABLE(FTL_JIT)
-    return FunctionPtr(untagCFunctionPtr(target.executableAddress(), PutPropertyPtrTag), CFunctionPtrTag);
+    return target.retagged<CFunctionPtrTag>();
 }
 
-void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction, PtrTag callTag)
+void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall<JSInternalPtrTag> call, FunctionPtr<CFunctionPtrTag> newCalleeFunction)
 {
 #if ENABLE(FTL_JIT)
     if (codeBlock->jitType() == JITCode::FTLJIT) {
         VM& vm = *codeBlock->vm();
         FTL::Thunks& thunks = *vm.ftlThunks;
-        FunctionPtr target = MacroAssembler::readCallTarget(call);
-        MacroAssemblerCodePtr slowPathThunk = MacroAssemblerCodePtr::createFromExecutableAddress(target.executableAddress());
+        FunctionPtr<OperationPtrTag> target = MacroAssembler::readCallTarget<OperationPtrTag>(call);
+        auto slowPathThunk = MacroAssemblerCodePtr<JITThunkPtrTag>::createFromExecutableAddress(target.retaggedExecutableAddress<JITThunkPtrTag>());
         FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(slowPathThunk);
-        key = key.withCallTarget(newCalleeFunction.executableAddress());
-        newCalleeFunction = FunctionPtr(thunks.getSlowPathCallThunk(key).code());
-        assertIsTaggedWith(newCalleeFunction.executableAddress(), key.callPtrTag());
-        MacroAssembler::repatchCall(call, newCalleeFunction);
+        key = key.withCallTarget(newCalleeFunction);
+        MacroAssembler::repatchCall(call, FunctionPtr<OperationPtrTag>(thunks.getSlowPathCallThunk(key).retaggedCode<OperationPtrTag>()));
         return;
     }
 #else // ENABLE(FTL_JIT)
     UNUSED_PARAM(codeBlock);
 #endif // ENABLE(FTL_JIT)
-    MacroAssembler::repatchCall(call, FunctionPtr(newCalleeFunction, callTag));
+    MacroAssembler::repatchCall(call, newCalleeFunction.retagged<OperationPtrTag>());
 }
 
 enum InlineCacheAction {
@@ -150,7 +147,7 @@
     }
 }
 
-inline FunctionPtr appropriateOptimizingGetByIdFunction(GetByIDKind kind)
+inline FunctionPtr<CFunctionPtrTag> appropriateOptimizingGetByIdFunction(GetByIDKind kind)
 {
     switch (kind) {
     case GetByIDKind::Normal:
@@ -166,7 +163,7 @@
     return operationGetById;
 }
 
-inline FunctionPtr appropriateGetByIdFunction(GetByIDKind kind)
+inline FunctionPtr<CFunctionPtrTag> appropriateGetByIdFunction(GetByIDKind kind)
 {
     switch (kind) {
     case GetByIDKind::Normal:
@@ -210,7 +207,7 @@
 
                     bool generatedCodeInline = InlineAccess::generateArrayLength(stubInfo, jsCast<JSArray*>(baseCell));
                     if (generatedCodeInline) {
-                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag);
+                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                         stubInfo.initArrayLength();
                         return RetryCacheLater;
                     }
@@ -267,7 +264,7 @@
                 if (generatedCodeInline) {
                     LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
                     structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
-                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag);
+                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                     stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
                     return RetryCacheLater;
                 }
@@ -376,7 +373,7 @@
             LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(vm), propertyName));
             
             RELEASE_ASSERT(result.code());
-            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel(result.code()));
+            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
         }
     }
 
@@ -391,7 +388,7 @@
     
     if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache) {
         CodeBlock* codeBlock = exec->codeBlock();
-        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGetByIdFunction(kind), GetPropertyPtrTag);
+        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGetByIdFunction(kind));
     }
 }
 
@@ -463,7 +460,7 @@
                     bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(stubInfo, structure, slot.cachedOffset());
                     if (generatedCodeInline) {
                         LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
-                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind), PutPropertyPtrTag);
+                        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                         stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                         return RetryCacheLater;
                     }
@@ -582,7 +579,7 @@
             
             RELEASE_ASSERT(result.code());
 
-            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel(result.code()));
+            InlineAccess::rewireStubAsJump(stubInfo, CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
         }
     }
 
@@ -597,7 +594,7 @@
     
     if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache) {
         CodeBlock* codeBlock = exec->codeBlock();
-        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind), PutPropertyPtrTag);
+        ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
     }
 }
 
@@ -671,7 +668,7 @@
 
             MacroAssembler::repatchJump(
                 stubInfo.patchableJumpForIn(),
-                CodeLocationLabel(result.code()));
+                CodeLocationLabel<JITStubRoutinePtrTag>(result.code()));
         }
     }
 
@@ -686,24 +683,23 @@
 {
     SuperSamplerScope superSamplerScope(false);
     if (tryCacheIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
-        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn, CFunctionPtrTag);
+        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
 }
 
-static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef, PtrTag linkTag)
+static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
 {
-    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.retaggedCode(linkTag, NearCodePtrTag)));
+    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel<JITStubRoutinePtrTag>(codeRef.code()));
 }
 
-static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator, PtrTag linkTag)
+static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
 {
-    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator), linkTag);
+    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator).retagged<JITStubRoutinePtrTag>());
 }
 
 static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
 {
-    PtrTag linkTag = ptrTag(LinkVirtualCallPtrTag, vm);
-    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
-    linkSlowFor(vm, callLinkInfo, virtualThunk, linkTag);
+    MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(vm, callLinkInfo);
+    linkSlowFor(vm, callLinkInfo, virtualThunk);
     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
 }
 
@@ -721,7 +717,7 @@
 
 void linkFor(
     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
-    JSObject* callee, MacroAssemblerCodePtr codePtr, PtrTag codeTag)
+    JSObject* callee, MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
 {
     ASSERT(!callLinkInfo.stub());
 
@@ -743,14 +739,13 @@
     if (shouldDumpDisassemblyFor(callerCodeBlock))
         dataLog("Linking call in ", FullCodeOrigin(callerCodeBlock, callLinkInfo.codeOrigin()), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
 
-    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr.retagged(codeTag, NearCodePtrTag)));
+    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));
 
     if (calleeCodeBlock)
         calleeCodeBlock->linkIncomingCall(callerFrame, &callLinkInfo);
 
     if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
-        PtrTag linkTag = ptrTag(LinkPolymorphicCallPtrTag, &vm);
-        linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator, linkTag);
+        linkSlowFor(&vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
         return;
     }
     
@@ -759,7 +754,7 @@
 
 void linkDirectFor(
     ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
-    MacroAssemblerCodePtr codePtr)
+    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr)
 {
     ASSERT(!callLinkInfo.stub());
     
@@ -774,8 +769,8 @@
 
     if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
         MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
-    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
-    
+    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel<JSEntryPtrTag>(codePtr));
+
     if (calleeCodeBlock)
         calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
 }
@@ -789,7 +784,7 @@
     linkSlowFor(vm, callLinkInfo);
 }
 
-static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef, PtrTag codeTag)
+static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef<JITStubRoutinePtrTag> codeRef)
 {
     if (callLinkInfo.isDirect()) {
         callLinkInfo.clearCodeBlock();
@@ -801,7 +796,7 @@
         MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
             MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
             static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
-        linkSlowFor(vm, callLinkInfo, codeRef, codeTag);
+        linkSlowFor(vm, callLinkInfo, codeRef);
         callLinkInfo.clearCallee();
     }
     callLinkInfo.clearSeen();
@@ -816,8 +811,7 @@
     if (Options::dumpDisassembly())
         dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");
     
-    PtrTag linkTag = ptrTag(LinkCallPtrTag, &vm);
-    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator), linkTag);
+    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator).retagged<JITStubRoutinePtrTag>());
 }
 
 void linkVirtualFor(ExecState* exec, CallLinkInfo& callLinkInfo)
@@ -829,16 +823,15 @@
     if (shouldDumpDisassemblyFor(callerCodeBlock))
         dataLog("Linking virtual call at ", FullCodeOrigin(callerCodeBlock, callerFrame->codeOrigin()), "\n");
 
-    PtrTag linkTag = ptrTag(LinkVirtualCallPtrTag, &vm);
-    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(&vm, callLinkInfo);
-    revertCall(&vm, callLinkInfo, virtualThunk, linkTag);
+    MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunk = virtualThunkFor(&vm, callLinkInfo);
+    revertCall(&vm, callLinkInfo, virtualThunk);
     callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, vm, nullptr, true));
 }
 
 namespace {
 struct CallToCodePtr {
     CCallHelpers::Call call;
-    MacroAssemblerCodePtr codePtr;
+    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
 };
 } // annonymous namespace
 
@@ -1034,7 +1027,7 @@
         
         CallVariant variant = callCases[caseIndex].variant();
         
-        MacroAssemblerCodePtr codePtr;
+        MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
         if (variant.executable()) {
             ASSERT(variant.executable()->hasJITCodeForCall());
             codePtr = variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);
@@ -1080,7 +1073,7 @@
 #endif
     }
     stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
-    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
+    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().untaggedExecutableAddress()), GPRInfo::regT4);
     
     stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
     AssemblyHelpers::Jump slow = stubJit.jump();
@@ -1098,21 +1091,20 @@
         // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
         bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
         void* target = isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress();
-        patchBuffer.link(callToCodePtr.call, FunctionPtr(MacroAssemblerCodePtr::createFromExecutableAddress(target)));
+        patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(MacroAssemblerCodePtr<JSEntryPtrTag>::createFromExecutableAddress(target)));
 #else
-        patchBuffer.link(callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.retagged(CodePtrTag, NearCodePtrTag)));
+        patchBuffer.link(callToCodePtr.call, FunctionPtr<JSEntryPtrTag>(callToCodePtr.codePtr));
 #endif
     }
     if (isWebAssembly || JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
         patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
     else
         patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
-    PtrTag linkTag = ptrTag(LinkPolymorphicCallPtrTag, &vm);
-    patchBuffer.link(slow, CodeLocationLabel(vm.getCTIStub(linkPolymorphicCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag)));
+    patchBuffer.link(slow, CodeLocationLabel<JITThunkPtrTag>(vm.getCTIStub(linkPolymorphicCallThunkGenerator).code()));
     
     auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
         FINALIZE_CODE_FOR(
-            callerCodeBlock, patchBuffer, NearCodePtrTag,
+            callerCodeBlock, patchBuffer, JITStubRoutinePtrTag,
             "Polymorphic call stub for %s, return point %p, targets %s",
                 isWebAssembly ? "WebAssembly" : toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                 toCString(listDump(callCases)).data()),
@@ -1121,7 +1113,7 @@
     
     MacroAssembler::replaceWithJump(
         MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
-        CodeLocationLabel(stubRoutine->code().code()));
+        CodeLocationLabel<JITStubRoutinePtrTag>(stubRoutine->code().code()));
     // The original slow path is unreachable on 64-bits, but still
     // reachable on 32-bits since a non-cell callee will always
     // trigger the slow path
@@ -1139,7 +1131,7 @@
 
 void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
 {
-    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind), GetPropertyPtrTag);
+    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
 }
 
@@ -1158,7 +1150,7 @@
         optimizedFunction = operationPutByIdDirectNonStrictOptimize;
     }
 
-    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction, PutPropertyPtrTag);
+    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
     InlineAccess::rewireStubAsJump(stubInfo, stubInfo.slowPathStartLocation());
 }
 
diff --git a/Source/JavaScriptCore/jit/Repatch.h b/Source/JavaScriptCore/jit/Repatch.h
index 30adc68..125db5f 100644
--- a/Source/JavaScriptCore/jit/Repatch.h
+++ b/Source/JavaScriptCore/jit/Repatch.h
@@ -45,8 +45,8 @@
 void repatchPutByID(ExecState*, JSValue, Structure*, const Identifier&, const PutPropertySlot&, StructureStubInfo&, PutKind);
 void buildPutByIdList(ExecState*, JSValue, Structure*, const Identifier&, const PutPropertySlot&, StructureStubInfo&, PutKind);
 void repatchIn(ExecState*, JSCell*, const Identifier&, bool wasFound, const PropertySlot&, StructureStubInfo&);
-void linkFor(ExecState*, CallLinkInfo&, CodeBlock*, JSObject* callee, MacroAssemblerCodePtr, PtrTag);
-void linkDirectFor(ExecState*, CallLinkInfo&, CodeBlock*, MacroAssemblerCodePtr);
+void linkFor(ExecState*, CallLinkInfo&, CodeBlock*, JSObject* callee, MacroAssemblerCodePtr<JSEntryPtrTag>);
+void linkDirectFor(ExecState*, CallLinkInfo&, CodeBlock*, MacroAssemblerCodePtr<JSEntryPtrTag>);
 void linkSlowFor(ExecState*, CallLinkInfo&);
 void unlinkFor(VM&, CallLinkInfo&);
 void linkVirtualFor(ExecState*, CallLinkInfo&);
@@ -54,7 +54,7 @@
 void resetGetByID(CodeBlock*, StructureStubInfo&, GetByIDKind);
 void resetPutByID(CodeBlock*, StructureStubInfo&);
 void resetIn(CodeBlock*, StructureStubInfo&);
-void ftlThunkAwareRepatchCall(CodeBlock*, CodeLocationCall, FunctionPtr newCalleeFunction, PtrTag callTag);
+void ftlThunkAwareRepatchCall(CodeBlock*, CodeLocationCall<JSInternalPtrTag>, FunctionPtr<CFunctionPtrTag> newCalleeFunction);
 
 } // namespace JSC
 
diff --git a/Source/JavaScriptCore/jit/SlowPathCall.h b/Source/JavaScriptCore/jit/SlowPathCall.h
index 75dcc99..37c9427 100644
--- a/Source/JavaScriptCore/jit/SlowPathCall.h
+++ b/Source/JavaScriptCore/jit/SlowPathCall.h
@@ -62,9 +62,8 @@
         m_jit->move(JIT::callFrameRegister, JIT::argumentGPR0);
         m_jit->move(JIT::TrustedImmPtr(m_pc), JIT::argumentGPR1);
 #endif
-        PtrTag tag = ptrTag(SlowPathPtrTag, nextPtrTagID());
-        JIT::Call call = m_jit->call(tag);
-        m_jit->m_calls.append(CallRecord(call, m_jit->m_bytecodeOffset, FunctionPtr(m_slowPathFunction, tag)));
+        JIT::Call call = m_jit->call(OperationPtrTag);
+        m_jit->m_calls.append(CallRecord(call, m_jit->m_bytecodeOffset, FunctionPtr<OperationPtrTag>(m_slowPathFunction)));
 
 #if CPU(X86) && USE(JSVALUE32_64)
         m_jit->addPtr(MacroAssembler::TrustedImm32(16), MacroAssembler::stackPointerRegister);
diff --git a/Source/JavaScriptCore/jit/SpecializedThunkJIT.h b/Source/JavaScriptCore/jit/SpecializedThunkJIT.h
index 36043fa..f282321 100644
--- a/Source/JavaScriptCore/jit/SpecializedThunkJIT.h
+++ b/Source/JavaScriptCore/jit/SpecializedThunkJIT.h
@@ -164,25 +164,23 @@
             ret();
         }
         
-        MacroAssemblerCodeRef finalize(MacroAssemblerCodePtr fallback, const char* thunkKind)
+        MacroAssemblerCodeRef<JITThunkPtrTag> finalize(MacroAssemblerCodePtr<JITThunkPtrTag> fallback, const char* thunkKind)
         {
             LinkBuffer patchBuffer(*this, GLOBAL_THUNK_ID);
-            patchBuffer.link(m_failures, CodeLocationLabel(fallback));
+            patchBuffer.link(m_failures, CodeLocationLabel<JITThunkPtrTag>(fallback));
             for (unsigned i = 0; i < m_calls.size(); i++)
                 patchBuffer.link(m_calls[i].first, m_calls[i].second);
-            return FINALIZE_CODE(patchBuffer, CodePtrTag, "Specialized thunk for %s", thunkKind);
+            return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "Specialized thunk for %s", thunkKind);
         }
 
         // Assumes that the target function uses fpRegister0 as the first argument
         // and return value. Like any sensible architecture would.
-        void callDoubleToDouble(FunctionPtr function)
+        void callDoubleToDouble(FunctionPtr<CFunctionPtrTag> function)
         {
-            assertIsCFunctionPtr(function.executableAddress());
-            PtrTag tag = ptrTag(SpecializedThunkPtrTag, nextPtrTagID());
-            m_calls.append(std::make_pair(call(tag), FunctionPtr(function, tag)));
+            m_calls.append(std::make_pair(call(JITThunkPtrTag), function.retagged<JITThunkPtrTag>()));
         }
         
-        void callDoubleToDoublePreservingReturn(FunctionPtr function)
+        void callDoubleToDoublePreservingReturn(FunctionPtr<CFunctionPtrTag> function)
         {
             if (!isX86())
                 preserveReturnAddressAfterCall(regT3);
@@ -209,7 +207,7 @@
         }
         
         MacroAssembler::JumpList m_failures;
-        Vector<std::pair<Call, FunctionPtr>> m_calls;
+        Vector<std::pair<Call, FunctionPtr<JITThunkPtrTag>>> m_calls;
     };
 
 }
diff --git a/Source/JavaScriptCore/jit/ThunkGenerator.h b/Source/JavaScriptCore/jit/ThunkGenerator.h
index d38ec23..1cf375d 100644
--- a/Source/JavaScriptCore/jit/ThunkGenerator.h
+++ b/Source/JavaScriptCore/jit/ThunkGenerator.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -27,11 +27,13 @@
 
 #if ENABLE(JIT)
 
+#include "PtrTag.h"
+
 namespace JSC {
 class VM;
-class MacroAssemblerCodeRef;
+template<PtrTag> class MacroAssemblerCodeRef;
 
-typedef MacroAssemblerCodeRef (*ThunkGenerator)(VM*);
+using ThunkGenerator = MacroAssemblerCodeRef<JITThunkPtrTag> (*)(VM*);
 
 } // namespace JSC
 
diff --git a/Source/JavaScriptCore/jit/ThunkGenerators.cpp b/Source/JavaScriptCore/jit/ThunkGenerators.cpp
index 9da0b0e..8832065b 100644
--- a/Source/JavaScriptCore/jit/ThunkGenerators.cpp
+++ b/Source/JavaScriptCore/jit/ThunkGenerators.cpp
@@ -60,7 +60,7 @@
 
 // We will jump here if the JIT code tries to make a call, but the
 // linking helper (C++ code) decides to throw an exception instead.
-MacroAssemblerCodeRef throwExceptionFromCallSlowPathGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> throwExceptionFromCallSlowPathGenerator(VM* vm)
 {
     CCallHelpers jit;
     
@@ -70,23 +70,18 @@
 
     jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
 
-    PtrTag callTag = ptrTag(JITThunkPtrTag, nextPtrTagID());
     jit.setupArguments<decltype(lookupExceptionHandler)>(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
-    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(lookupExceptionHandler, callTag)), GPRInfo::nonArgGPR0);
-    emitPointerValidation(jit, GPRInfo::nonArgGPR0, callTag);
-    jit.call(GPRInfo::nonArgGPR0, callTag);
+    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(lookupExceptionHandler)), GPRInfo::nonArgGPR0);
+    emitPointerValidation(jit, GPRInfo::nonArgGPR0, OperationPtrTag);
+    jit.call(GPRInfo::nonArgGPR0, OperationPtrTag);
     jit.jumpToExceptionHandler(*vm);
 
-    PtrTag throwExceptionTag = ptrTag(ThrowExceptionPtrTag, vm);
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, throwExceptionTag, "Throw exception from call slow path thunk");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "Throw exception from call slow path thunk");
 }
 
-static void slowPathFor(
-    CCallHelpers& jit, VM* vm, Sprt_JITOperation_ECli slowPathFunction, PtrTag expectedLinkedTargetTag)
+static void slowPathFor(CCallHelpers& jit, VM* vm, Sprt_JITOperation_ECli slowPathFunction)
 {
-    PtrTag callTag = ptrTag(JITThunkPtrTag, nextPtrTagID());
-
     jit.sanitizeStackInline(*vm, GPRInfo::nonArgGPR0);
     jit.emitFunctionPrologue();
     jit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
@@ -99,9 +94,9 @@
     jit.move(GPRInfo::regT2, GPRInfo::argumentGPR2);
     jit.addPtr(CCallHelpers::TrustedImm32(32), CCallHelpers::stackPointerRegister, GPRInfo::argumentGPR0);
     jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);
-    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(slowPathFunction, callTag)), GPRInfo::nonArgGPR0);
-    emitPointerValidation(jit, GPRInfo::nonArgGPR0, callTag);
-    jit.call(GPRInfo::nonArgGPR0, callTag);
+    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(slowPathFunction)), GPRInfo::nonArgGPR0);
+    emitPointerValidation(jit, GPRInfo::nonArgGPR0, OperationPtrTag);
+    jit.call(GPRInfo::nonArgGPR0, OperationPtrTag);
     jit.loadPtr(CCallHelpers::Address(GPRInfo::returnValueGPR, 8), GPRInfo::returnValueGPR2);
     jit.loadPtr(CCallHelpers::Address(GPRInfo::returnValueGPR), GPRInfo::returnValueGPR);
     jit.addPtr(CCallHelpers::TrustedImm32(maxFrameExtentForSlowPathCall), CCallHelpers::stackPointerRegister);
@@ -109,9 +104,9 @@
     if (maxFrameExtentForSlowPathCall)
         jit.addPtr(CCallHelpers::TrustedImm32(-maxFrameExtentForSlowPathCall), CCallHelpers::stackPointerRegister);
     jit.setupArguments<decltype(slowPathFunction)>(GPRInfo::regT2);
-    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr(slowPathFunction, callTag)), GPRInfo::nonArgGPR0);
-    emitPointerValidation(jit, GPRInfo::nonArgGPR0, callTag);
-    jit.call(GPRInfo::nonArgGPR0, callTag);
+    jit.move(CCallHelpers::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(slowPathFunction)), GPRInfo::nonArgGPR0);
+    emitPointerValidation(jit, GPRInfo::nonArgGPR0, OperationPtrTag);
+    jit.call(GPRInfo::nonArgGPR0, OperationPtrTag);
     if (maxFrameExtentForSlowPathCall)
         jit.addPtr(CCallHelpers::TrustedImm32(maxFrameExtentForSlowPathCall), CCallHelpers::stackPointerRegister);
 #endif
@@ -122,7 +117,7 @@
     // 3) The function to call.
     // The second return value GPR will hold a non-zero value for tail calls.
 
-    emitPointerValidation(jit, GPRInfo::returnValueGPR, expectedLinkedTargetTag);
+    emitPointerValidation(jit, GPRInfo::returnValueGPR, JSEntryPtrTag);
     jit.emitFunctionEpilogue();
     jit.untagReturnAddress();
 
@@ -133,10 +128,10 @@
     jit.prepareForTailCallSlow(GPRInfo::returnValueGPR);
 
     doNotTrash.link(&jit);
-    jit.jump(GPRInfo::returnValueGPR, expectedLinkedTargetTag);
+    jit.jump(GPRInfo::returnValueGPR, JSEntryPtrTag);
 }
 
-MacroAssemblerCodeRef linkCallThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> linkCallThunkGenerator(VM* vm)
 {
     // The return address is on the stack or in the link register. We will hence
     // save the return address to the call frame while we make a C++ function call
@@ -145,33 +140,29 @@
     // been adjusted, and all other registers to be available for use.
     CCallHelpers jit;
 
-    PtrTag expectedLinkedTargetTag = ptrTag(LinkCallResultPtrTag, vm);
-    slowPathFor(jit, vm, operationLinkCall, expectedLinkedTargetTag);
+    slowPathFor(jit, vm, operationLinkCall);
 
-    PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, linkTag, "Link call slow path thunk");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "Link call slow path thunk");
 }
 
 // For closure optimizations, we only include calls, since if you're using closures for
 // object construction then you're going to lose big time anyway.
-MacroAssemblerCodeRef linkPolymorphicCallThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> linkPolymorphicCallThunkGenerator(VM* vm)
 {
     CCallHelpers jit;
 
-    PtrTag expectedLinkedTargetTag = ptrTag(LinkPolymorphicCallResultPtrTag, vm);
-    slowPathFor(jit, vm, operationLinkPolymorphicCall, expectedLinkedTargetTag);
+    slowPathFor(jit, vm, operationLinkPolymorphicCall);
 
-    PtrTag linkTag = ptrTag(LinkPolymorphicCallPtrTag, vm);
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, linkTag, "Link polymorphic call slow path thunk");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "Link polymorphic call slow path thunk");
 }
 
 // FIXME: We should distinguish between a megamorphic virtual call vs. a slow
 // path virtual call so that we can enable fast tail calls for megamorphic
 // virtual calls by using the shuffler.
 // https://bugs.webkit.org/show_bug.cgi?id=148831
-MacroAssemblerCodeRef virtualThunkFor(VM* vm, CallLinkInfo& callLinkInfo)
+MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunkFor(VM* vm, CallLinkInfo& callLinkInfo)
 {
     // The callee is in regT0 (for JSVALUE32_64, the tag is in regT1).
     // The return address is on the stack, or in the link register. We will hence
@@ -232,12 +223,12 @@
 
     // Make a tail call. This will return back to JIT code.
     JSInterfaceJIT::Label callCode(jit.label());
-    emitPointerValidation(jit, GPRInfo::regT4, CodePtrTag);
+    emitPointerValidation(jit, GPRInfo::regT4, JSEntryPtrTag);
     if (callLinkInfo.isTailCall()) {
         jit.preserveReturnAddressAfterCall(GPRInfo::regT0);
         jit.prepareForTailCallSlow(GPRInfo::regT4);
     }
-    jit.jump(GPRInfo::regT4, CodePtrTag);
+    jit.jump(GPRInfo::regT4, JSEntryPtrTag);
 
     notJSFunction.link(&jit);
     slowCase.append(jit.branchIfNotType(GPRInfo::regT0, InternalFunctionType));
@@ -248,13 +239,11 @@
     slowCase.link(&jit);
     
     // Here we don't know anything, so revert to the full slow path.
-    PtrTag expectedLinkedTargetTag = ptrTag(LinkVirtualCallResultPtrTag, vm);
-    slowPathFor(jit, vm, operationVirtualCall, expectedLinkedTargetTag);
+    slowPathFor(jit, vm, operationVirtualCall);
 
-    PtrTag linkTag = ptrTag(LinkVirtualCallPtrTag, vm);
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
     return FINALIZE_CODE(
-        patchBuffer, linkTag,
+        patchBuffer, JITStubRoutinePtrTag,
         "Virtual %s slow path thunk",
         callLinkInfo.callMode() == CallMode::Regular ? "call" : callLinkInfo.callMode() == CallMode::Tail ? "tail call" : "construct");
 }
@@ -262,7 +251,7 @@
 enum ThunkEntryType { EnterViaCall, EnterViaJumpWithSavedTags, EnterViaJumpWithoutSavedTags };
 enum class ThunkFunctionType { JSFunction, InternalFunction };
 
-static MacroAssemblerCodeRef nativeForGenerator(VM* vm, ThunkFunctionType thunkFunctionType, CodeSpecializationKind kind, ThunkEntryType entryType = EnterViaCall)
+static MacroAssemblerCodeRef<JITThunkPtrTag> nativeForGenerator(VM* vm, ThunkFunctionType thunkFunctionType, CodeSpecializationKind kind, ThunkEntryType entryType = EnterViaCall)
 {
     // FIXME: This should be able to log ShadowChicken prologue packets.
     // https://bugs.webkit.org/show_bug.cgi?id=155689
@@ -307,9 +296,9 @@
     if (thunkFunctionType == ThunkFunctionType::JSFunction) {
         jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::regT1, JSFunction::offsetOfExecutable()), JSInterfaceJIT::regT1);
         jit.xorPtr(JSInterfaceJIT::TrustedImmPtr(JSFunctionPoison::key()), JSInterfaceJIT::regT1);
-        jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT1, executableOffsetToFunction), CodePtrTag);
+        jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT1, executableOffsetToFunction), JSEntryPtrTag);
     } else
-        jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT1, InternalFunction::offsetOfNativeFunctionFor(kind)), CodePtrTag);
+        jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT1, InternalFunction::offsetOfNativeFunctionFor(kind)), JSEntryPtrTag);
 
     jit.addPtr(JSInterfaceJIT::TrustedImm32(8), JSInterfaceJIT::stackPointerRegister);
 
@@ -328,7 +317,7 @@
         jit.loadPtr(JSInterfaceJIT::Address(X86Registers::esi, InternalFunction::offsetOfNativeFunctionFor(kind)), X86Registers::r9);
     jit.move(JSInterfaceJIT::TrustedImm64(NativeCodePoison::key()), X86Registers::esi);
     jit.xor64(X86Registers::esi, X86Registers::r9);
-    jit.call(X86Registers::r9, CodePtrTag);
+    jit.call(X86Registers::r9, JSEntryPtrTag);
 
 #else
     // Calling convention:      f(ecx, edx, r8, r9, ...);
@@ -343,9 +332,9 @@
     if (thunkFunctionType == ThunkFunctionType::JSFunction) {
         jit.loadPtr(JSInterfaceJIT::Address(X86Registers::edx, JSFunction::offsetOfExecutable()), X86Registers::r9);
         jit.xorPtr(JSInterfaceJIT::TrustedImmPtr(JSFunctionPoison::key()), X86Registers::r9);
-        jit.call(JSInterfaceJIT::Address(X86Registers::r9, executableOffsetToFunction), CodePtrTag);
+        jit.call(JSInterfaceJIT::Address(X86Registers::r9, executableOffsetToFunction), JSEntryPtrTag);
     } else
-        jit.call(JSInterfaceJIT::Address(X86Registers::edx, InternalFunction::offsetOfNativeFunctionFor(kind)), CodePtrTag);
+        jit.call(JSInterfaceJIT::Address(X86Registers::edx, InternalFunction::offsetOfNativeFunctionFor(kind)), JSEntryPtrTag);
 
     jit.addPtr(JSInterfaceJIT::TrustedImm32(4 * sizeof(int64_t)), JSInterfaceJIT::stackPointerRegister);
 #endif
@@ -367,7 +356,7 @@
         jit.loadPtr(JSInterfaceJIT::Address(ARM64Registers::x1, InternalFunction::offsetOfNativeFunctionFor(kind)), ARM64Registers::x2);
     jit.move(JSInterfaceJIT::TrustedImm64(NativeCodePoison::key()), ARM64Registers::x1);
     jit.xor64(ARM64Registers::x1, ARM64Registers::x2);
-    jit.call(ARM64Registers::x2, CodePtrTag);
+    jit.call(ARM64Registers::x2, JSEntryPtrTag);
 
 #elif CPU(ARM) || CPU(MIPS)
 #if CPU(MIPS)
@@ -383,9 +372,9 @@
     if (thunkFunctionType == ThunkFunctionType::JSFunction) {
         jit.loadPtr(JSInterfaceJIT::Address(JSInterfaceJIT::argumentGPR1, JSFunction::offsetOfExecutable()), JSInterfaceJIT::regT2);
         jit.xorPtr(JSInterfaceJIT::TrustedImmPtr(JSFunctionPoison::key()), JSInterfaceJIT::regT2);
-        jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT2, executableOffsetToFunction), CodePtrTag);
+        jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::regT2, executableOffsetToFunction), JSEntryPtrTag);
     } else
-        jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::argumentGPR1, InternalFunction::offsetOfNativeFunctionFor(kind)), CodePtrTag);
+        jit.call(JSInterfaceJIT::Address(JSInterfaceJIT::argumentGPR1, InternalFunction::offsetOfNativeFunctionFor(kind)), JSEntryPtrTag);
 
 #if CPU(MIPS)
     // Restore stack space
@@ -429,9 +418,8 @@
 #endif
     jit.move(JSInterfaceJIT::callFrameRegister, JSInterfaceJIT::argumentGPR0);
 #endif
-    PtrTag tag = ptrTag(ExceptionHandlerPtrTag, nextPtrTagID());
-    jit.move(JSInterfaceJIT::TrustedImmPtr(tagCFunctionPtr(operationVMHandleException, tag)), JSInterfaceJIT::regT3);
-    jit.call(JSInterfaceJIT::regT3, tag);
+    jit.move(JSInterfaceJIT::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(operationVMHandleException)), JSInterfaceJIT::regT3);
+    jit.call(JSInterfaceJIT::regT3, OperationPtrTag);
 #if CPU(X86) && USE(JSVALUE32_64)
     jit.addPtr(JSInterfaceJIT::TrustedImm32(8), JSInterfaceJIT::stackPointerRegister);
 #elif OS(WINDOWS)
@@ -441,40 +429,40 @@
     jit.jumpToExceptionHandler(*vm);
 
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, CodePtrTag, "%s %s%s trampoline", thunkFunctionType == ThunkFunctionType::JSFunction ? "native" : "internal", entryType == EnterViaJumpWithSavedTags ? "Tail With Saved Tags " : entryType == EnterViaJumpWithoutSavedTags ? "Tail Without Saved Tags " : "", toCString(kind).data());
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "%s %s%s trampoline", thunkFunctionType == ThunkFunctionType::JSFunction ? "native" : "internal", entryType == EnterViaJumpWithSavedTags ? "Tail With Saved Tags " : entryType == EnterViaJumpWithoutSavedTags ? "Tail Without Saved Tags " : "", toCString(kind).data());
 }
 
-MacroAssemblerCodeRef nativeCallGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> nativeCallGenerator(VM* vm)
 {
     return nativeForGenerator(vm, ThunkFunctionType::JSFunction, CodeForCall);
 }
 
-MacroAssemblerCodeRef nativeTailCallGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> nativeTailCallGenerator(VM* vm)
 {
     return nativeForGenerator(vm, ThunkFunctionType::JSFunction, CodeForCall, EnterViaJumpWithSavedTags);
 }
 
-MacroAssemblerCodeRef nativeTailCallWithoutSavedTagsGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> nativeTailCallWithoutSavedTagsGenerator(VM* vm)
 {
     return nativeForGenerator(vm, ThunkFunctionType::JSFunction, CodeForCall, EnterViaJumpWithoutSavedTags);
 }
 
-MacroAssemblerCodeRef nativeConstructGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> nativeConstructGenerator(VM* vm)
 {
     return nativeForGenerator(vm, ThunkFunctionType::JSFunction, CodeForConstruct);
 }
 
-MacroAssemblerCodeRef internalFunctionCallGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> internalFunctionCallGenerator(VM* vm)
 {
     return nativeForGenerator(vm, ThunkFunctionType::InternalFunction, CodeForCall);
 }
 
-MacroAssemblerCodeRef internalFunctionConstructGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> internalFunctionConstructGenerator(VM* vm)
 {
     return nativeForGenerator(vm, ThunkFunctionType::InternalFunction, CodeForConstruct);
 }
 
-MacroAssemblerCodeRef arityFixupGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> arityFixupGenerator(VM* vm)
 {
     JSInterfaceJIT jit(vm);
 
@@ -494,7 +482,7 @@
     jit.loadPtr(JSInterfaceJIT::Address(GPRInfo::callFrameRegister, CallFrame::returnPCOffset()), GPRInfo::regT3);
     jit.addPtr(JSInterfaceJIT::TrustedImm32(sizeof(CallerFrameAndPC)), GPRInfo::callFrameRegister, extraTemp);
     jit.untagPtr(GPRInfo::regT3, extraTemp);
-    PtrTag tempReturnPCTag = ptrTag(ArityFixupPtrTag, nextPtrTagID());
+    PtrTag tempReturnPCTag = static_cast<PtrTag>(random());
     jit.move(JSInterfaceJIT::TrustedImmPtr(tempReturnPCTag), extraTemp);
     jit.tagPtr(GPRInfo::regT3, extraTemp);
     jit.storePtr(GPRInfo::regT3, JSInterfaceJIT::Address(GPRInfo::callFrameRegister, CallFrame::returnPCOffset()));
@@ -619,19 +607,17 @@
 #endif // End of USE(JSVALUE32_64) section.
 
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    PtrTag arityFixupTag = ptrTag(ArityFixupPtrTag, vm);
-    return FINALIZE_CODE(patchBuffer, arityFixupTag, "fixup arity");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "fixup arity");
 }
 
-MacroAssemblerCodeRef unreachableGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> unreachableGenerator(VM* vm)
 {
     JSInterfaceJIT jit(vm);
 
     jit.breakpoint();
 
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    PtrTag thunkTag = ptrTag(JITThunkPtrTag, vm);
-    return FINALIZE_CODE(patchBuffer, thunkTag, "unreachable thunk");
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "unreachable thunk");
 }
 
 static void stringCharLoad(SpecializedThunkJIT& jit, VM* vm)
@@ -672,7 +658,7 @@
     jit.appendFailure(jit.branchTestPtr(MacroAssembler::Zero, dst));
 }
 
-MacroAssemblerCodeRef charCodeAtThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> charCodeAtThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     stringCharLoad(jit, vm);
@@ -680,7 +666,7 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "charCodeAt");
 }
 
-MacroAssemblerCodeRef charAtThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> charAtThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     stringCharLoad(jit, vm);
@@ -689,7 +675,7 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "charAt");
 }
 
-MacroAssemblerCodeRef fromCharCodeThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> fromCharCodeThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     // load char code
@@ -699,7 +685,7 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "fromCharCode");
 }
 
-MacroAssemblerCodeRef clz32ThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> clz32ThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     MacroAssembler::Jump nonIntArgJump;
@@ -720,11 +706,11 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "clz32");
 }
 
-MacroAssemblerCodeRef sqrtThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> sqrtThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     if (!jit.supportsFloatingPointSqrt())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
 
     jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
     jit.sqrtDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT0);
@@ -880,12 +866,12 @@
 
 static const double halfConstant = 0.5;
     
-MacroAssemblerCodeRef floorThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> floorThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     MacroAssembler::Jump nonIntJump;
     if (!UnaryDoubleOpWrapper(floor) || !jit.supportsFloatingPoint())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
     jit.loadInt32Argument(0, SpecializedThunkJIT::regT0, nonIntJump);
     jit.returnInt32(SpecializedThunkJIT::regT0);
     nonIntJump.link(&jit);
@@ -923,11 +909,11 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "floor");
 }
 
-MacroAssemblerCodeRef ceilThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> ceilThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     if (!UnaryDoubleOpWrapper(ceil) || !jit.supportsFloatingPoint())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
     MacroAssembler::Jump nonIntJump;
     jit.loadInt32Argument(0, SpecializedThunkJIT::regT0, nonIntJump);
     jit.returnInt32(SpecializedThunkJIT::regT0);
@@ -946,11 +932,11 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "ceil");
 }
 
-MacroAssemblerCodeRef truncThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> truncThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     if (!UnaryDoubleOpWrapper(trunc) || !jit.supportsFloatingPoint())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
     MacroAssembler::Jump nonIntJump;
     jit.loadInt32Argument(0, SpecializedThunkJIT::regT0, nonIntJump);
     jit.returnInt32(SpecializedThunkJIT::regT0);
@@ -969,11 +955,11 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "trunc");
 }
 
-MacroAssemblerCodeRef roundThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> roundThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     if (!UnaryDoubleOpWrapper(jsRound) || !jit.supportsFloatingPoint())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
     MacroAssembler::Jump nonIntJump;
     jit.loadInt32Argument(0, SpecializedThunkJIT::regT0, nonIntJump);
     jit.returnInt32(SpecializedThunkJIT::regT0);
@@ -1003,37 +989,37 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "round");
 }
 
-MacroAssemblerCodeRef expThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> expThunkGenerator(VM* vm)
 {
     if (!UnaryDoubleOpWrapper(exp))
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
     SpecializedThunkJIT jit(vm, 1);
     if (!jit.supportsFloatingPoint())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
     jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
     jit.callDoubleToDoublePreservingReturn(UnaryDoubleOpWrapper(exp));
     jit.returnDouble(SpecializedThunkJIT::fpRegT0);
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "exp");
 }
 
-MacroAssemblerCodeRef logThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> logThunkGenerator(VM* vm)
 {
     if (!UnaryDoubleOpWrapper(log))
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
     SpecializedThunkJIT jit(vm, 1);
     if (!jit.supportsFloatingPoint())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
     jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
     jit.callDoubleToDoublePreservingReturn(UnaryDoubleOpWrapper(log));
     jit.returnDouble(SpecializedThunkJIT::fpRegT0);
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "log");
 }
 
-MacroAssemblerCodeRef absThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> absThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 1);
     if (!jit.supportsFloatingPointAbs())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
 
 #if USE(JSVALUE64)
     unsigned virtualRegisterIndex = CallFrame::argumentOffset(0);
@@ -1086,7 +1072,7 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "abs");
 }
 
-MacroAssemblerCodeRef imulThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> imulThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 2);
     MacroAssembler::Jump nonIntArg0Jump;
@@ -1117,11 +1103,11 @@
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "imul");
 }
 
-MacroAssemblerCodeRef randomThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> randomThunkGenerator(VM* vm)
 {
     SpecializedThunkJIT jit(vm, 0);
     if (!jit.supportsFloatingPoint())
-        return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+        return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
 
 #if USE(JSVALUE64)
     jit.emitRandomThunk(*vm, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT1, SpecializedThunkJIT::regT2, SpecializedThunkJIT::regT3, SpecializedThunkJIT::fpRegT0);
@@ -1129,11 +1115,11 @@
 
     return jit.finalize(vm->jitStubs->ctiNativeTailCall(vm), "random");
 #else
-    return MacroAssemblerCodeRef::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
+    return MacroAssemblerCodeRef<JITThunkPtrTag>::createSelfManagedCodeRef(vm->jitStubs->ctiNativeCall(vm));
 #endif
 }
 
-MacroAssemblerCodeRef boundThisNoArgsFunctionCallGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> boundThisNoArgsFunctionCallGenerator(VM* vm)
 {
     CCallHelpers jit;
     
@@ -1217,16 +1203,16 @@
     jit.move(CCallHelpers::TrustedImm64(JITCodePoison::key()), GPRInfo::regT1);
     jit.xor64(GPRInfo::regT1, GPRInfo::regT0);
 #endif
-    emitPointerValidation(jit, GPRInfo::regT0, CodePtrTag);
-    jit.call(GPRInfo::regT0, CodePtrTag);
+    emitPointerValidation(jit, GPRInfo::regT0, JSEntryPtrTag);
+    jit.call(GPRInfo::regT0, JSEntryPtrTag);
 
     jit.emitFunctionEpilogue();
     jit.ret();
     
     LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
-    linkBuffer.link(noCode, CodeLocationLabel(vm->jitStubs->ctiNativeTailCallWithoutSavedTags(vm)));
+    linkBuffer.link(noCode, CodeLocationLabel<JITThunkPtrTag>(vm->jitStubs->ctiNativeTailCallWithoutSavedTags(vm)));
     return FINALIZE_CODE(
-        linkBuffer, CodePtrTag, "Specialized thunk for bound function calls with no arguments");
+        linkBuffer, JITThunkPtrTag, "Specialized thunk for bound function calls with no arguments");
 }
 
 } // namespace JSC
diff --git a/Source/JavaScriptCore/jit/ThunkGenerators.h b/Source/JavaScriptCore/jit/ThunkGenerators.h
index 618fee0..0c8c274 100644
--- a/Source/JavaScriptCore/jit/ThunkGenerators.h
+++ b/Source/JavaScriptCore/jit/ThunkGenerators.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2010, 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2010-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,46 +26,47 @@
 #pragma once
 
 #include "CodeSpecializationKind.h"
+#include "PtrTag.h"
 
 #if ENABLE(JIT)
 namespace JSC {
 
 class CallLinkInfo;
-class MacroAssemblerCodeRef;
+template<PtrTag> class MacroAssemblerCodeRef;
 class VM;
 
-MacroAssemblerCodeRef throwExceptionFromCallSlowPathGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> throwExceptionFromCallSlowPathGenerator(VM*);
 
-MacroAssemblerCodeRef linkCallThunk(VM*, CallLinkInfo&, CodeSpecializationKind);
-MacroAssemblerCodeRef linkCallThunkGenerator(VM*);
-MacroAssemblerCodeRef linkPolymorphicCallThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> linkCallThunk(VM*, CallLinkInfo&, CodeSpecializationKind);
+MacroAssemblerCodeRef<JITThunkPtrTag> linkCallThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> linkPolymorphicCallThunkGenerator(VM*);
 
-MacroAssemblerCodeRef virtualThunkFor(VM*, CallLinkInfo&);
+MacroAssemblerCodeRef<JITStubRoutinePtrTag> virtualThunkFor(VM*, CallLinkInfo&);
 
-MacroAssemblerCodeRef nativeCallGenerator(VM*);
-MacroAssemblerCodeRef nativeConstructGenerator(VM*);
-MacroAssemblerCodeRef nativeTailCallGenerator(VM*);
-MacroAssemblerCodeRef nativeTailCallWithoutSavedTagsGenerator(VM*);
-MacroAssemblerCodeRef internalFunctionCallGenerator(VM*);
-MacroAssemblerCodeRef internalFunctionConstructGenerator(VM*);
-MacroAssemblerCodeRef arityFixupGenerator(VM*);
-MacroAssemblerCodeRef unreachableGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> nativeCallGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> nativeConstructGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> nativeTailCallGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> nativeTailCallWithoutSavedTagsGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> internalFunctionCallGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> internalFunctionConstructGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> arityFixupGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> unreachableGenerator(VM*);
 
-MacroAssemblerCodeRef charCodeAtThunkGenerator(VM*);
-MacroAssemblerCodeRef charAtThunkGenerator(VM*);
-MacroAssemblerCodeRef clz32ThunkGenerator(VM*);
-MacroAssemblerCodeRef fromCharCodeThunkGenerator(VM*);
-MacroAssemblerCodeRef absThunkGenerator(VM*);
-MacroAssemblerCodeRef ceilThunkGenerator(VM*);
-MacroAssemblerCodeRef expThunkGenerator(VM*);
-MacroAssemblerCodeRef floorThunkGenerator(VM*);
-MacroAssemblerCodeRef logThunkGenerator(VM*);
-MacroAssemblerCodeRef roundThunkGenerator(VM*);
-MacroAssemblerCodeRef sqrtThunkGenerator(VM*);
-MacroAssemblerCodeRef imulThunkGenerator(VM*);
-MacroAssemblerCodeRef randomThunkGenerator(VM*);
-MacroAssemblerCodeRef truncThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> charCodeAtThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> charAtThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> clz32ThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> fromCharCodeThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> absThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> ceilThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> expThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> floorThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> logThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> roundThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> sqrtThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> imulThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> randomThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> truncThunkGenerator(VM*);
 
-MacroAssemblerCodeRef boundThisNoArgsFunctionCallGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> boundThisNoArgsFunctionCallGenerator(VM*);
 }
 #endif // ENABLE(JIT)
diff --git a/Source/JavaScriptCore/llint/LLIntData.cpp b/Source/JavaScriptCore/llint/LLIntData.cpp
index 57e0bc6..57f3c2b 100644
--- a/Source/JavaScriptCore/llint/LLIntData.cpp
+++ b/Source/JavaScriptCore/llint/LLIntData.cpp
@@ -58,26 +58,26 @@
 #else // ENABLE(JIT)
     llint_entry(&Data::s_opcodeMap);
 
-    for (int i = 0; i < NUMBER_OF_BYTECODE_IDS; ++i) {
-        PtrTag tag = (i == op_catch) ? ExceptionHandlerPtrTag : BytecodePtrTag;
-        Data::s_opcodeMap[i] = tagCodePtr(Data::s_opcodeMap[i], tag);
-    }
+    for (int i = 0; i < NUMBER_OF_BYTECODE_IDS; ++i)
+        Data::s_opcodeMap[i] = tagCodePtr(Data::s_opcodeMap[i], BytecodePtrTag);
 
     static const PtrTag tagsForOpcode[] = {
-        CodePtrTag, // llint_program_prologue
-        CodePtrTag, // llint_eval_prologue
-        CodePtrTag, // llint_module_program_prologue
-        CodePtrTag, // llint_function_for_call_prologue
-        CodePtrTag, // llint_function_for_construct_prologue
-        CodePtrTag, // llint_function_for_call_arity_check
-        CodePtrTag, // llint_function_for_construct_arity_check
-        CodePtrTag, // llint_generic_return_point
+        // FIXME: These should be tagged with JSInternalPtrTag instead of JSEntryPtrTag.
+        // https://bugs.webkit.org/show_bug.cgi?id=184712
+        JSEntryPtrTag, // llint_program_prologue
+        JSEntryPtrTag, // llint_eval_prologue
+        JSEntryPtrTag, // llint_module_program_prologue
+        JSEntryPtrTag, // llint_function_for_call_prologue
+        JSEntryPtrTag, // llint_function_for_construct_prologue
+        JSEntryPtrTag, // llint_function_for_call_arity_check
+        JSEntryPtrTag, // llint_function_for_construct_arity_check
+        JSEntryPtrTag, // llint_generic_return_point
         BytecodePtrTag, // llint_throw_from_slow_path_trampoline
         ExceptionHandlerPtrTag, // llint_throw_during_call_trampoline
-        CodePtrTag, // llint_native_call_trampoline
-        CodePtrTag, // llint_native_construct_trampoline
-        CodePtrTag, // llint_internal_function_call_trampoline
-        CodePtrTag, // llint_internal_function_construct_trampoline
+        JSEntryPtrTag, // llint_native_call_trampoline
+        JSEntryPtrTag, // llint_native_construct_trampoline
+        JSEntryPtrTag, // llint_internal_function_call_trampoline
+        JSEntryPtrTag, // llint_internal_function_construct_trampoline
         ExceptionHandlerPtrTag, // handleUncaughtException
     };
 
@@ -89,7 +89,7 @@
         Data::s_opcodeMap[opcodeID] = tagCodePtr(Data::s_opcodeMap[opcodeID], tagsForOpcode[i]);
     }
 
-    void* handler = LLInt::getCodePtr(llint_throw_from_slow_path_trampoline);
+    void* handler = LLInt::getExecutableAddress(llint_throw_from_slow_path_trampoline);
     for (int i = 0; i < maxOpcodeLength + 1; ++i)
         Data::s_exceptionInstructions[i].u.pointer = handler;
 
diff --git a/Source/JavaScriptCore/llint/LLIntData.h b/Source/JavaScriptCore/llint/LLIntData.h
index 73e7362..8ba7c03 100644
--- a/Source/JavaScriptCore/llint/LLIntData.h
+++ b/Source/JavaScriptCore/llint/LLIntData.h
@@ -26,8 +26,8 @@
 #pragma once
 
 #include "JSCJSValue.h"
+#include "MacroAssemblerCodeRef.h"
 #include "Opcode.h"
-#include "PtrTag.h"
 
 namespace JSC {
 
@@ -55,7 +55,8 @@
     friend Instruction* exceptionInstructions();
     friend Opcode* opcodeMap();
     friend Opcode getOpcode(OpcodeID);
-    friend void* getCodePtr(OpcodeID);
+    template<PtrTag tag> friend MacroAssemblerCodePtr<tag> getCodePtr(OpcodeID);
+    template<PtrTag tag> friend MacroAssemblerCodeRef<tag> getCodeRef(OpcodeID);
 };
 
 void initialize();
@@ -79,16 +80,40 @@
 #endif
 }
 
-ALWAYS_INLINE void* getCodePtr(OpcodeID id)
+ALWAYS_INLINE void* getExecutableAddress(OpcodeID opcodeID)
 {
-    return reinterpret_cast<void*>(getOpcode(id));
+    ASSERT(opcodeID >= NUMBER_OF_BYTECODE_IDS);
+    return reinterpret_cast<void*>(getOpcode(opcodeID));
+}
+
+template<PtrTag tag>
+ALWAYS_INLINE MacroAssemblerCodePtr<tag> getCodePtr(OpcodeID opcodeID)
+{
+    return MacroAssemblerCodePtr<tag>::createFromExecutableAddress(getOpcode(opcodeID));
+}
+
+template<PtrTag tag>
+ALWAYS_INLINE MacroAssemblerCodeRef<tag> getCodeRef(OpcodeID opcodeID)
+{
+    return MacroAssemblerCodeRef<tag>::createSelfManagedCodeRef(getCodePtr<tag>(opcodeID));
 }
 
 #if ENABLE(JIT)
-
-ALWAYS_INLINE LLIntCode getCodeFunctionPtr(OpcodeID codeId)
+template<PtrTag tag>
+ALWAYS_INLINE LLIntCode getCodeFunctionPtr(OpcodeID opcodeID)
 {
-    return reinterpret_cast<LLIntCode>(getCodePtr(codeId));
+    ASSERT(opcodeID >= NUMBER_OF_BYTECODE_IDS);
+#if COMPILER(MSVC)
+    return reinterpret_cast<LLIntCode>(getCodePtr<tag>(opcodeID).executableAddress());
+#else
+    return reinterpret_cast<LLIntCode>(getCodePtr<tag>(opcodeID).template executableAddress());
+#endif
+}
+
+#else
+ALWAYS_INLINE void* getCodePtr(OpcodeID id)
+{
+    return reinterpret_cast<void*>(getOpcode(id));
 }
 #endif
 
diff --git a/Source/JavaScriptCore/llint/LLIntEntrypoint.cpp b/Source/JavaScriptCore/llint/LLIntEntrypoint.cpp
index 380851a..41678a9 100644
--- a/Source/JavaScriptCore/llint/LLIntEntrypoint.cpp
+++ b/Source/JavaScriptCore/llint/LLIntEntrypoint.cpp
@@ -46,12 +46,12 @@
     if (VM::canUseJIT()) {
         if (kind == CodeForCall) {
             codeBlock->setJITCode(
-                adoptRef(*new DirectJITCode(vm.getCTIStub(functionForCallEntryThunkGenerator), vm.getCTIStub(functionForCallArityCheckThunkGenerator).code(), JITCode::InterpreterThunk)));
+                adoptRef(*new DirectJITCode(vm.getCTIStub(functionForCallEntryThunkGenerator).retagged<JSEntryPtrTag>(), vm.getCTIStub(functionForCallArityCheckThunkGenerator).retaggedCode<JSEntryPtrTag>(), JITCode::InterpreterThunk)));
             return;
         }
         ASSERT(kind == CodeForConstruct);
         codeBlock->setJITCode(
-            adoptRef(*new DirectJITCode(vm.getCTIStub(functionForConstructEntryThunkGenerator), vm.getCTIStub(functionForConstructArityCheckThunkGenerator).code(), JITCode::InterpreterThunk)));
+            adoptRef(*new DirectJITCode(vm.getCTIStub(functionForConstructEntryThunkGenerator).retagged<JSEntryPtrTag>(), vm.getCTIStub(functionForConstructArityCheckThunkGenerator).retaggedCode<JSEntryPtrTag>(), JITCode::InterpreterThunk)));
         return;
     }
 #endif // ENABLE(JIT)
@@ -59,19 +59,19 @@
     UNUSED_PARAM(vm);
     if (kind == CodeForCall) {
         codeBlock->setJITCode(
-            adoptRef(*new DirectJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_function_for_call_prologue), MacroAssemblerCodePtr::createLLIntCodePtr(llint_function_for_call_arity_check), JITCode::InterpreterThunk)));
+            adoptRef(*new DirectJITCode(getCodeRef<JSEntryPtrTag>(llint_function_for_call_prologue), getCodePtr<JSEntryPtrTag>(llint_function_for_call_arity_check), JITCode::InterpreterThunk)));
         return;
     }
     ASSERT(kind == CodeForConstruct);
     codeBlock->setJITCode(
-        adoptRef(*new DirectJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_function_for_construct_prologue), MacroAssemblerCodePtr::createLLIntCodePtr(llint_function_for_construct_arity_check), JITCode::InterpreterThunk)));
+        adoptRef(*new DirectJITCode(getCodeRef<JSEntryPtrTag>(llint_function_for_construct_prologue), getCodePtr<JSEntryPtrTag>(llint_function_for_construct_arity_check), JITCode::InterpreterThunk)));
 }
 
 static void setEvalEntrypoint(VM& vm, CodeBlock* codeBlock)
 {
 #if ENABLE(JIT)
     if (VM::canUseJIT()) {
-        MacroAssemblerCodeRef codeRef = vm.getCTIStub(evalEntryThunkGenerator);
+        MacroAssemblerCodeRef<JSEntryPtrTag> codeRef = vm.getCTIStub(evalEntryThunkGenerator).retagged<JSEntryPtrTag>();
         codeBlock->setJITCode(
             adoptRef(*new DirectJITCode(codeRef, codeRef.code(), JITCode::InterpreterThunk)));
         return;
@@ -79,7 +79,7 @@
 #endif // ENABLE(JIT)
 
     UNUSED_PARAM(vm);
-    MacroAssemblerCodeRef codeRef = MacroAssemblerCodeRef::createLLIntCodeRef(llint_eval_prologue);
+    MacroAssemblerCodeRef<JSEntryPtrTag> codeRef = getCodeRef<JSEntryPtrTag>(llint_eval_prologue);
     codeBlock->setJITCode(
         adoptRef(*new DirectJITCode(codeRef, codeRef.code(), JITCode::InterpreterThunk)));
 }
@@ -88,7 +88,7 @@
 {
 #if ENABLE(JIT)
     if (VM::canUseJIT()) {
-        MacroAssemblerCodeRef codeRef = vm.getCTIStub(programEntryThunkGenerator);
+        MacroAssemblerCodeRef<JSEntryPtrTag> codeRef = vm.getCTIStub(programEntryThunkGenerator).retagged<JSEntryPtrTag>();
         codeBlock->setJITCode(
             adoptRef(*new DirectJITCode(codeRef, codeRef.code(), JITCode::InterpreterThunk)));
         return;
@@ -96,7 +96,7 @@
 #endif // ENABLE(JIT)
 
     UNUSED_PARAM(vm);
-    MacroAssemblerCodeRef codeRef = MacroAssemblerCodeRef::createLLIntCodeRef(llint_program_prologue);
+    MacroAssemblerCodeRef<JSEntryPtrTag> codeRef = getCodeRef<JSEntryPtrTag>(llint_program_prologue);
     codeBlock->setJITCode(
         adoptRef(*new DirectJITCode(codeRef, codeRef.code(), JITCode::InterpreterThunk)));
 }
@@ -105,7 +105,7 @@
 {
 #if ENABLE(JIT)
     if (VM::canUseJIT()) {
-        MacroAssemblerCodeRef codeRef = vm.getCTIStub(moduleProgramEntryThunkGenerator);
+        MacroAssemblerCodeRef<JSEntryPtrTag> codeRef = vm.getCTIStub(moduleProgramEntryThunkGenerator).retagged<JSEntryPtrTag>();
         codeBlock->setJITCode(
             adoptRef(*new DirectJITCode(codeRef, codeRef.code(), JITCode::InterpreterThunk)));
         return;
@@ -113,7 +113,7 @@
 #endif // ENABLE(JIT)
 
     UNUSED_PARAM(vm);
-    MacroAssemblerCodeRef codeRef = MacroAssemblerCodeRef::createLLIntCodeRef(llint_module_program_prologue);
+    MacroAssemblerCodeRef<JSEntryPtrTag> codeRef = getCodeRef<JSEntryPtrTag>(llint_module_program_prologue);
     codeBlock->setJITCode(
         adoptRef(*new DirectJITCode(codeRef, codeRef.code(), JITCode::InterpreterThunk)));
 }
diff --git a/Source/JavaScriptCore/llint/LLIntExceptions.cpp b/Source/JavaScriptCore/llint/LLIntExceptions.cpp
index e6490d7..bc0d5b2 100644
--- a/Source/JavaScriptCore/llint/LLIntExceptions.cpp
+++ b/Source/JavaScriptCore/llint/LLIntExceptions.cpp
@@ -58,7 +58,7 @@
     auto scope = DECLARE_THROW_SCOPE(*vm);
     dataLog("Throwing exception ", JSValue(scope.exception()), " (callToThrow).\n");
 #endif
-    return LLInt::getCodePtr(llint_throw_during_call_trampoline);
+    return LLInt::getExecutableAddress(llint_throw_during_call_trampoline);
 }
 
 } } // namespace JSC::LLInt
diff --git a/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp b/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp
index ddcf072..50a89ee 100644
--- a/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp
+++ b/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp
@@ -444,11 +444,10 @@
     ASSERT(codeBlock->jitType() == JITCode::BaselineJIT);
 
     const JITCodeMap& codeMap = codeBlock->jitCodeMap();
-    CodeLocationLabel codeLocation = codeMap.find(loopOSREntryBytecodeOffset);
+    CodeLocationLabel<JSEntryPtrTag> codeLocation = codeMap.find(loopOSREntryBytecodeOffset);
     ASSERT(codeLocation);
 
-    PtrTag locationTag = ptrTag(CodePtrTag, codeBlock, loopOSREntryBytecodeOffset);
-    void* jumpTarget = codeLocation.retagged(locationTag, CodePtrTag).executableAddress();
+    void* jumpTarget = codeLocation.executableAddress();
     ASSERT(jumpTarget);
     
     LLINT_RETURN_TWO(jumpTarget, exec->topOfFrame());
@@ -1438,7 +1437,7 @@
     JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
     if (!calleeAsFunctionCell) {
         if (auto* internalFunction = jsDynamicCast<InternalFunction*>(vm, calleeAsValue)) {
-            MacroAssemblerCodePtr codePtr = vm.getCTIInternalFunctionTrampolineFor(kind);
+            MacroAssemblerCodePtr<JSEntryPtrTag> codePtr = vm.getCTIInternalFunctionTrampolineFor(kind);
             ASSERT(!!codePtr);
 
             if (!LLINT_ALWAYS_ACCESS_SLOW && callLinkInfo) {
@@ -1450,12 +1449,12 @@
                     callLinkInfo->remove();
                 callLinkInfo->callee.set(vm, callerCodeBlock, internalFunction);
                 callLinkInfo->lastSeenCallee.set(vm, callerCodeBlock, internalFunction);
-                callLinkInfo->machineCodeTarget = codePtr.retagged(CodePtrTag, LLIntCallICPtrTag);
+                callLinkInfo->machineCodeTarget = codePtr;
             }
 
-            assertIsTaggedWith(codePtr.executableAddress(), CodePtrTag);
+            assertIsTaggedWith(codePtr.executableAddress(), JSEntryPtrTag);
             PoisonedMasmPtr::assertIsNotPoisoned(codePtr.executableAddress());
-            LLINT_CALL_RETURN(exec, execCallee, codePtr.executableAddress(), CodePtrTag);
+            LLINT_CALL_RETURN(exec, execCallee, codePtr.executableAddress(), JSEntryPtrTag);
         }
         throwScope.release();
         return handleHostCall(execCallee, pc, calleeAsValue, kind);
@@ -1464,7 +1463,7 @@
     JSScope* scope = callee->scopeUnchecked();
     ExecutableBase* executable = callee->executable();
 
-    MacroAssemblerCodePtr codePtr;
+    MacroAssemblerCodePtr<JSEntryPtrTag> codePtr;
     CodeBlock* codeBlock = 0;
     if (executable->isHostFunction())
         codePtr = executable->entrypointFor(kind, MustCheckArity);
@@ -1500,14 +1499,14 @@
             callLinkInfo->remove();
         callLinkInfo->callee.set(vm, callerCodeBlock, callee);
         callLinkInfo->lastSeenCallee.set(vm, callerCodeBlock, callee);
-        callLinkInfo->machineCodeTarget = codePtr.retagged(CodePtrTag, LLIntCallICPtrTag);
+        callLinkInfo->machineCodeTarget = codePtr;
         if (codeBlock)
             codeBlock->linkIncomingCall(exec, callLinkInfo);
     }
 
-    assertIsTaggedWith(codePtr.executableAddress(), CodePtrTag);
+    assertIsTaggedWith(codePtr.executableAddress(), JSEntryPtrTag);
     PoisonedMasmPtr::assertIsNotPoisoned(codePtr.executableAddress());
-    LLINT_CALL_RETURN(exec, execCallee, codePtr.executableAddress(), CodePtrTag);
+    LLINT_CALL_RETURN(exec, execCallee, codePtr.executableAddress(), JSEntryPtrTag);
 }
 
 inline SlowPathReturnType genericCall(ExecState* exec, Instruction* pc, CodeSpecializationKind kind)
@@ -1637,7 +1636,7 @@
     execCallee->setArgumentCountIncludingThis(pc[3].u.operand);
     execCallee->setCallerFrame(exec);
     execCallee->uncheckedR(CallFrameSlot::callee) = calleeAsValue;
-    execCallee->setReturnPC(LLInt::getCodePtr(llint_generic_return_point));
+    execCallee->setReturnPC(LLInt::getExecutableAddress(llint_generic_return_point));
     execCallee->setCodeBlock(0);
     exec->setCurrentVPC(pc);
     
diff --git a/Source/JavaScriptCore/llint/LLIntThunks.cpp b/Source/JavaScriptCore/llint/LLIntThunks.cpp
index 62aa653..28b9a8e 100644
--- a/Source/JavaScriptCore/llint/LLIntThunks.cpp
+++ b/Source/JavaScriptCore/llint/LLIntThunks.cpp
@@ -44,6 +44,7 @@
 
 EncodedJSValue JS_EXPORT_PRIVATE vmEntryToWasm(void* code, VM* vm, ProtoCallFrame* frame)
 {
+    code = retagCodePtr<WasmEntryPtrTag, JSEntryPtrTag>(code);
     return vmEntryToJavaScript(code, vm, frame);
 }
     
@@ -51,50 +52,52 @@
 
 namespace LLInt {
 
-static MacroAssemblerCodeRef generateThunkWithJumpTo(VM* vm, OpcodeID opcodeID, const char *thunkKind)
+static MacroAssemblerCodeRef<JITThunkPtrTag> generateThunkWithJumpTo(VM* vm, OpcodeID opcodeID, const char *thunkKind)
 {
     JSInterfaceJIT jit(vm);
 
     // FIXME: there's probably a better way to do it on X86, but I'm not sure I care.
-    LLIntCode target = LLInt::getCodeFunctionPtr(opcodeID);
-    jit.move(JSInterfaceJIT::TrustedImmPtr(bitwise_cast<void*>(target)), JSInterfaceJIT::regT0);
-    jit.jump(JSInterfaceJIT::regT0, CodePtrTag);
+    LLIntCode target = LLInt::getCodeFunctionPtr<JSEntryPtrTag>(opcodeID);
+    assertIsTaggedWith(target, JSEntryPtrTag);
+
+    jit.move(JSInterfaceJIT::TrustedImmPtr(target), JSInterfaceJIT::regT0);
+    jit.jump(JSInterfaceJIT::regT0, JSEntryPtrTag);
 
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(patchBuffer, CodePtrTag, "LLInt %s prologue thunk", thunkKind);
+    return FINALIZE_CODE(patchBuffer, JITThunkPtrTag, "LLInt %s prologue thunk", thunkKind);
 }
 
-MacroAssemblerCodeRef functionForCallEntryThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> functionForCallEntryThunkGenerator(VM* vm)
 {
     return generateThunkWithJumpTo(vm, llint_function_for_call_prologue, "function for call");
 }
 
-MacroAssemblerCodeRef functionForConstructEntryThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> functionForConstructEntryThunkGenerator(VM* vm)
 {
     return generateThunkWithJumpTo(vm, llint_function_for_construct_prologue, "function for construct");
 }
 
-MacroAssemblerCodeRef functionForCallArityCheckThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> functionForCallArityCheckThunkGenerator(VM* vm)
 {
     return generateThunkWithJumpTo(vm, llint_function_for_call_arity_check, "function for call with arity check");
 }
 
-MacroAssemblerCodeRef functionForConstructArityCheckThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> functionForConstructArityCheckThunkGenerator(VM* vm)
 {
     return generateThunkWithJumpTo(vm, llint_function_for_construct_arity_check, "function for construct with arity check");
 }
 
-MacroAssemblerCodeRef evalEntryThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> evalEntryThunkGenerator(VM* vm)
 {
     return generateThunkWithJumpTo(vm, llint_eval_prologue, "eval");
 }
 
-MacroAssemblerCodeRef programEntryThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> programEntryThunkGenerator(VM* vm)
 {
     return generateThunkWithJumpTo(vm, llint_program_prologue, "program");
 }
 
-MacroAssemblerCodeRef moduleProgramEntryThunkGenerator(VM* vm)
+MacroAssemblerCodeRef<JITThunkPtrTag> moduleProgramEntryThunkGenerator(VM* vm)
 {
     return generateThunkWithJumpTo(vm, llint_module_program_prologue, "module_program");
 }
diff --git a/Source/JavaScriptCore/llint/LLIntThunks.h b/Source/JavaScriptCore/llint/LLIntThunks.h
index fc9742c..f6a84b8 100644
--- a/Source/JavaScriptCore/llint/LLIntThunks.h
+++ b/Source/JavaScriptCore/llint/LLIntThunks.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -42,12 +42,12 @@
 
 namespace LLInt {
 
-MacroAssemblerCodeRef functionForCallEntryThunkGenerator(VM*);
-MacroAssemblerCodeRef functionForConstructEntryThunkGenerator(VM*);
-MacroAssemblerCodeRef functionForCallArityCheckThunkGenerator(VM*);
-MacroAssemblerCodeRef functionForConstructArityCheckThunkGenerator(VM*);
-MacroAssemblerCodeRef evalEntryThunkGenerator(VM*);
-MacroAssemblerCodeRef programEntryThunkGenerator(VM*);
-MacroAssemblerCodeRef moduleProgramEntryThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> functionForCallEntryThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> functionForConstructEntryThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> functionForCallArityCheckThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> functionForConstructArityCheckThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> evalEntryThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> programEntryThunkGenerator(VM*);
+MacroAssemblerCodeRef<JITThunkPtrTag> moduleProgramEntryThunkGenerator(VM*);
 
 } } // namespace JSC::LLInt
diff --git a/Source/JavaScriptCore/llint/LowLevelInterpreter.asm b/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
index fa40917..adc1d6e 100644
--- a/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
+++ b/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
@@ -262,9 +262,8 @@
 
 # Pointer Tags
 const BytecodePtrTag = constexpr BytecodePtrTag
-const CodePtrTag = constexpr CodePtrTag
+const JSEntryPtrTag = constexpr JSEntryPtrTag
 const ExceptionHandlerPtrTag = constexpr ExceptionHandlerPtrTag
-const LLIntCallICPtrTag = constexpr LLIntCallICPtrTag
 const NoPtrTag = constexpr NoPtrTag
 const SlowPathPtrTag = constexpr SlowPathPtrTag
 
@@ -1037,7 +1036,7 @@
         else
             pop cfr
         end
-        jmp r0, CodePtrTag
+        jmp r0, JSEntryPtrTag
     .recover:
         codeBlockGetter(t1, t2)
     .continue:
diff --git a/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm b/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm
index fa58b2f..d51b6d1 100644
--- a/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm
+++ b/Source/JavaScriptCore/llint/LowLevelInterpreter32_64.asm
@@ -2011,8 +2011,8 @@
     storei t2, ArgumentCount + PayloadOffset[t3]
     storei CellTag, Callee + TagOffset[t3]
     move t3, sp
-    prepareCall(LLIntCallLinkInfo::machineCodeTarget[t1], t2, t3, t4, LLIntCallICPtrTag)
-    callTargetFunction(LLIntCallLinkInfo::machineCodeTarget[t1], LLIntCallICPtrTag)
+    prepareCall(LLIntCallLinkInfo::machineCodeTarget[t1], t2, t3, t4, JSEntryPtrTag)
+    callTargetFunction(LLIntCallLinkInfo::machineCodeTarget[t1], JSEntryPtrTag)
 
 .opCallSlow:
     slowPathForCall(slowPath, prepareCall)
diff --git a/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm b/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
index c2f64a3..626989d 100644
--- a/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
+++ b/Source/JavaScriptCore/llint/LowLevelInterpreter64.asm
@@ -254,7 +254,7 @@
     if C_LOOP
         cloopCallJSFunction entry
     else
-        call entry, CodePtrTag
+        call entry, JSEntryPtrTag
     end
     subp 16, sp
 end
@@ -270,10 +270,10 @@
     elsif X86_64_WIN
         # We need to allocate 32 bytes on the stack for the shadow space.
         subp 32, sp
-        call temp, CodePtrTag
+        call temp, JSEntryPtrTag
         addp 32, sp
     else
-        call temp, CodePtrTag
+        call temp, JSEntryPtrTag
     end
 end
 
@@ -370,7 +370,7 @@
             cCall2(_llint_loop_osr)
             btpz r0, .recover
             move r1, sp
-            jmp r0, CodePtrTag
+            jmp r0, JSEntryPtrTag
         .recover:
             loadi ArgumentCount + TagOffset[cfr], PC
         end)
@@ -2056,11 +2056,11 @@
     if POISON
         loadp _g_JITCodePoison, t2
         xorp LLIntCallLinkInfo::machineCodeTarget[t1], t2
-        prepareCall(t2, t1, t3, t4, LLIntCallICPtrTag)
-        callTargetFunction(t2, LLIntCallICPtrTag)
+        prepareCall(t2, t1, t3, t4, JSEntryPtrTag)
+        callTargetFunction(t2, JSEntryPtrTag)
     else
-        prepareCall(LLIntCallLinkInfo::machineCodeTarget[t1], t2, t3, t4, LLIntCallICPtrTag)
-        callTargetFunction(LLIntCallLinkInfo::machineCodeTarget[t1], LLIntCallICPtrTag)
+        prepareCall(LLIntCallLinkInfo::machineCodeTarget[t1], t2, t3, t4, JSEntryPtrTag)
+        callTargetFunction(LLIntCallLinkInfo::machineCodeTarget[t1], JSEntryPtrTag)
     end
 
 .opCallSlow:
@@ -2191,12 +2191,12 @@
     else
         if X86_64_WIN
             subp 32, sp
-            call executableOffsetToFunction[t1], CodePtrTag
+            call executableOffsetToFunction[t1], JSEntryPtrTag
             addp 32, sp
         else
             loadp _g_NativeCodePoison, t2
             xorp executableOffsetToFunction[t1], t2
-            call t2, CodePtrTag
+            call t2, JSEntryPtrTag
         end
     end
 
@@ -2234,12 +2234,12 @@
     else
         if X86_64_WIN
             subp 32, sp
-            call offsetOfFunction[t1], CodePtrTag
+            call offsetOfFunction[t1], JSEntryPtrTag
             addp 32, sp
         else
             loadp _g_NativeCodePoison, t2
             xorp offsetOfFunction[t1], t2
-            call t2, CodePtrTag
+            call t2, JSEntryPtrTag
         end
     end
 
diff --git a/Source/JavaScriptCore/profiler/ProfilerCompilation.cpp b/Source/JavaScriptCore/profiler/ProfilerCompilation.cpp
index ebe92c3..46e5eee 100644
--- a/Source/JavaScriptCore/profiler/ProfilerCompilation.cpp
+++ b/Source/JavaScriptCore/profiler/ProfilerCompilation.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2014, 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -84,7 +84,7 @@
     return counter.get();
 }
 
-void Compilation::addOSRExitSite(const Vector<const void*>& codeAddresses)
+void Compilation::addOSRExitSite(const Vector<MacroAssemblerCodePtr<JSInternalPtrTag>>& codeAddresses)
 {
     m_osrExitSites.append(OSRExitSite(codeAddresses));
 }
diff --git a/Source/JavaScriptCore/profiler/ProfilerCompilation.h b/Source/JavaScriptCore/profiler/ProfilerCompilation.h
index 1e05862..617f658 100644
--- a/Source/JavaScriptCore/profiler/ProfilerCompilation.h
+++ b/Source/JavaScriptCore/profiler/ProfilerCompilation.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012-2014, 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -70,7 +70,7 @@
     void addDescription(const CompiledBytecode&);
     void addDescription(const OriginStack&, const CString& description);
     ExecutionCounter* executionCounterFor(const OriginStack&);
-    void addOSRExitSite(const Vector<const void*>& codeAddresses);
+    void addOSRExitSite(const Vector<MacroAssemblerCodePtr<JSInternalPtrTag>>& codeAddresses);
     OSRExit* addOSRExit(unsigned id, const OriginStack&, ExitKind, bool isWatchpoint);
     
     void setJettisonReason(JettisonReason, const FireDetail*);
diff --git a/Source/JavaScriptCore/profiler/ProfilerOSRExitSite.cpp b/Source/JavaScriptCore/profiler/ProfilerOSRExitSite.cpp
index 8b5568f..382a107 100644
--- a/Source/JavaScriptCore/profiler/ProfilerOSRExitSite.cpp
+++ b/Source/JavaScriptCore/profiler/ProfilerOSRExitSite.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -41,7 +41,7 @@
     JSArray* result = constructEmptyArray(exec, 0);
     RETURN_IF_EXCEPTION(scope, { });
     for (unsigned i = 0; i < m_codeAddresses.size(); ++i) {
-        result->putDirectIndex(exec, i, jsString(exec, toString(RawPointer(m_codeAddresses[i]))));
+        result->putDirectIndex(exec, i, jsString(exec, toString(RawPointer(m_codeAddresses[i].dataLocation()))));
         RETURN_IF_EXCEPTION(scope, { });
     }
     return result;
diff --git a/Source/JavaScriptCore/profiler/ProfilerOSRExitSite.h b/Source/JavaScriptCore/profiler/ProfilerOSRExitSite.h
index 1776f1d..e3941ab 100644
--- a/Source/JavaScriptCore/profiler/ProfilerOSRExitSite.h
+++ b/Source/JavaScriptCore/profiler/ProfilerOSRExitSite.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012 Apple Inc. All rights reserved.
+ * Copyright (C) 2012-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,23 +26,24 @@
 #pragma once
 
 #include "JSCJSValue.h"
+#include "MacroAssemblerCodeRef.h"
 #include <wtf/Vector.h>
 
 namespace JSC { namespace Profiler {
 
 class OSRExitSite {
 public:
-    explicit OSRExitSite(const Vector<const void*>& codeAddresses)
+    explicit OSRExitSite(const Vector<MacroAssemblerCodePtr<JSInternalPtrTag>>& codeAddresses)
         : m_codeAddresses(codeAddresses)
     {
     }
     
-    const Vector<const void*>& codeAddress() const { return m_codeAddresses; }
+    const Vector<MacroAssemblerCodePtr<JSInternalPtrTag>>& codeAddress() const { return m_codeAddresses; }
     
     JSValue toJS(ExecState*) const;
 
 private:
-    Vector<const void*> m_codeAddresses;
+    Vector<MacroAssemblerCodePtr<JSInternalPtrTag>> m_codeAddresses;
 };
 
 } } // namespace JSC::Profiler
diff --git a/Source/JavaScriptCore/runtime/ExecutableBase.cpp b/Source/JavaScriptCore/runtime/ExecutableBase.cpp
index 3151245..4937b48 100644
--- a/Source/JavaScriptCore/runtime/ExecutableBase.cpp
+++ b/Source/JavaScriptCore/runtime/ExecutableBase.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2009, 2010, 2013, 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -54,8 +54,8 @@
 #if ENABLE(JIT)
     m_jitCodeForCall = nullptr;
     m_jitCodeForConstruct = nullptr;
-    m_jitCodeForCallWithArityCheck = MacroAssemblerCodePtr();
-    m_jitCodeForConstructWithArityCheck = MacroAssemblerCodePtr();
+    m_jitCodeForCallWithArityCheck = MacroAssemblerCodePtr<JSEntryPtrTag>();
+    m_jitCodeForConstructWithArityCheck = MacroAssemblerCodePtr<JSEntryPtrTag>();
 #endif
     m_numParametersForCall = NUM_PARAMETERS_NOT_COMPILED;
     m_numParametersForConstruct = NUM_PARAMETERS_NOT_COMPILED;
diff --git a/Source/JavaScriptCore/runtime/ExecutableBase.h b/Source/JavaScriptCore/runtime/ExecutableBase.h
index 8890a2a..09466a5 100644
--- a/Source/JavaScriptCore/runtime/ExecutableBase.h
+++ b/Source/JavaScriptCore/runtime/ExecutableBase.h
@@ -57,7 +57,7 @@
 
 class ExecutableBase : public JSCell {
     friend class JIT;
-    friend MacroAssemblerCodeRef boundThisNoArgsFunctionCallGenerator(VM*);
+    friend MacroAssemblerCodeRef<JITThunkPtrTag> boundThisNoArgsFunctionCallGenerator(VM*);
 
 protected:
     static const int NUM_PARAMETERS_IS_HOST = 0;
@@ -143,8 +143,8 @@
         ASSERT(kind == CodeForConstruct);
         return generatedJITCodeForConstruct();
     }
-    
-    MacroAssemblerCodePtr entrypointFor(CodeSpecializationKind kind, ArityCheckMode arity)
+
+    MacroAssemblerCodePtr<JSEntryPtrTag> entrypointFor(CodeSpecializationKind kind, ArityCheckMode arity)
     {
         // Check if we have a cached result. We only have it for arity check because we use the
         // no-arity entrypoint in non-virtual calls, which will "cache" this value directly in
@@ -152,17 +152,16 @@
         if (arity == MustCheckArity) {
             switch (kind) {
             case CodeForCall:
-                if (MacroAssemblerCodePtr result = m_jitCodeForCallWithArityCheck)
+                if (MacroAssemblerCodePtr<JSEntryPtrTag> result = m_jitCodeForCallWithArityCheck)
                     return result;
                 break;
             case CodeForConstruct:
-                if (MacroAssemblerCodePtr result = m_jitCodeForConstructWithArityCheck)
+                if (MacroAssemblerCodePtr<JSEntryPtrTag> result = m_jitCodeForConstructWithArityCheck)
                     return result;
                 break;
             }
         }
-        MacroAssemblerCodePtr result =
-            generatedJITCodeFor(kind)->addressForCall(arity);
+        MacroAssemblerCodePtr<JSEntryPtrTag> result = generatedJITCodeFor(kind)->addressForCall(arity);
         if (arity == MustCheckArity) {
             // Cache the result; this is necessary for the JIT's virtual call optimizations.
             switch (kind) {
@@ -232,8 +231,8 @@
     Intrinsic m_intrinsic;
     RefPtr<JITCode> m_jitCodeForCall;
     RefPtr<JITCode> m_jitCodeForConstruct;
-    MacroAssemblerCodePtr m_jitCodeForCallWithArityCheck;
-    MacroAssemblerCodePtr m_jitCodeForConstructWithArityCheck;
+    MacroAssemblerCodePtr<JSEntryPtrTag> m_jitCodeForCallWithArityCheck;
+    MacroAssemblerCodePtr<JSEntryPtrTag> m_jitCodeForConstructWithArityCheck;
 };
 
 } // namespace JSC
diff --git a/Source/JavaScriptCore/runtime/NativeExecutable.cpp b/Source/JavaScriptCore/runtime/NativeExecutable.cpp
index a790fbb..51cfcc8 100644
--- a/Source/JavaScriptCore/runtime/NativeExecutable.cpp
+++ b/Source/JavaScriptCore/runtime/NativeExecutable.cpp
@@ -67,10 +67,10 @@
     m_jitCodeForConstructWithArityCheck = m_jitCodeForConstruct->addressForCall(MustCheckArity);
     m_name = name;
 
-    assertIsTaggedWith(m_jitCodeForCall->addressForCall(ArityCheckNotRequired).executableAddress(), CodePtrTag);
-    assertIsTaggedWith(m_jitCodeForConstruct->addressForCall(ArityCheckNotRequired).executableAddress(), CodePtrTag);
-    assertIsTaggedWith(m_jitCodeForCallWithArityCheck.executableAddress(), CodePtrTag);
-    assertIsTaggedWith(m_jitCodeForConstructWithArityCheck.executableAddress(), CodePtrTag);
+    assertIsTaggedWith(m_jitCodeForCall->addressForCall(ArityCheckNotRequired).executableAddress(), JSEntryPtrTag);
+    assertIsTaggedWith(m_jitCodeForConstruct->addressForCall(ArityCheckNotRequired).executableAddress(), JSEntryPtrTag);
+    assertIsTaggedWith(m_jitCodeForCallWithArityCheck.executableAddress(), JSEntryPtrTag);
+    assertIsTaggedWith(m_jitCodeForConstructWithArityCheck.executableAddress(), JSEntryPtrTag);
 }
 
 NativeExecutable::NativeExecutable(VM& vm, TaggedNativeFunction function, TaggedNativeFunction constructor, Intrinsic intrinsic, const DOMJIT::Signature* signature)
diff --git a/Source/JavaScriptCore/runtime/NativeFunction.h b/Source/JavaScriptCore/runtime/NativeFunction.h
index f6e407a..958dcc5 100644
--- a/Source/JavaScriptCore/runtime/NativeFunction.h
+++ b/Source/JavaScriptCore/runtime/NativeFunction.h
@@ -70,10 +70,10 @@
     explicit TaggedNativeFunction(intptr_t bits) : m_ptr(bitwise_cast<void*>(bits)) { }
 
     TaggedNativeFunction(NativeFunction func)
-        : m_ptr(tagCFunctionPtr<void*>(func.m_ptr, CodePtrTag))
+        : m_ptr(tagCFunctionPtr<void*, JSEntryPtrTag>(func.m_ptr))
     { }
     TaggedNativeFunction(RawNativeFunction func)
-        : m_ptr(tagCFunctionPtr<void*>(func, CodePtrTag))
+        : m_ptr(tagCFunctionPtr<void*, JSEntryPtrTag>(func))
     { }
 
     explicit operator bool() const { return !!m_ptr; }
@@ -86,7 +86,7 @@
     explicit operator NativeFunction()
     {
         ASSERT(m_ptr);
-        return untagCFunctionPtr<NativeFunction>(m_ptr, CodePtrTag);
+        return untagCFunctionPtr<NativeFunction, JSEntryPtrTag>(m_ptr);
     }
 
     void* rawPointer() const { return m_ptr; }
diff --git a/Source/JavaScriptCore/runtime/PtrTag.h b/Source/JavaScriptCore/runtime/PtrTag.h
index d3ba632..a9d1581 100644
--- a/Source/JavaScriptCore/runtime/PtrTag.h
+++ b/Source/JavaScriptCore/runtime/PtrTag.h
@@ -31,52 +31,28 @@
 
 #define FOR_EACH_PTRTAG_ENUM(v) \
     v(NoPtrTag) \
-    v(NearCodePtrTag) \
     v(CFunctionPtrTag) \
     \
-    v(ArityFixupPtrTag) \
     v(B3CCallPtrTag) \
+    v(B3CompilationPtrTag) \
     v(BytecodePtrTag) \
-    v(BytecodeHelperPtrTag) \
-    v(CodePtrTag) \
-    v(DFGOSREntryPtrTag) \
-    v(DFGOSRExitPtrTag) \
-    v(DFGOperationPtrTag) \
+    v(DisassemblyPtrTag) \
     v(ExceptionHandlerPtrTag) \
-    v(FTLCodePtrTag) \
-    v(FTLLazySlowPathPtrTag) \
-    v(FTLOSRExitPtrTag) \
-    v(FTLOperationPtrTag) \
-    v(FTLSlowPathPtrTag) \
-    v(GetPropertyPtrTag) \
-    v(GetterSetterPtrTag) \
-    v(HasPropertyPtrTag) \
-    v(JITCodePtrTag) \
-    v(JITOperationPtrTag) \
     v(JITThunkPtrTag) \
-    v(JITWriteThunkPtrTag) \
-    v(LLIntCallICPtrTag) \
-    v(LinkCallPtrTag) \
-    v(LinkCallResultPtrTag) \
-    v(LinkPolymorphicCallPtrTag) \
-    v(LinkPolymorphicCallResultPtrTag) \
-    v(LinkVirtualCallPtrTag) \
-    v(LinkVirtualCallResultPtrTag) \
-    v(MathICPtrTag) \
-    v(NativeCodePtrTag) \
-    v(PutPropertyPtrTag) \
+    v(JITStubRoutinePtrTag) \
+    v(JSEntryPtrTag) \
+    v(JSInternalPtrTag) \
+    v(JSSwitchPtrTag) \
+    v(LinkBufferPtrTag) \
+    v(OperationPtrTag) \
+    v(OSRExitPtrTag) \
     v(SlowPathPtrTag) \
-    v(SpecializedThunkPtrTag) \
-    v(SwitchTablePtrTag) \
-    v(ThrowExceptionPtrTag) \
+    v(WasmEntryPtrTag) \
     v(Yarr8BitPtrTag) \
     v(Yarr16BitPtrTag) \
     v(YarrMatchOnly8BitPtrTag) \
     v(YarrMatchOnly16BitPtrTag) \
     v(YarrBacktrackPtrTag) \
-    v(WasmCallPtrTag) \
-    v(WasmHelperPtrTag) \
-
 
 enum PtrTag : uintptr_t {
 #define DECLARE_PTRTAG_ENUM(tag)  tag,
@@ -85,8 +61,7 @@
 };
 
 static_assert(static_cast<uintptr_t>(NoPtrTag) == static_cast<uintptr_t>(0), "");
-static_assert(static_cast<uintptr_t>(NearCodePtrTag) == static_cast<uintptr_t>(1), "");
-static_assert(static_cast<uintptr_t>(CFunctionPtrTag) == static_cast<uintptr_t>(2), "");
+static_assert(static_cast<uintptr_t>(CFunctionPtrTag) == static_cast<uintptr_t>(1), "");
 
 inline const char* ptrTagName(PtrTag tag)
 {
@@ -98,10 +73,7 @@
 #undef RETURN_PTRTAG_NAME
 }
 
-uintptr_t nextPtrTagID();
-
 #if !USE(POINTER_PROFILING)
-inline uintptr_t nextPtrTagID() { return 0; }
 
 inline const char* tagForPtr(const void*) { return "<no tag>"; }
 
@@ -111,21 +83,39 @@
 template<typename T, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value && !std::is_same<T, PtrType>::value>>
 inline constexpr T tagCodePtr(PtrType ptr, PtrTag) { return bitwise_cast<T>(ptr); }
 
+template<typename T, PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline T tagCodePtr(PtrType ptr) { return bitwise_cast<T>(ptr); }
+
 template<typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
 inline constexpr PtrType tagCodePtr(PtrType ptr, PtrTag) { return ptr; }
 
+template<PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline PtrType tagCodePtr(PtrType ptr) { return ptr; }
+
 template<typename T, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value && !std::is_same<T, PtrType>::value>>
 inline constexpr T untagCodePtr(PtrType ptr, PtrTag) { return bitwise_cast<T>(ptr); }
 
+template<typename T, PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline T untagCodePtr(PtrType ptr)  { return bitwise_cast<T>(ptr); }
+
 template<typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
 inline constexpr PtrType untagCodePtr(PtrType ptr, PtrTag) { return ptr; }
 
+template<PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline PtrType untagCodePtr(PtrType ptr) { return ptr; }
+
 template<typename T, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value && !std::is_same<T, PtrType>::value>>
 inline constexpr T retagCodePtr(PtrType ptr, PtrTag, PtrTag) { return bitwise_cast<T>(ptr); }
 
+template<typename T, PtrTag, PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline T retagCodePtr(PtrType ptr) { return bitwise_cast<T>(ptr); }
+
 template<typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
 inline constexpr PtrType retagCodePtr(PtrType ptr, PtrTag, PtrTag) { return ptr; }
 
+template<PtrTag, PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline PtrType retagCodePtr(PtrType ptr) { return ptr; }
+
 template<typename T, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value && !std::is_same<T, PtrType>::value>>
 inline constexpr T removeCodePtrTag(PtrType ptr) { return bitwise_cast<T>(ptr); }
 
@@ -135,15 +125,27 @@
 template<typename T, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value && !std::is_same<T, PtrType>::value>>
 inline T tagCFunctionPtr(PtrType ptr, PtrTag) { return bitwise_cast<T>(ptr); }
 
+template<typename T, PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline T tagCFunctionPtr(PtrType ptr) { return bitwise_cast<T>(ptr); }
+
 template<typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
 inline PtrType tagCFunctionPtr(PtrType ptr, PtrTag) { return ptr; }
 
+template<PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline PtrType tagCFunctionPtr(PtrType ptr) { return ptr; }
+
 template<typename T, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value && !std::is_same<T, PtrType>::value>>
 inline T untagCFunctionPtr(PtrType ptr, PtrTag) { return bitwise_cast<T>(ptr); }
 
+template<typename T, PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline T untagCFunctionPtr(PtrType ptr) { return bitwise_cast<T>(ptr); }
+
 template<typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
 inline PtrType untagCFunctionPtr(PtrType ptr, PtrTag) { return ptr; }
 
+template<PtrTag, typename PtrType, typename = std::enable_if_t<std::is_pointer<PtrType>::value>>
+inline PtrType untagCFunctionPtr(PtrType ptr) { return ptr; }
+
 template<typename PtrType> void assertIsCFunctionPtr(PtrType) { }
 template<typename PtrType> void assertIsNullOrCFunctionPtr(PtrType) { }
 
diff --git a/Source/JavaScriptCore/runtime/PutPropertySlot.h b/Source/JavaScriptCore/runtime/PutPropertySlot.h
index fa8d302..be4daf7 100644
--- a/Source/JavaScriptCore/runtime/PutPropertySlot.h
+++ b/Source/JavaScriptCore/runtime/PutPropertySlot.h
@@ -49,7 +49,6 @@
         , m_isInitialization(isInitialization)
         , m_context(context)
         , m_cacheability(CachingAllowed)
-        , m_putFunction(nullptr)
     {
     }
 
@@ -67,17 +66,15 @@
         m_offset = offset;
     }
 
-    void setCustomValue(JSObject* base, PutValueFunc function)
+    void setCustomValue(JSObject* base, FunctionPtr<OperationPtrTag> function)
     {
-        assertIsNullOrCFunctionPtr(function);
         m_type = CustomValue;
         m_base = base;
         m_putFunction = function;
     }
 
-    void setCustomAccessor(JSObject* base, PutValueFunc function)
+    void setCustomAccessor(JSObject* base, FunctionPtr<OperationPtrTag> function)
     {
-        assertIsNullOrCFunctionPtr(function);
         m_type = CustomAccessor;
         m_base = base;
         m_putFunction = function;
@@ -100,7 +97,7 @@
         m_isStrictMode = value;
     }
 
-    PutValueFunc customSetter() const
+    FunctionPtr<OperationPtrTag> customSetter() const
     {
         ASSERT(isCacheableCustom());
         return m_putFunction;
@@ -140,7 +137,7 @@
     bool m_isInitialization;
     uint8_t m_context;
     CacheabilityType m_cacheability;
-    PutValueFunc m_putFunction;
+    FunctionPtr<OperationPtrTag> m_putFunction;
 };
 
 } // namespace JSC
diff --git a/Source/JavaScriptCore/runtime/ScriptExecutable.cpp b/Source/JavaScriptCore/runtime/ScriptExecutable.cpp
index c34021a..5edf7aa 100644
--- a/Source/JavaScriptCore/runtime/ScriptExecutable.cpp
+++ b/Source/JavaScriptCore/runtime/ScriptExecutable.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2009, 2010, 2013, 2015-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2009-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -137,12 +137,12 @@
     switch (kind) {
     case CodeForCall:
         m_jitCodeForCall = genericCodeBlock ? genericCodeBlock->jitCode() : nullptr;
-        m_jitCodeForCallWithArityCheck = MacroAssemblerCodePtr();
+        m_jitCodeForCallWithArityCheck = nullptr;
         m_numParametersForCall = genericCodeBlock ? genericCodeBlock->numParameters() : NUM_PARAMETERS_NOT_COMPILED;
         break;
     case CodeForConstruct:
         m_jitCodeForConstruct = genericCodeBlock ? genericCodeBlock->jitCode() : nullptr;
-        m_jitCodeForConstructWithArityCheck = MacroAssemblerCodePtr();
+        m_jitCodeForConstructWithArityCheck = nullptr;
         m_numParametersForConstruct = genericCodeBlock ? genericCodeBlock->numParameters() : NUM_PARAMETERS_NOT_COMPILED;
         break;
     }
diff --git a/Source/JavaScriptCore/runtime/VM.cpp b/Source/JavaScriptCore/runtime/VM.cpp
index 352f0a2..8be8ffd 100644
--- a/Source/JavaScriptCore/runtime/VM.cpp
+++ b/Source/JavaScriptCore/runtime/VM.cpp
@@ -681,23 +681,23 @@
     UNUSED_PARAM(intrinsic);
 #endif // ENABLE(JIT)
     return NativeExecutable::create(*this,
-        adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
-        adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
+        adoptRef(*new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
+        adoptRef(*new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
         NoIntrinsic, signature, name);
 }
 
-MacroAssemblerCodePtr VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
+MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
 {
 #if ENABLE(JIT)
     if (canUseJIT()) {
         if (kind == CodeForCall)
-            return jitStubs->ctiInternalFunctionCall(this);
-        return jitStubs->ctiInternalFunctionConstruct(this);
+            return jitStubs->ctiInternalFunctionCall(this).retagged<JSEntryPtrTag>();
+        return jitStubs->ctiInternalFunctionConstruct(this).retagged<JSEntryPtrTag>();
     }
 #endif
     if (kind == CodeForCall)
-        return MacroAssemblerCodePtr::createLLIntCodePtr(llint_internal_function_call_trampoline);
-    return MacroAssemblerCodePtr::createLLIntCodePtr(llint_internal_function_construct_trampoline);
+        return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
+    return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
 }
 
 VM::ClientData::~ClientData()
diff --git a/Source/JavaScriptCore/runtime/VM.h b/Source/JavaScriptCore/runtime/VM.h
index b160ac1..9669edd 100644
--- a/Source/JavaScriptCore/runtime/VM.h
+++ b/Source/JavaScriptCore/runtime/VM.h
@@ -542,7 +542,7 @@
     Interpreter* interpreter;
 #if ENABLE(JIT)
     std::unique_ptr<JITThunks> jitStubs;
-    MacroAssemblerCodeRef getCTIStub(ThunkGenerator generator)
+    MacroAssemblerCodeRef<JITThunkPtrTag> getCTIStub(ThunkGenerator generator)
     {
         return jitStubs->ctiStub(this, generator);
     }
@@ -554,7 +554,7 @@
     NativeExecutable* getHostFunction(NativeFunction, NativeFunction constructor, const String& name);
     NativeExecutable* getHostFunction(NativeFunction, Intrinsic, NativeFunction constructor, const DOMJIT::Signature*, const String& name);
 
-    MacroAssemblerCodePtr getCTIInternalFunctionTrampolineFor(CodeSpecializationKind);
+    MacroAssemblerCodePtr<JSEntryPtrTag> getCTIInternalFunctionTrampolineFor(CodeSpecializationKind);
 
     static ptrdiff_t exceptionOffset()
     {
diff --git a/Source/JavaScriptCore/wasm/WasmB3IRGenerator.cpp b/Source/JavaScriptCore/wasm/WasmB3IRGenerator.cpp
index e97c5be..107336b 100644
--- a/Source/JavaScriptCore/wasm/WasmB3IRGenerator.cpp
+++ b/Source/JavaScriptCore/wasm/WasmB3IRGenerator.cpp
@@ -437,7 +437,7 @@
                     overflow.append(jit.branchPtr(CCallHelpers::Above, scratch1, fp));
                 overflow.append(jit.branchPtr(CCallHelpers::Below, scratch1, scratch2));
                 jit.addLinkTask([overflow] (LinkBuffer& linkBuffer) {
-                    linkBuffer.link(overflow, CodeLocationLabel(Thunks::singleton().stub(throwStackOverflowFromWasmThunkGenerator).code()));
+                    linkBuffer.link(overflow, CodeLocationLabel<JITThunkPtrTag>(Thunks::singleton().stub(throwStackOverflowFromWasmThunkGenerator).code()));
                 });
             } else if (m_usesInstanceValue && Context::useFastTLS()) {
                 // No overflow check is needed, but the instance values still needs to be correct.
@@ -498,7 +498,7 @@
     auto jumpToExceptionStub = jit.jump();
 
     jit.addLinkTask([jumpToExceptionStub] (LinkBuffer& linkBuffer) {
-        linkBuffer.link(jumpToExceptionStub, CodeLocationLabel(Thunks::singleton().stub(throwExceptionFromWasmThunkGenerator).code()));
+        linkBuffer.link(jumpToExceptionStub, CodeLocationLabel<JITThunkPtrTag>(Thunks::singleton().stub(throwExceptionFromWasmThunkGenerator).code()));
     });
 }
 
@@ -937,7 +937,7 @@
             jit.jump(tierUpResume);
 
             jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-                MacroAssembler::repatchNearCall(linkBuffer.locationOfNearCall(call), CodeLocationLabel(Thunks::singleton().stub(triggerOMGTierUpThunkGenerator).code()));
+                MacroAssembler::repatchNearCall(linkBuffer.locationOfNearCall<NoPtrTag>(call), CodeLocationLabel<JITThunkPtrTag>(Thunks::singleton().stub(triggerOMGTierUpThunkGenerator).code()));
 
             });
         });
@@ -1112,7 +1112,7 @@
                     AllowMacroScratchRegisterUsage allowScratch(jit);
                     CCallHelpers::Call call = jit.threadSafePatchableNearCall();
                     jit.addLinkTask([unlinkedWasmToWasmCalls, call, functionIndex] (LinkBuffer& linkBuffer) {
-                        unlinkedWasmToWasmCalls->append({ linkBuffer.locationOfNearCall(call), functionIndex });
+                        unlinkedWasmToWasmCalls->append({ linkBuffer.locationOfNearCall<WasmEntryPtrTag>(call), functionIndex });
                     });
                 });
             });
@@ -1128,7 +1128,6 @@
         if (Options::usePoisoning())
             jumpDestination = isEmbedderBlock->appendNew<Value>(m_proc, BitXor, origin(), jumpDestination, isEmbedderBlock->appendNew<Const64Value>(m_proc, origin(), g_JITCodePoison));
 
-        PtrTag callTag = ptrTag(WasmCallPtrTag, signature.hash());
         Value* embedderCallResult = wasmCallingConvention().setupCall(m_proc, isEmbedderBlock, origin(), args, toB3Type(returnType),
             [=] (PatchpointValue* patchpoint) {
                 patchpoint->effects.writesPinned = true;
@@ -1138,9 +1137,9 @@
                 // We pessimistically assume we could be calling to something that is bounds checking.
                 // FIXME: We shouldn't have to do this: https://bugs.webkit.org/show_bug.cgi?id=172181
                 patchpoint->clobberLate(PinnedRegisterInfo::get().toSave(MemoryMode::BoundsChecking));
-                patchpoint->setGenerator([returnType, callTag] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
+                patchpoint->setGenerator([returnType] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
                     AllowMacroScratchRegisterUsage allowScratch(jit);
-                    jit.call(params[returnType == Void ? 0 : 1].gpr(), callTag);
+                    jit.call(params[returnType == Void ? 0 : 1].gpr(), WasmEntryPtrTag);
                 });
             });
         UpsilonValue* embedderCallResultUpsilon = returnType == Void ? nullptr : isEmbedderBlock->appendNew<UpsilonValue>(m_proc, origin(), embedderCallResult);
@@ -1168,7 +1167,7 @@
                     AllowMacroScratchRegisterUsage allowScratch(jit);
                     CCallHelpers::Call call = jit.threadSafePatchableNearCall();
                     jit.addLinkTask([unlinkedWasmToWasmCalls, call, functionIndex] (LinkBuffer& linkBuffer) {
-                        unlinkedWasmToWasmCalls->append({ linkBuffer.locationOfNearCall(call), functionIndex });
+                        unlinkedWasmToWasmCalls->append({ linkBuffer.locationOfNearCall<WasmEntryPtrTag>(call), functionIndex });
                     });
                 });
             });
@@ -1310,7 +1309,6 @@
         calleeCode = m_currentBlock->appendNew<Value>(m_proc, BitXor, origin(), calleeCode, m_currentBlock->appendNew<Const64Value>(m_proc, origin(), g_JITCodePoison));
 
     Type returnType = signature.returnType();
-    PtrTag callTag = ptrTag(WasmCallPtrTag, signature.hash());
     result = wasmCallingConvention().setupCall(m_proc, m_currentBlock, origin(), args, toB3Type(returnType),
         [=] (PatchpointValue* patchpoint) {
             patchpoint->effects.writesPinned = true;
@@ -1325,7 +1323,7 @@
             patchpoint->append(calleeCode, ValueRep::SomeRegister);
             patchpoint->setGenerator([=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
-                jit.call(params[returnType == Void ? 0 : 1].gpr(), callTag);
+                jit.call(params[returnType == Void ? 0 : 1].gpr(), WasmEntryPtrTag);
             });
         });
 
diff --git a/Source/JavaScriptCore/wasm/WasmBBQPlan.cpp b/Source/JavaScriptCore/wasm/WasmBBQPlan.cpp
index ba4ee4f..a7820c9 100644
--- a/Source/JavaScriptCore/wasm/WasmBBQPlan.cpp
+++ b/Source/JavaScriptCore/wasm/WasmBBQPlan.cpp
@@ -174,10 +174,8 @@
         if (import->kind != ExternalKind::Function)
             continue;
         unsigned importFunctionIndex = m_wasmToWasmExitStubs.size();
-        SignatureIndex signatureIndex = m_moduleInformation->importFunctionSignatureIndices[importFunctionIndex];
-        const Signature& signature = SignatureInformation::get(signatureIndex);
         dataLogLnIf(WasmBBQPlanInternal::verbose, "Processing import function number ", importFunctionIndex, ": ", makeString(import->module), ": ", makeString(import->field));
-        auto binding = wasmToWasm(signature, importFunctionIndex);
+        auto binding = wasmToWasm(importFunctionIndex);
         if (UNLIKELY(!binding)) {
             switch (binding.error()) {
             case BindingFailure::OutOfMemory:
@@ -303,7 +301,6 @@
             CompilationContext& context = m_compilationContexts[functionIndex];
             SignatureIndex signatureIndex = m_moduleInformation->internalFunctionSignatureIndices[functionIndex];
             const Signature& signature = SignatureInformation::get(signatureIndex);
-            PtrTag callTag = ptrTag(WasmCallPtrTag, signature.hash());
             {
                 LinkBuffer linkBuffer(*context.wasmEntrypointJIT, nullptr, JITCompilationCanFail);
                 if (UNLIKELY(linkBuffer.didFailToAllocate())) {
@@ -312,7 +309,7 @@
                 }
 
                 m_wasmInternalFunctions[functionIndex]->entrypoint.compilation = std::make_unique<B3::Compilation>(
-                    FINALIZE_CODE(linkBuffer, callTag, "WebAssembly function[%i] %s", functionIndex, signature.toString().ascii().data()),
+                    FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "WebAssembly function[%i] %s", functionIndex, signature.toString().ascii().data()),
                     WTFMove(context.wasmEntrypointByproducts));
             }
 
@@ -324,23 +321,20 @@
                 }
 
                 embedderToWasmInternalFunction->entrypoint.compilation = std::make_unique<B3::Compilation>(
-                    FINALIZE_CODE(linkBuffer, CodePtrTag, "Embedder->WebAssembly entrypoint[%i] %s", functionIndex, signature.toString().ascii().data()),
+                    FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "Embedder->WebAssembly entrypoint[%i] %s", functionIndex, signature.toString().ascii().data()),
                     WTFMove(context.embedderEntrypointByproducts));
             }
         }
 
         for (auto& unlinked : m_unlinkedWasmToWasmCalls) {
             for (auto& call : unlinked) {
-                MacroAssemblerCodePtr executableAddress;
+                MacroAssemblerCodePtr<WasmEntryPtrTag> executableAddress;
                 if (m_moduleInformation->isImportedFunctionFromFunctionIndexSpace(call.functionIndexSpace)) {
                     // FIXME imports could have been linked in B3, instead of generating a patchpoint. This condition should be replaced by a RELEASE_ASSERT. https://bugs.webkit.org/show_bug.cgi?id=166462
                     executableAddress = m_wasmToWasmExitStubs.at(call.functionIndexSpace).code();
                 } else
-                    executableAddress = m_wasmInternalFunctions.at(call.functionIndexSpace - m_moduleInformation->importFunctionCount())->entrypoint.compilation->code();
-                SignatureIndex signatureIndex = m_moduleInformation->signatureIndexFromFunctionIndexSpace(call.functionIndexSpace);
-                const Signature& signature = SignatureInformation::get(signatureIndex);
-                PtrTag oldTag = ptrTag(WasmCallPtrTag, signature.hash());
-                MacroAssembler::repatchNearCall(call.callLocation, CodeLocationLabel(executableAddress.retagged(oldTag, NearCodePtrTag)));
+                    executableAddress = m_wasmInternalFunctions.at(call.functionIndexSpace - m_moduleInformation->importFunctionCount())->entrypoint.compilation->code().retagged<WasmEntryPtrTag>();
+                MacroAssembler::repatchNearCall(call.callLocation, CodeLocationLabel<WasmEntryPtrTag>(executableAddress));
             }
         }
     }
diff --git a/Source/JavaScriptCore/wasm/WasmBBQPlan.h b/Source/JavaScriptCore/wasm/WasmBBQPlan.h
index c6ea497..d88145d 100644
--- a/Source/JavaScriptCore/wasm/WasmBBQPlan.h
+++ b/Source/JavaScriptCore/wasm/WasmBBQPlan.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -88,7 +88,7 @@
         return WTFMove(m_callLinkInfos);
     }
 
-    Vector<MacroAssemblerCodeRef>&& takeWasmToWasmExitStubs()
+    Vector<MacroAssemblerCodeRef<WasmEntryPtrTag>>&& takeWasmToWasmExitStubs()
     {
         RELEASE_ASSERT(!failed() && !hasWork());
         return WTFMove(m_wasmToWasmExitStubs);
@@ -137,7 +137,7 @@
     const char* stateString(State);
     
     Bag<CallLinkInfo> m_callLinkInfos;
-    Vector<MacroAssemblerCodeRef> m_wasmToWasmExitStubs;
+    Vector<MacroAssemblerCodeRef<WasmEntryPtrTag>> m_wasmToWasmExitStubs;
     Vector<std::unique_ptr<InternalFunction>> m_wasmInternalFunctions;
     HashSet<uint32_t, typename DefaultHash<uint32_t>::Hash, WTF::UnsignedWithZeroKeyHashTraits<uint32_t>> m_exportedFunctionIndices;
     HashMap<uint32_t, std::unique_ptr<InternalFunction>, typename DefaultHash<uint32_t>::Hash, WTF::UnsignedWithZeroKeyHashTraits<uint32_t>> m_embedderToWasmInternalFunctions;
diff --git a/Source/JavaScriptCore/wasm/WasmBinding.cpp b/Source/JavaScriptCore/wasm/WasmBinding.cpp
index 9fa0653..26bac6c 100644
--- a/Source/JavaScriptCore/wasm/WasmBinding.cpp
+++ b/Source/JavaScriptCore/wasm/WasmBinding.cpp
@@ -37,7 +37,7 @@
 
 using JIT = CCallHelpers;
 
-Expected<MacroAssemblerCodeRef, BindingFailure> wasmToWasm(const Signature& signature, unsigned importIndex)
+Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> wasmToWasm(unsigned importIndex)
 {
     // FIXME: Consider uniquifying the stubs based on signature + index to see if this saves memory.
     // https://bugs.webkit.org/show_bug.cgi?id=184157
@@ -80,14 +80,13 @@
     jit.loadPtr(scratch, scratch);
     if (Options::usePoisoning())
         jit.xorPtr(JIT::TrustedImmPtr(g_JITCodePoison), scratch);
-    PtrTag tag = ptrTag(WasmCallPtrTag, signature.hash());
-    jit.jump(scratch, tag);
+    jit.jump(scratch, WasmEntryPtrTag);
 
     LinkBuffer patchBuffer(jit, GLOBAL_THUNK_ID, JITCompilationCanFail);
     if (UNLIKELY(patchBuffer.didFailToAllocate()))
         return makeUnexpected(BindingFailure::OutOfMemory);
 
-    return FINALIZE_CODE(patchBuffer, tag, "WebAssembly->WebAssembly import[%i]", importIndex);
+    return FINALIZE_CODE(patchBuffer, WasmEntryPtrTag, "WebAssembly->WebAssembly import[%i]", importIndex);
 }
 
 } } // namespace JSC::Wasm
diff --git a/Source/JavaScriptCore/wasm/WasmBinding.h b/Source/JavaScriptCore/wasm/WasmBinding.h
index bf511df..007ee81 100644
--- a/Source/JavaScriptCore/wasm/WasmBinding.h
+++ b/Source/JavaScriptCore/wasm/WasmBinding.h
@@ -41,7 +41,7 @@
     OutOfMemory,
 };
 
-Expected<MacroAssemblerCodeRef, BindingFailure> wasmToWasm(const Signature&, unsigned importIndex);
+Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> wasmToWasm(unsigned importIndex);
 
 } } // namespace JSC::Wasm
 
diff --git a/Source/JavaScriptCore/wasm/WasmCallee.h b/Source/JavaScriptCore/wasm/WasmCallee.h
index 14bf522..01c992e 100644
--- a/Source/JavaScriptCore/wasm/WasmCallee.h
+++ b/Source/JavaScriptCore/wasm/WasmCallee.h
@@ -50,7 +50,7 @@
         return adoptRef(*callee);
     }
 
-    MacroAssemblerCodePtr entrypoint() const { return m_entrypoint.compilation->code(); }
+    MacroAssemblerCodePtr<WasmEntryPtrTag> entrypoint() const { return m_entrypoint.compilation->code().retagged<WasmEntryPtrTag>(); }
 
     RegisterAtOffsetList* calleeSaveRegisters() { return &m_entrypoint.calleeSaveRegisters; }
     IndexOrName indexOrName() const { return m_indexOrName; }
diff --git a/Source/JavaScriptCore/wasm/WasmCallingConvention.h b/Source/JavaScriptCore/wasm/WasmCallingConvention.h
index ca832ee..126a3f3 100644
--- a/Source/JavaScriptCore/wasm/WasmCallingConvention.h
+++ b/Source/JavaScriptCore/wasm/WasmCallingConvention.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -85,7 +85,7 @@
 
 public:
     static unsigned headerSizeInBytes() { return headerSize; }
-    void setupFrameInPrologue(CodeLocationDataLabelPtr* calleeMoveLocation, B3::Procedure& proc, B3::Origin origin, B3::BasicBlock* block) const
+    void setupFrameInPrologue(CodeLocationDataLabelPtr<WasmEntryPtrTag>* calleeMoveLocation, B3::Procedure& proc, B3::Origin origin, B3::BasicBlock* block) const
     {
         static_assert(CallFrameSlot::callee * sizeof(Register) < headerSize, "We rely on this here for now.");
         static_assert(CallFrameSlot::codeBlock * sizeof(Register) < headerSize, "We rely on this here for now.");
@@ -98,7 +98,7 @@
                 GPRReg result = params[0].gpr();
                 MacroAssembler::DataLabelPtr moveLocation = jit.moveWithPatch(MacroAssembler::TrustedImmPtr(nullptr), result);
                 jit.addLinkTask([calleeMoveLocation, moveLocation] (LinkBuffer& linkBuffer) {
-                    *calleeMoveLocation = linkBuffer.locationOf(moveLocation);
+                    *calleeMoveLocation = linkBuffer.locationOf<WasmEntryPtrTag>(moveLocation);
                 });
             });
 
diff --git a/Source/JavaScriptCore/wasm/WasmCodeBlock.h b/Source/JavaScriptCore/wasm/WasmCodeBlock.h
index 780a3a7..0d253fc 100644
--- a/Source/JavaScriptCore/wasm/WasmCodeBlock.h
+++ b/Source/JavaScriptCore/wasm/WasmCodeBlock.h
@@ -96,7 +96,7 @@
         return *m_callees[calleeIndex].get();
     }
 
-    MacroAssemblerCodePtr* entrypointLoadLocationFromFunctionIndexSpace(unsigned functionIndexSpace)
+    MacroAssemblerCodePtr<WasmEntryPtrTag>* entrypointLoadLocationFromFunctionIndexSpace(unsigned functionIndexSpace)
     {
         RELEASE_ASSERT(functionIndexSpace >= functionImportCount());
         unsigned calleeIndex = functionIndexSpace - functionImportCount();
@@ -123,10 +123,10 @@
     Vector<RefPtr<Callee>> m_callees;
     Vector<RefPtr<Callee>> m_optimizedCallees;
     HashMap<uint32_t, RefPtr<Callee>, typename DefaultHash<uint32_t>::Hash, WTF::UnsignedWithZeroKeyHashTraits<uint32_t>> m_embedderCallees;
-    Vector<MacroAssemblerCodePtr> m_wasmIndirectCallEntryPoints;
+    Vector<MacroAssemblerCodePtr<WasmEntryPtrTag>> m_wasmIndirectCallEntryPoints;
     Vector<TierUpCount> m_tierUpCounts;
     Vector<Vector<UnlinkedWasmToWasmCall>> m_wasmToWasmCallsites;
-    Vector<MacroAssemblerCodeRef> m_wasmToWasmExitStubs;
+    Vector<MacroAssemblerCodeRef<WasmEntryPtrTag>> m_wasmToWasmExitStubs;
     RefPtr<BBQPlan> m_plan;
     std::atomic<bool> m_compilationFinished { false };
     String m_errorMessage;
diff --git a/Source/JavaScriptCore/wasm/WasmFaultSignalHandler.cpp b/Source/JavaScriptCore/wasm/WasmFaultSignalHandler.cpp
index 9657108..3e6d16f 100644
--- a/Source/JavaScriptCore/wasm/WasmFaultSignalHandler.cpp
+++ b/Source/JavaScriptCore/wasm/WasmFaultSignalHandler.cpp
@@ -80,7 +80,7 @@
                 dataLogLnIf(WasmFaultSignalHandlerInternal::verbose, "function start: ", RawPointer(start), " end: ", RawPointer(end));
                 if (start <= faultingInstruction && faultingInstruction < end) {
                     dataLogLnIf(WasmFaultSignalHandlerInternal::verbose, "found match");
-                    MacroAssemblerCodeRef exceptionStub = Thunks::singleton().existingStub(throwExceptionFromWasmThunkGenerator);
+                    MacroAssemblerCodeRef<JITThunkPtrTag> exceptionStub = Thunks::singleton().existingStub(throwExceptionFromWasmThunkGenerator);
                     // If for whatever reason we don't have a stub then we should just treat this like a regular crash.
                     if (!exceptionStub)
                         break;
diff --git a/Source/JavaScriptCore/wasm/WasmFormat.h b/Source/JavaScriptCore/wasm/WasmFormat.h
index 8c01fae..d727c59 100644
--- a/Source/JavaScriptCore/wasm/WasmFormat.h
+++ b/Source/JavaScriptCore/wasm/WasmFormat.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2015-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2015-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -256,7 +256,7 @@
 }
 
 struct UnlinkedWasmToWasmCall {
-    CodeLocationNearCall callLocation;
+    CodeLocationNearCall<WasmEntryPtrTag> callLocation;
     size_t functionIndexSpace;
 };
 
@@ -266,7 +266,7 @@
 };
 
 struct InternalFunction {
-    CodeLocationDataLabelPtr calleeMoveLocation;
+    CodeLocationDataLabelPtr<WasmEntryPtrTag> calleeMoveLocation;
     Entrypoint entrypoint;
 };
 
@@ -274,7 +274,7 @@
 // with all imports, and then all internal functions. WasmToWasmImportableFunction and FunctionIndexSpace are only
 // meant as fast lookup tables for these opcodes and do not own code.
 struct WasmToWasmImportableFunction {
-    using LoadLocation = MacroAssemblerCodePtr*;
+    using LoadLocation = MacroAssemblerCodePtr<WasmEntryPtrTag>*;
 #if !COMPILER_SUPPORTS(NSDMI_FOR_AGGREGATES)
     WasmToWasmImportableFunction() = default;
     WasmToWasmImportableFunction(SignatureIndex signatureIndex, LoadLocation entrypointLoadLocation)
diff --git a/Source/JavaScriptCore/wasm/WasmInstance.h b/Source/JavaScriptCore/wasm/WasmInstance.h
index 10ba37f..c70c498 100644
--- a/Source/JavaScriptCore/wasm/WasmInstance.h
+++ b/Source/JavaScriptCore/wasm/WasmInstance.h
@@ -117,7 +117,7 @@
         // Target instance and entrypoint are only set for wasm->wasm calls, and are otherwise nullptr. The embedder-specific logic occurs through the import function.
         Instance* targetInstance { nullptr };
         WasmToWasmImportableFunction::LoadLocation wasmEntrypointLoadLocation { nullptr };
-        MacroAssemblerCodePtr wasmToEmbedderStub;
+        MacroAssemblerCodePtr<WasmEntryPtrTag> wasmToEmbedderStub;
         void* importFunction { nullptr }; // In a JS embedding, this is a PoisonedBarrier<JSObject>.
     };
     unsigned numImportFunctions() const { return m_numImportFunctions; }
diff --git a/Source/JavaScriptCore/wasm/WasmOMGPlan.cpp b/Source/JavaScriptCore/wasm/WasmOMGPlan.cpp
index 84e4100..47dead6 100644
--- a/Source/JavaScriptCore/wasm/WasmOMGPlan.cpp
+++ b/Source/JavaScriptCore/wasm/WasmOMGPlan.cpp
@@ -97,14 +97,13 @@
         return;
     }
 
-    PtrTag callTag = ptrTag(WasmCallPtrTag, signature.hash());
     omgEntrypoint.compilation = std::make_unique<B3::Compilation>(
-        FINALIZE_CODE(linkBuffer, callTag, "WebAssembly OMG function[%i] %s", m_functionIndex, signature.toString().ascii().data()),
+        FINALIZE_CODE(linkBuffer, B3CompilationPtrTag, "WebAssembly OMG function[%i] %s", m_functionIndex, signature.toString().ascii().data()),
         WTFMove(context.wasmEntrypointByproducts));
 
     omgEntrypoint.calleeSaveRegisters = WTFMove(parseAndCompileResult.value()->entrypoint.calleeSaveRegisters);
 
-    MacroAssemblerCodePtr entrypoint;
+    MacroAssemblerCodePtr<WasmEntryPtrTag> entrypoint;
     {
         ASSERT(m_codeBlock.ptr() == m_module->codeBlockFor(mode()));
         Ref<Callee> callee = Callee::create(WTFMove(omgEntrypoint), functionIndexSpace, m_moduleInformation->nameSection->get(functionIndexSpace));
@@ -120,16 +119,13 @@
         m_codeBlock->m_optimizedCallees[m_functionIndex] = WTFMove(callee);
 
         for (auto& call : unlinkedCalls) {
-            MacroAssemblerCodePtr entrypoint;
+            MacroAssemblerCodePtr<WasmEntryPtrTag> entrypoint;
             if (call.functionIndexSpace < m_module->moduleInformation().importFunctionCount())
                 entrypoint = m_codeBlock->m_wasmToWasmExitStubs[call.functionIndexSpace].code();
             else
-                entrypoint = m_codeBlock->wasmEntrypointCalleeFromFunctionIndexSpace(call.functionIndexSpace).entrypoint();
+                entrypoint = m_codeBlock->wasmEntrypointCalleeFromFunctionIndexSpace(call.functionIndexSpace).entrypoint().retagged<WasmEntryPtrTag>();
 
-            SignatureIndex signatureIndex = m_moduleInformation->signatureIndexFromFunctionIndexSpace(call.functionIndexSpace);
-            const Signature& signature = SignatureInformation::get(signatureIndex);
-            PtrTag oldTag = ptrTag(WasmCallPtrTag, signature.hash());
-            MacroAssembler::repatchNearCall(call.callLocation, CodeLocationLabel(entrypoint.retagged(oldTag, NearCodePtrTag)));
+            MacroAssembler::repatchNearCall(call.callLocation, CodeLocationLabel<WasmEntryPtrTag>(entrypoint));
         }
         unlinkedCalls = std::exchange(m_codeBlock->m_wasmToWasmCallsites[m_functionIndex], unlinkedCalls);
     }
@@ -149,10 +145,7 @@
                 dataLogLnIf(WasmOMGPlanInternal::verbose, "Considering repatching call at: ", RawPointer(call.callLocation.dataLocation()), " that targets ", call.functionIndexSpace);
                 if (call.functionIndexSpace == functionIndexSpace) {
                     dataLogLnIf(WasmOMGPlanInternal::verbose, "Repatching call at: ", RawPointer(call.callLocation.dataLocation()), " to ", RawPointer(entrypoint.executableAddress()));
-                    SignatureIndex signatureIndex = m_moduleInformation->signatureIndexFromFunctionIndexSpace(call.functionIndexSpace);
-                    const Signature& signature = SignatureInformation::get(signatureIndex);
-                    PtrTag oldTag = ptrTag(WasmCallPtrTag, signature.hash());
-                    MacroAssembler::repatchNearCall(call.callLocation, CodeLocationLabel(entrypoint.retagged(oldTag, NearCodePtrTag)));
+                    MacroAssembler::repatchNearCall(call.callLocation, CodeLocationLabel<WasmEntryPtrTag>(entrypoint));
                 }
             }
 
diff --git a/Source/JavaScriptCore/wasm/WasmThunks.cpp b/Source/JavaScriptCore/wasm/WasmThunks.cpp
index 38a1f41..d49be9e 100644
--- a/Source/JavaScriptCore/wasm/WasmThunks.cpp
+++ b/Source/JavaScriptCore/wasm/WasmThunks.cpp
@@ -40,7 +40,7 @@
 
 namespace JSC { namespace Wasm {
 
-MacroAssemblerCodeRef throwExceptionFromWasmThunkGenerator(const AbstractLocker&)
+MacroAssemblerCodeRef<JITThunkPtrTag> throwExceptionFromWasmThunkGenerator(const AbstractLocker&)
 {
     CCallHelpers jit;
 
@@ -52,19 +52,18 @@
     jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(GPRInfo::argumentGPR0);
     jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
 
-    PtrTag tag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
-    CCallHelpers::Call call = jit.call(tag);
+    CCallHelpers::Call call = jit.call(OperationPtrTag);
     jit.jump(GPRInfo::returnValueGPR, ExceptionHandlerPtrTag);
     jit.breakpoint(); // We should not reach this.
 
     ThrowWasmException throwWasmException = Thunks::singleton().throwWasmException();
     RELEASE_ASSERT(throwWasmException);
     LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
-    linkBuffer.link(call, FunctionPtr(throwWasmException, tag));
-    return FINALIZE_CODE(linkBuffer, NearCodePtrTag, "Throw exception from Wasm");
+    linkBuffer.link(call, FunctionPtr<OperationPtrTag>(throwWasmException));
+    return FINALIZE_CODE(linkBuffer, JITThunkPtrTag, "Throw exception from Wasm");
 }
 
-MacroAssemblerCodeRef throwStackOverflowFromWasmThunkGenerator(const AbstractLocker& locker)
+MacroAssemblerCodeRef<JITThunkPtrTag> throwStackOverflowFromWasmThunkGenerator(const AbstractLocker& locker)
 {
     CCallHelpers jit;
 
@@ -74,11 +73,11 @@
     jit.move(CCallHelpers::TrustedImm32(static_cast<uint32_t>(ExceptionType::StackOverflow)), GPRInfo::argumentGPR1);
     auto jumpToExceptionHandler = jit.jump();
     LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
-    linkBuffer.link(jumpToExceptionHandler, CodeLocationLabel(Thunks::singleton().stub(locker, throwExceptionFromWasmThunkGenerator).code()));
-    return FINALIZE_CODE(linkBuffer, NearCodePtrTag, "Throw stack overflow from Wasm");
+    linkBuffer.link(jumpToExceptionHandler, CodeLocationLabel<JITThunkPtrTag>(Thunks::singleton().stub(locker, throwExceptionFromWasmThunkGenerator).code()));
+    return FINALIZE_CODE(linkBuffer, JITThunkPtrTag, "Throw stack overflow from Wasm");
 }
 
-MacroAssemblerCodeRef triggerOMGTierUpThunkGenerator(const AbstractLocker&)
+MacroAssemblerCodeRef<JITThunkPtrTag> triggerOMGTierUpThunkGenerator(const AbstractLocker&)
 {
     // We expect that the user has already put the function index into GPRInfo::argumentGPR1
     CCallHelpers jit;
@@ -93,16 +92,15 @@
     jit.loadWasmContextInstance(GPRInfo::argumentGPR0);
     typedef void (*Run)(Instance*, uint32_t);
     Run run = OMGPlan::runForIndex;
-    PtrTag tag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
-    jit.move(MacroAssembler::TrustedImmPtr(tagCFunctionPtr(run, tag)), GPRInfo::argumentGPR2);
-    jit.call(GPRInfo::argumentGPR2, tag);
+    jit.move(MacroAssembler::TrustedImmPtr(tagCFunctionPtr<OperationPtrTag>(run)), GPRInfo::argumentGPR2);
+    jit.call(GPRInfo::argumentGPR2, OperationPtrTag);
 
     ScratchRegisterAllocator::restoreRegistersFromStackForCall(jit, registersToSpill, RegisterSet(), numberOfStackBytesUsedForRegisterPreservation, extraPaddingBytes);
 
     jit.emitFunctionEpilogue();
     jit.ret();
     LinkBuffer linkBuffer(jit, GLOBAL_THUNK_ID);
-    return FINALIZE_CODE(linkBuffer, NearCodePtrTag, "Trigger OMG tier up");
+    return FINALIZE_CODE(linkBuffer, JITThunkPtrTag, "Trigger OMG tier up");
 }
 
 static Thunks* thunks;
@@ -130,28 +128,28 @@
     return m_throwWasmException;
 }
 
-MacroAssemblerCodeRef Thunks::stub(ThunkGenerator generator)
+MacroAssemblerCodeRef<JITThunkPtrTag> Thunks::stub(ThunkGenerator generator)
 {
     auto locker = holdLock(m_lock);
     return stub(locker, generator);
 }
 
-MacroAssemblerCodeRef Thunks::stub(const AbstractLocker& locker, ThunkGenerator generator)
+MacroAssemblerCodeRef<JITThunkPtrTag> Thunks::stub(const AbstractLocker& locker, ThunkGenerator generator)
 {
     ASSERT(!!generator);
     {
-        auto addResult = m_stubs.add(generator, MacroAssemblerCodeRef());
+        auto addResult = m_stubs.add(generator, MacroAssemblerCodeRef<JITThunkPtrTag>());
         if (!addResult.isNewEntry)
             return addResult.iterator->value;
     }
 
-    MacroAssemblerCodeRef code = generator(locker);
+    MacroAssemblerCodeRef<JITThunkPtrTag> code = generator(locker);
     // We specifically don't use the iterator here to allow generator to recursively change m_stubs.
     m_stubs.set(generator, code);
     return code;
 }
 
-MacroAssemblerCodeRef Thunks::existingStub(ThunkGenerator generator)
+MacroAssemblerCodeRef<JITThunkPtrTag> Thunks::existingStub(ThunkGenerator generator)
 {
     auto locker = holdLock(m_lock);
 
@@ -159,7 +157,7 @@
     if (iter != m_stubs.end())
         return iter->value;
 
-    return MacroAssemblerCodeRef();
+    return MacroAssemblerCodeRef<JITThunkPtrTag>();
 }
 
 } } // namespace JSC::Wasm
diff --git a/Source/JavaScriptCore/wasm/WasmThunks.h b/Source/JavaScriptCore/wasm/WasmThunks.h
index 529764e..cc05b6d 100644
--- a/Source/JavaScriptCore/wasm/WasmThunks.h
+++ b/Source/JavaScriptCore/wasm/WasmThunks.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2017-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -32,11 +32,11 @@
 
 namespace JSC { namespace Wasm {
 
-MacroAssemblerCodeRef throwExceptionFromWasmThunkGenerator(const AbstractLocker&);
-MacroAssemblerCodeRef throwStackOverflowFromWasmThunkGenerator(const AbstractLocker&);
-MacroAssemblerCodeRef triggerOMGTierUpThunkGenerator(const AbstractLocker&);
+MacroAssemblerCodeRef<JITThunkPtrTag> throwExceptionFromWasmThunkGenerator(const AbstractLocker&);
+MacroAssemblerCodeRef<JITThunkPtrTag> throwStackOverflowFromWasmThunkGenerator(const AbstractLocker&);
+MacroAssemblerCodeRef<JITThunkPtrTag> triggerOMGTierUpThunkGenerator(const AbstractLocker&);
 
-typedef MacroAssemblerCodeRef (*ThunkGenerator)(const AbstractLocker&);
+typedef MacroAssemblerCodeRef<JITThunkPtrTag> (*ThunkGenerator)(const AbstractLocker&);
 
 class Thunks {
 public:
@@ -46,14 +46,14 @@
     void setThrowWasmException(ThrowWasmException);
     ThrowWasmException throwWasmException();
 
-    MacroAssemblerCodeRef stub(ThunkGenerator);
-    MacroAssemblerCodeRef stub(const AbstractLocker&, ThunkGenerator);
-    MacroAssemblerCodeRef existingStub(ThunkGenerator);
+    MacroAssemblerCodeRef<JITThunkPtrTag> stub(ThunkGenerator);
+    MacroAssemblerCodeRef<JITThunkPtrTag> stub(const AbstractLocker&, ThunkGenerator);
+    MacroAssemblerCodeRef<JITThunkPtrTag> existingStub(ThunkGenerator);
 
 private:
     Thunks() = default;
 
-    HashMap<ThunkGenerator, MacroAssemblerCodeRef> m_stubs;
+    HashMap<ThunkGenerator, MacroAssemblerCodeRef<JITThunkPtrTag>> m_stubs;
     ThrowWasmException m_throwWasmException { nullptr };
     Lock m_lock;
 };
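
The Wasm thunk changes above all follow one pattern: each generator names its tag in its return type (MacroAssemblerCodeRef<JITThunkPtrTag>), calls into C++ helpers through OperationPtrTag, and finalizes with the same tag it declares. The sketch below is a self-contained toy model of why the tag becomes a template parameter rather than a runtime value; the enum and class here are stand-ins, not the real JSC types.

    #include <cassert>
    #include <cstdint>

    // Toy stand-ins for JSC's PtrTag and MacroAssemblerCodePtr<tag>.
    enum class PtrTag : uint32_t { NoPtrTag, JITThunkPtrTag, OperationPtrTag, WasmEntryPtrTag };

    template<PtrTag tag>
    class CodePtr {
    public:
        explicit CodePtr(void* ptr = nullptr) : m_ptr(ptr) { }
        void* executableAddress() const { return m_ptr; }

        // Crossing tag domains must be spelled out, mirroring retagged<newTag>() in the patch.
        template<PtrTag newTag>
        CodePtr<newTag> retagged() const { return CodePtr<newTag>(m_ptr); }

    private:
        void* m_ptr;
    };

    static CodePtr<PtrTag::JITThunkPtrTag> generateThunk()
    {
        static int stub; // stand-in for generated code
        return CodePtr<PtrTag::JITThunkPtrTag>(&stub);
    }

    int main()
    {
        CodePtr<PtrTag::JITThunkPtrTag> thunk = generateThunk();
        // CodePtr<PtrTag::WasmEntryPtrTag> wrong = thunk;   // would not compile: tags differ
        auto entry = thunk.retagged<PtrTag::WasmEntryPtrTag>();
        assert(entry.executableAddress() == thunk.executableAddress());
        return 0;
    }
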
diff --git a/Source/JavaScriptCore/wasm/js/JSToWasm.cpp b/Source/JavaScriptCore/wasm/js/JSToWasm.cpp
index d1ca4c8..fcb9b0c 100644
--- a/Source/JavaScriptCore/wasm/js/JSToWasm.cpp
+++ b/Source/JavaScriptCore/wasm/js/JSToWasm.cpp
@@ -45,9 +45,9 @@
     jit.store64(CCallHelpers::TrustedImm64(0), CCallHelpers::Address(GPRInfo::callFrameRegister, CallFrameSlot::codeBlock * static_cast<int>(sizeof(Register))));
     MacroAssembler::DataLabelPtr calleeMoveLocation = jit.moveWithPatch(MacroAssembler::TrustedImmPtr(nullptr), GPRInfo::nonPreservedNonReturnGPR);
     jit.storePtr(GPRInfo::nonPreservedNonReturnGPR, CCallHelpers::Address(GPRInfo::callFrameRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register))));
-    CodeLocationDataLabelPtr* linkedCalleeMove = &result->calleeMoveLocation;
+    CodeLocationDataLabelPtr<WasmEntryPtrTag>* linkedCalleeMove = &result->calleeMoveLocation;
     jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-        *linkedCalleeMove = linkBuffer.locationOf(calleeMoveLocation);
+        *linkedCalleeMove = linkBuffer.locationOf<WasmEntryPtrTag>(calleeMoveLocation);
     });
 
     const PinnedRegisterInfo& pinnedRegs = PinnedRegisterInfo::get();
@@ -195,7 +195,7 @@
     unsigned functionIndexSpace = functionIndex + info.importFunctionCount();
     ASSERT(functionIndexSpace < info.functionIndexSpaceSize());
     jit.addLinkTask([unlinkedWasmToWasmCalls, call, functionIndexSpace] (LinkBuffer& linkBuffer) {
-        unlinkedWasmToWasmCalls->append({ linkBuffer.locationOfNearCall(call), functionIndexSpace });
+        unlinkedWasmToWasmCalls->append({ linkBuffer.locationOfNearCall<WasmEntryPtrTag>(call), functionIndexSpace });
     });
 
 
diff --git a/Source/JavaScriptCore/wasm/js/JSWebAssemblyCodeBlock.h b/Source/JavaScriptCore/wasm/js/JSWebAssemblyCodeBlock.h
index 28b7c0a..2542af0 100644
--- a/Source/JavaScriptCore/wasm/js/JSWebAssemblyCodeBlock.h
+++ b/Source/JavaScriptCore/wasm/js/JSWebAssemblyCodeBlock.h
@@ -68,7 +68,7 @@
 
     Wasm::CodeBlock& codeBlock() { return m_codeBlock.get(); }
     
-    MacroAssemblerCodePtr wasmToEmbedderStub(size_t importFunctionNum) { return m_wasmToJSExitStubs[importFunctionNum].code(); }
+    MacroAssemblerCodePtr<WasmEntryPtrTag> wasmToEmbedderStub(size_t importFunctionNum) { return m_wasmToJSExitStubs[importFunctionNum].code(); }
 
     void finishCreation(VM&);
 
@@ -98,7 +98,7 @@
     };
 
     PoisonedRef<JSWebAssemblyCodeBlockPoison, Wasm::CodeBlock> m_codeBlock;
-    Vector<MacroAssemblerCodeRef> m_wasmToJSExitStubs;
+    Vector<MacroAssemblerCodeRef<WasmEntryPtrTag>> m_wasmToJSExitStubs;
     PoisonedUniquePtr<JSWebAssemblyCodeBlockPoison, UnconditionalFinalizer> m_unconditionalFinalizer;
     Bag<CallLinkInfo> m_callLinkInfos;
     String m_errorMessage;
diff --git a/Source/JavaScriptCore/wasm/js/WasmToJS.cpp b/Source/JavaScriptCore/wasm/js/WasmToJS.cpp
index f172843..3ffec3a 100644
--- a/Source/JavaScriptCore/wasm/js/WasmToJS.cpp
+++ b/Source/JavaScriptCore/wasm/js/WasmToJS.cpp
@@ -54,7 +54,7 @@
     jit.xor64(poison, result);
 }
 
-static Expected<MacroAssemblerCodeRef, BindingFailure> handleBadI64Use(VM* vm, JIT& jit, const Signature& signature, unsigned importIndex)
+static Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> handleBadI64Use(VM* vm, JIT& jit, const Signature& signature, unsigned importIndex)
 {
     unsigned argCount = signature.argumentCount();
 
@@ -93,8 +93,7 @@
         // Let's be paranoid on the exception path and zero out the poison instead of leaving it in an argument GPR.
         jit.move(CCallHelpers::TrustedImm32(0), GPRInfo::argumentGPR3);
 
-        PtrTag callTag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
-        auto call = jit.call(callTag);
+        auto call = jit.call(OperationPtrTag);
         jit.jumpToExceptionHandler(*vm);
 
         void (*throwBadI64)(ExecState*, JSWebAssemblyInstance*) = [] (ExecState* exec, JSWebAssemblyInstance* instance) -> void {
@@ -116,15 +115,14 @@
         if (UNLIKELY(linkBuffer.didFailToAllocate()))
             return makeUnexpected(BindingFailure::OutOfMemory);
 
-        linkBuffer.link(call, FunctionPtr(throwBadI64, callTag));
-        PtrTag tag = ptrTag(WasmCallPtrTag, signature.hash());
-        return FINALIZE_CODE(linkBuffer, tag, "WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex);
+        linkBuffer.link(call, FunctionPtr<OperationPtrTag>(throwBadI64));
+        return FINALIZE_CODE(linkBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript invalid i64 use in import[%i]", importIndex);
     }
     
-    return MacroAssemblerCodeRef();
+    return MacroAssemblerCodeRef<WasmEntryPtrTag>();
 }
 
-Expected<MacroAssemblerCodeRef, BindingFailure> wasmToJS(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
+Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> wasmToJS(VM* vm, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex signatureIndex, unsigned importIndex)
 {
     // FIXME: This function doesn't properly abstract away the calling convention.
     // It'd be super easy to do so: https://bugs.webkit.org/show_bug.cgi?id=169401
@@ -302,12 +300,9 @@
         // Let's be paranoid before the call and zero out the poison instead of leaving it in an argument GPR.
         jit.move(CCallHelpers::TrustedImm32(0), GPRInfo::argumentGPR3);
 
-        PtrTag callTag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
-        PtrTag doUnwindingTag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
-
         static_assert(GPRInfo::numberOfArgumentRegisters >= 4, "We rely on this with the call below.");
         jit.setupArguments<decltype(callFunc)>(GPRInfo::argumentGPR1, CCallHelpers::TrustedImm32(signatureIndex), CCallHelpers::TrustedImmPtr(buffer));
-        auto call = jit.call(callTag);
+        auto call = jit.call(OperationPtrTag);
         auto noException = jit.emitExceptionCheck(*vm, AssemblyHelpers::InvertedExceptionCheck);
 
         // Exception here.
@@ -319,7 +314,7 @@
             genericUnwind(vm, exec);
             ASSERT(!!vm->callFrameForCatch);
         };
-        auto exceptionCall = jit.call(doUnwindingTag);
+        auto exceptionCall = jit.call(OperationPtrTag);
         jit.jumpToExceptionHandler(*vm);
 
         noException.link(&jit);
@@ -344,11 +339,10 @@
         if (UNLIKELY(linkBuffer.didFailToAllocate()))
             return makeUnexpected(BindingFailure::OutOfMemory);
 
-        linkBuffer.link(call, FunctionPtr(callFunc, callTag));
-        linkBuffer.link(exceptionCall, FunctionPtr(doUnwinding, doUnwindingTag));
+        linkBuffer.link(call, FunctionPtr<OperationPtrTag>(callFunc));
+        linkBuffer.link(exceptionCall, FunctionPtr<OperationPtrTag>(doUnwinding));
 
-        PtrTag tag = ptrTag(WasmCallPtrTag, signature.hash());
-        return FINALIZE_CODE(linkBuffer, tag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
+        return FINALIZE_CODE(linkBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
     }
 
     // Note: We don't need to perform a stack check here since WasmB3IRGenerator
@@ -550,14 +544,13 @@
         jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, GPRInfo::returnValueGPR);
         done.append(jit.jump());
 
-        PtrTag tag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
         slowPath.link(&jit);
         jit.setupArguments<decltype(convertToI32)>(GPRInfo::returnValueGPR);
-        auto call = jit.call(tag);
+        auto call = jit.call(OperationPtrTag);
         exceptionChecks.append(jit.emitJumpIfException(*vm));
 
         jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-            linkBuffer.link(call, FunctionPtr(convertToI32, tag));
+            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(convertToI32));
         });
 
         done.link(&jit);
@@ -586,14 +579,13 @@
         jit.convertDoubleToFloat(FPRInfo::returnValueFPR, FPRInfo::returnValueFPR);
         done.append(jit.jump());
 
-        PtrTag tag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
         notANumber.link(&jit);
         jit.setupArguments<decltype(convertToF32)>(GPRInfo::returnValueGPR);
-        auto call = jit.call(tag);
+        auto call = jit.call(OperationPtrTag);
         exceptionChecks.append(jit.emitJumpIfException(*vm));
 
         jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-            linkBuffer.link(call, FunctionPtr(convertToF32, tag));
+            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(convertToF32));
         });
 
         done.link(&jit);
@@ -621,14 +613,13 @@
         jit.move64ToDouble(GPRInfo::returnValueGPR, FPRInfo::returnValueFPR);
         done.append(jit.jump());
 
-        PtrTag tag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
         notANumber.link(&jit);
         jit.setupArguments<decltype(convertToF64)>(GPRInfo::returnValueGPR);
-        auto call = jit.call(tag);
+        auto call = jit.call(OperationPtrTag);
         exceptionChecks.append(jit.emitJumpIfException(*vm));
 
         jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-            linkBuffer.link(call, FunctionPtr(convertToF64, tag));
+            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(convertToF64));
         });
 
         done.link(&jit);
@@ -640,11 +631,10 @@
     jit.ret();
 
     if (!exceptionChecks.empty()) {
-        PtrTag tag = ptrTag(WasmHelperPtrTag, nextPtrTagID());
         exceptionChecks.link(&jit);
         jit.copyCalleeSavesToEntryFrameCalleeSavesBuffer(vm->topEntryFrame);
         jit.move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
-        auto call = jit.call(tag);
+        auto call = jit.call(OperationPtrTag);
         jit.jumpToExceptionHandler(*vm);
 
         void (*doUnwinding)(ExecState*) = [] (ExecState* exec) -> void {
@@ -655,7 +645,7 @@
         };
 
         jit.addLinkTask([=] (LinkBuffer& linkBuffer) {
-            linkBuffer.link(call, FunctionPtr(doUnwinding, tag));
+            linkBuffer.link(call, FunctionPtr<OperationPtrTag>(doUnwinding));
         });
     }
 
@@ -663,15 +653,13 @@
     if (UNLIKELY(patchBuffer.didFailToAllocate()))
         return makeUnexpected(BindingFailure::OutOfMemory);
 
-    PtrTag linkTag = ptrTag(LinkCallPtrTag, vm);
-    patchBuffer.link(slowCall, FunctionPtr(vm->getCTIStub(linkCallThunkGenerator).retaggedCode(linkTag, NearCodePtrTag)));
-    CodeLocationLabel callReturnLocation(patchBuffer.locationOfNearCall(slowCall));
-    CodeLocationLabel hotPathBegin(patchBuffer.locationOf(targetToCheck));
-    CodeLocationNearCall hotPathOther = patchBuffer.locationOfNearCall(fastCall);
+    patchBuffer.link(slowCall, FunctionPtr<JITThunkPtrTag>(vm->getCTIStub(linkCallThunkGenerator).code()));
+    CodeLocationLabel<JSEntryPtrTag> callReturnLocation(patchBuffer.locationOfNearCall<JSEntryPtrTag>(slowCall));
+    CodeLocationLabel<JSEntryPtrTag> hotPathBegin(patchBuffer.locationOf<JSEntryPtrTag>(targetToCheck));
+    CodeLocationNearCall<JSEntryPtrTag> hotPathOther = patchBuffer.locationOfNearCall<JSEntryPtrTag>(fastCall);
     callLinkInfo->setCallLocations(callReturnLocation, hotPathBegin, hotPathOther);
 
-    PtrTag tag = ptrTag(WasmCallPtrTag, signature.hash());
-    return FINALIZE_CODE(patchBuffer, tag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
+    return FINALIZE_CODE(patchBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
 }
 
 void* wasmToJSException(ExecState* exec, Wasm::ExceptionType type, Instance* wasmInstance)
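
For orientation, the wasmToJS stub above now touches four different tags. Their roles, with expressions copied from the hunk rather than new code, are:

    // OperationPtrTag: C++ helpers reached from the stub.
    //     auto call = jit.call(OperationPtrTag);
    //     linkBuffer.link(call, FunctionPtr<OperationPtrTag>(doUnwinding));
    // JITThunkPtrTag: the shared link-call thunk fetched from the VM.
    //     patchBuffer.link(slowCall, FunctionPtr<JITThunkPtrTag>(vm->getCTIStub(linkCallThunkGenerator).code()));
    // JSEntryPtrTag: the JS call IC locations read back from the LinkBuffer.
    //     CodeLocationNearCall<JSEntryPtrTag> hotPathOther = patchBuffer.locationOfNearCall<JSEntryPtrTag>(fastCall);
    // WasmEntryPtrTag: the finished import stub itself.
    //     return FINALIZE_CODE(patchBuffer, WasmEntryPtrTag, "WebAssembly->JavaScript import[%i] %s", importIndex, signature.toString().ascii().data());
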
diff --git a/Source/JavaScriptCore/wasm/js/WasmToJS.h b/Source/JavaScriptCore/wasm/js/WasmToJS.h
index cc29425..e0c74c3 100644
--- a/Source/JavaScriptCore/wasm/js/WasmToJS.h
+++ b/Source/JavaScriptCore/wasm/js/WasmToJS.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
+ * Copyright (C) 2016-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -42,7 +42,7 @@
 
 class Instance;
 
-Expected<MacroAssemblerCodeRef, BindingFailure> wasmToJS(VM*, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex, unsigned importIndex);
+Expected<MacroAssemblerCodeRef<WasmEntryPtrTag>, BindingFailure> wasmToJS(VM*, Bag<CallLinkInfo>& callLinkInfos, SignatureIndex, unsigned importIndex);
 
 void* wasmToJSException(ExecState*, Wasm::ExceptionType, Instance*);
 
diff --git a/Source/JavaScriptCore/wasm/js/WebAssemblyFunction.h b/Source/JavaScriptCore/wasm/js/WebAssemblyFunction.h
index 402ae99..3443900 100644
--- a/Source/JavaScriptCore/wasm/js/WebAssemblyFunction.h
+++ b/Source/JavaScriptCore/wasm/js/WebAssemblyFunction.h
@@ -64,7 +64,7 @@
     WasmToWasmImportableFunction::LoadLocation entrypointLoadLocation() const { return m_importableFunction.entrypointLoadLocation; }
     WasmToWasmImportableFunction importableFunction() const { return m_importableFunction; }
 
-    MacroAssemblerCodePtr jsEntrypoint(ArityCheckMode arity)
+    MacroAssemblerCodePtr<WasmEntryPtrTag> jsEntrypoint(ArityCheckMode arity)
     {
         if (arity == ArityCheckNotRequired)
             return m_jsEntrypoint;
@@ -80,7 +80,7 @@
     // It's safe to just hold the raw WasmToWasmImportableFunction/jsEntrypoint because we have a reference
     // to our Instance, which points to the Module that exported us, which
     // ensures that the actual Signature/code doesn't get deallocated.
-    MacroAssemblerCodePtr m_jsEntrypoint;
+    MacroAssemblerCodePtr<WasmEntryPtrTag> m_jsEntrypoint;
     WasmToWasmImportableFunction m_importableFunction;
 };
 
diff --git a/Source/JavaScriptCore/yarr/YarrJIT.cpp b/Source/JavaScriptCore/yarr/YarrJIT.cpp
index d4fc4d6..ab70f6d 100644
--- a/Source/JavaScriptCore/yarr/YarrJIT.cpp
+++ b/Source/JavaScriptCore/yarr/YarrJIT.cpp
@@ -604,7 +604,7 @@
 
     void loadFromFrameAndJump(unsigned frameLocation)
     {
-        jump(Address(stackPointerRegister, frameLocation * sizeof(void*)), ptrTag(YarrBacktrackPtrTag, &m_codeBlock));
+        jump(Address(stackPointerRegister, frameLocation * sizeof(void*)), YarrBacktrackPtrTag);
     }
 
     unsigned alignCallFrameSizeInBytes(unsigned callFrameSize)
@@ -927,11 +927,11 @@
         }
 
         // Called at the end of code generation to link all return addresses.
-        void linkDataLabels(LinkBuffer& linkBuffer, YarrCodeBlock& codeBlock)
+        void linkDataLabels(LinkBuffer& linkBuffer)
         {
             ASSERT(isEmpty());
             for (unsigned i = 0; i < m_backtrackRecords.size(); ++i)
-                linkBuffer.patch(m_backtrackRecords[i].m_dataLabel, linkBuffer.locationOf(m_backtrackRecords[i].m_backtrackLocation, ptrTag(YarrBacktrackPtrTag, &codeBlock)));
+                linkBuffer.patch(m_backtrackRecords[i].m_dataLabel, linkBuffer.locationOf<YarrBacktrackPtrTag>(m_backtrackRecords[i].m_backtrackLocation));
         }
 
     private:
@@ -3508,24 +3508,24 @@
         }
 
         if (!m_tryReadUnicodeCharacterCalls.isEmpty()) {
-            CodeLocationLabel tryReadUnicodeCharacterHelper = linkBuffer.locationOf(m_tryReadUnicodeCharacterEntry, NearCodePtrTag);
+            CodeLocationLabel<NoPtrTag> tryReadUnicodeCharacterHelper = linkBuffer.locationOf<NoPtrTag>(m_tryReadUnicodeCharacterEntry);
 
             for (auto call : m_tryReadUnicodeCharacterCalls)
                 linkBuffer.link(call, tryReadUnicodeCharacterHelper);
         }
 
-        m_backtrackingState.linkDataLabels(linkBuffer, codeBlock);
+        m_backtrackingState.linkDataLabels(linkBuffer);
 
         if (compileMode == MatchOnly) {
             if (m_charSize == Char8)
-                codeBlock.set8BitCodeMatchOnly(FINALIZE_CODE(linkBuffer, ptrTag(YarrMatchOnly8BitPtrTag, &codeBlock), "Match-only 8-bit regular expression"));
+                codeBlock.set8BitCodeMatchOnly(FINALIZE_CODE(linkBuffer, YarrMatchOnly8BitPtrTag, "Match-only 8-bit regular expression"));
             else
-                codeBlock.set16BitCodeMatchOnly(FINALIZE_CODE(linkBuffer, ptrTag(YarrMatchOnly16BitPtrTag, &codeBlock), "Match-only 16-bit regular expression"));
+                codeBlock.set16BitCodeMatchOnly(FINALIZE_CODE(linkBuffer, YarrMatchOnly16BitPtrTag, "Match-only 16-bit regular expression"));
         } else {
             if (m_charSize == Char8)
-                codeBlock.set8BitCode(FINALIZE_CODE(linkBuffer, ptrTag(Yarr8BitPtrTag, &codeBlock), "8-bit regular expression"));
+                codeBlock.set8BitCode(FINALIZE_CODE(linkBuffer, Yarr8BitPtrTag, "8-bit regular expression"));
             else
-                codeBlock.set16BitCode(FINALIZE_CODE(linkBuffer, ptrTag(Yarr16BitPtrTag, &codeBlock), "16-bit regular expression"));
+                codeBlock.set16BitCode(FINALIZE_CODE(linkBuffer, Yarr16BitPtrTag, "16-bit regular expression"));
         }
         if (m_failureReason)
             codeBlock.setFallBackWithFailureReason(*m_failureReason);
diff --git a/Source/JavaScriptCore/yarr/YarrJIT.h b/Source/JavaScriptCore/yarr/YarrJIT.h
index dbb73c8..07d3998 100644
--- a/Source/JavaScriptCore/yarr/YarrJIT.h
+++ b/Source/JavaScriptCore/yarr/YarrJIT.h
@@ -86,13 +86,13 @@
 
     bool has8BitCode() { return m_ref8.size(); }
     bool has16BitCode() { return m_ref16.size(); }
-    void set8BitCode(MacroAssemblerCodeRef ref) { m_ref8 = ref; }
-    void set16BitCode(MacroAssemblerCodeRef ref) { m_ref16 = ref; }
+    void set8BitCode(MacroAssemblerCodeRef<Yarr8BitPtrTag> ref) { m_ref8 = ref; }
+    void set16BitCode(MacroAssemblerCodeRef<Yarr16BitPtrTag> ref) { m_ref16 = ref; }
 
     bool has8BitCodeMatchOnly() { return m_matchOnly8.size(); }
     bool has16BitCodeMatchOnly() { return m_matchOnly16.size(); }
-    void set8BitCodeMatchOnly(MacroAssemblerCodeRef matchOnly) { m_matchOnly8 = matchOnly; }
-    void set16BitCodeMatchOnly(MacroAssemblerCodeRef matchOnly) { m_matchOnly16 = matchOnly; }
+    void set8BitCodeMatchOnly(MacroAssemblerCodeRef<YarrMatchOnly8BitPtrTag> matchOnly) { m_matchOnly8 = matchOnly; }
+    void set16BitCodeMatchOnly(MacroAssemblerCodeRef<YarrMatchOnly16BitPtrTag> matchOnly) { m_matchOnly16 = matchOnly; }
 
 #if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
     bool usesPatternContextBuffer() { return m_usesPatternContextBuffer; }
@@ -101,25 +101,25 @@
     MatchResult execute(const LChar* input, unsigned start, unsigned length, int* output, void* freeParenContext, unsigned parenContextSize)
     {
         ASSERT(has8BitCode());
-        return MatchResult(untagCFunctionPtr<YarrJITCode8>(m_ref8.code().executableAddress(), ptrTag(Yarr8BitPtrTag, this))(input, start, length, output, freeParenContext, parenContextSize));
+        return MatchResult(untagCFunctionPtr<YarrJITCode8, Yarr8BitPtrTag>(m_ref8.code().executableAddress())(input, start, length, output, freeParenContext, parenContextSize));
     }
 
     MatchResult execute(const UChar* input, unsigned start, unsigned length, int* output, void* freeParenContext, unsigned parenContextSize)
     {
         ASSERT(has16BitCode());
-        return MatchResult(untagCFunctionPtr<YarrJITCode16>(m_ref16.code().executableAddress(), ptrTag(Yarr16BitPtrTag, this))(input, start, length, output, freeParenContext, parenContextSize));
+        return MatchResult(untagCFunctionPtr<YarrJITCode16, Yarr16BitPtrTag>(m_ref16.code().executableAddress())(input, start, length, output, freeParenContext, parenContextSize));
     }
 
     MatchResult execute(const LChar* input, unsigned start, unsigned length, void* freeParenContext, unsigned parenContextSize)
     {
         ASSERT(has8BitCodeMatchOnly());
-        return MatchResult(untagCFunctionPtr<YarrJITCodeMatchOnly8>(m_matchOnly8.code().executableAddress(), ptrTag(YarrMatchOnly8BitPtrTag, this))(input, start, length, 0, freeParenContext, parenContextSize));
+        return MatchResult(untagCFunctionPtr<YarrJITCodeMatchOnly8, YarrMatchOnly8BitPtrTag>(m_matchOnly8.code().executableAddress())(input, start, length, 0, freeParenContext, parenContextSize));
     }
 
     MatchResult execute(const UChar* input, unsigned start, unsigned length, void* freeParenContext, unsigned parenContextSize)
     {
         ASSERT(has16BitCodeMatchOnly());
-        return MatchResult(untagCFunctionPtr<YarrJITCodeMatchOnly16>(m_matchOnly16.code().executableAddress(), ptrTag(YarrMatchOnly16BitPtrTag, this))(input, start, length, 0, freeParenContext, parenContextSize));
+        return MatchResult(untagCFunctionPtr<YarrJITCodeMatchOnly16, YarrMatchOnly16BitPtrTag>(m_matchOnly16.code().executableAddress())(input, start, length, 0, freeParenContext, parenContextSize));
     }
 #else
     MatchResult execute(const LChar* input, unsigned start, unsigned length, int* output)
@@ -188,18 +188,18 @@
 
     void clear()
     {
-        m_ref8 = MacroAssemblerCodeRef();
-        m_ref16 = MacroAssemblerCodeRef();
-        m_matchOnly8 = MacroAssemblerCodeRef();
-        m_matchOnly16 = MacroAssemblerCodeRef();
+        m_ref8 = MacroAssemblerCodeRef<Yarr8BitPtrTag>();
+        m_ref16 = MacroAssemblerCodeRef<Yarr16BitPtrTag>();
+        m_matchOnly8 = MacroAssemblerCodeRef<YarrMatchOnly8BitPtrTag>();
+        m_matchOnly16 = MacroAssemblerCodeRef<YarrMatchOnly16BitPtrTag>();
         m_failureReason = std::nullopt;
     }
 
 private:
-    MacroAssemblerCodeRef m_ref8;
-    MacroAssemblerCodeRef m_ref16;
-    MacroAssemblerCodeRef m_matchOnly8;
-    MacroAssemblerCodeRef m_matchOnly16;
+    MacroAssemblerCodeRef<Yarr8BitPtrTag> m_ref8;
+    MacroAssemblerCodeRef<Yarr16BitPtrTag> m_ref16;
+    MacroAssemblerCodeRef<YarrMatchOnly8BitPtrTag> m_matchOnly8;
+    MacroAssemblerCodeRef<YarrMatchOnly16BitPtrTag> m_matchOnly16;
 #if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
     bool m_usesPatternContextBuffer;
 #endif
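
On the Yarr side, the execute() methods above show the consumer half of the pattern: untagCFunctionPtr takes the tag as a second template argument instead of a runtime ptrTag(Yarr8BitPtrTag, this) value. A self-contained sketch of that call shape, using toy stand-ins rather than the real JSC helper:

    #include <cassert>
    #include <cstdint>

    enum class PtrTag : uint32_t { Yarr8BitPtrTag, Yarr16BitPtrTag };

    // Toy untag helper; the real one strips/authenticates pointer signing on supported hardware.
    template<typename FunctionType, PtrTag tag>
    FunctionType untagCFunctionPtr(void* taggedAddress)
    {
        return reinterpret_cast<FunctionType>(taggedAddress);
    }

    using YarrJITCode8 = int (*)(const char* input, unsigned start, unsigned length);

    static int matchStub(const char*, unsigned, unsigned) { return -1; } // stand-in for JIT output

    int main()
    {
        void* code = reinterpret_cast<void*>(&matchStub);
        // The tag is now named in the call's type signature rather than computed at run time.
        YarrJITCode8 match = untagCFunctionPtr<YarrJITCode8, PtrTag::Yarr8BitPtrTag>(code);
        assert(match("abc", 0, 3) == -1);
        return 0;
    }
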
diff --git a/Source/WebCore/ChangeLog b/Source/WebCore/ChangeLog
index d18975d..51a9d7d 100644
--- a/Source/WebCore/ChangeLog
+++ b/Source/WebCore/ChangeLog
@@ -1,3 +1,40 @@
+2018-04-17  Mark Lam  <mark.lam@apple.com>
+
+        Templatize CodePtr/Refs/FunctionPtrs with PtrTags.
+        https://bugs.webkit.org/show_bug.cgi?id=184702
+        <rdar://problem/35391681>
+
+        Reviewed by Filip Pizlo and Saam Barati.
+
+        No new tests. This is covered by existing tests.
+
+        * WebCore.xcodeproj/project.pbxproj:
+        * css/ElementRuleCollector.cpp:
+        (WebCore::ElementRuleCollector::ruleMatches):
+        * cssjit/CSSPtrTag.h: Added.
+        * cssjit/CompiledSelector.h:
+        * cssjit/FunctionCall.h:
+        (WebCore::FunctionCall::FunctionCall):
+        (WebCore::FunctionCall::setFunctionAddress):
+        (WebCore::FunctionCall::prepareAndCall):
+        * cssjit/SelectorCompiler.cpp:
+        (WebCore::SelectorCompiler::compileSelector):
+        (WebCore::SelectorCompiler::SelectorFragment::appendUnoptimizedPseudoClassWithContext):
+        (WebCore::SelectorCompiler::addPseudoClassType):
+        (WebCore::SelectorCompiler::SelectorCodeGenerator::compile):
+        (WebCore::SelectorCompiler::SelectorCodeGenerator::generateElementAttributeFunctionCallValueMatching):
+        (WebCore::SelectorCompiler::SelectorCodeGenerator::generateElementFunctionCallTest):
+        (WebCore::SelectorCompiler::SelectorCodeGenerator::generateContextFunctionCallTest):
+        * cssjit/SelectorCompiler.h:
+        (WebCore::SelectorCompiler::ruleCollectorSimpleSelectorCheckerFunction):
+        (WebCore::SelectorCompiler::querySelectorSimpleSelectorCheckerFunction):
+        (WebCore::SelectorCompiler::ruleCollectorSelectorCheckerFunctionWithCheckingContext):
+        (WebCore::SelectorCompiler::querySelectorSelectorCheckerFunctionWithCheckingContext):
+        * dom/SelectorQuery.cpp:
+        (WebCore::SelectorDataList::executeCompiledSingleMultiSelectorData const):
+        (WebCore::SelectorDataList::execute const):
+        * dom/SelectorQuery.h:
+
 2018-04-17  Tadeu Zagallo  <tzagallo@apple.com>
 
         Retain MessagePortChannel for transfer when disentangling ports
diff --git a/Source/WebCore/WebCore.xcodeproj/project.pbxproj b/Source/WebCore/WebCore.xcodeproj/project.pbxproj
index 31fbbff..fb20ba9 100644
--- a/Source/WebCore/WebCore.xcodeproj/project.pbxproj
+++ b/Source/WebCore/WebCore.xcodeproj/project.pbxproj
@@ -4949,6 +4949,7 @@
 		FDF6BAF9134A4C9800822920 /* JSOfflineAudioCompletionEvent.h in Headers */ = {isa = PBXBuildFile; fileRef = FDF6BAF7134A4C9800822920 /* JSOfflineAudioCompletionEvent.h */; };
 		FDF7E9C413AC21DB00A51EAC /* JSAudioBufferCallback.h in Headers */ = {isa = PBXBuildFile; fileRef = FDF7E9C213AC21DB00A51EAC /* JSAudioBufferCallback.h */; };
 		FE0D84E910484348001A179E /* WebEvent.h in Headers */ = {isa = PBXBuildFile; fileRef = FE0D84E810484348001A179E /* WebEvent.h */; settings = {ATTRIBUTES = (Private, ); }; };
+		FE271F672082DBE500A952D4 /* CSSPtrTag.h in Headers */ = {isa = PBXBuildFile; fileRef = FE271F642082DBBE00A952D4 /* CSSPtrTag.h */; };
 		FE36FD1516C7826500F887C1 /* ChangeVersionData.h in Headers */ = {isa = PBXBuildFile; fileRef = FE36FD1116C7826400F887C1 /* ChangeVersionData.h */; };
 		FE36FD1716C7826500F887C1 /* SQLTransactionStateMachine.h in Headers */ = {isa = PBXBuildFile; fileRef = FE36FD1316C7826400F887C1 /* SQLTransactionStateMachine.h */; };
 		FE36FD1816C7826500F887C1 /* SQLTransactionState.h in Headers */ = {isa = PBXBuildFile; fileRef = FE36FD1416C7826400F887C1 /* SQLTransactionState.h */; };
@@ -14796,6 +14797,7 @@
 		FDF7E9C213AC21DB00A51EAC /* JSAudioBufferCallback.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JSAudioBufferCallback.h; sourceTree = "<group>"; };
 		FE0D84E810484348001A179E /* WebEvent.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WebEvent.h; sourceTree = "<group>"; };
 		FE0D84EA1048436E001A179E /* WebEvent.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = WebEvent.mm; sourceTree = "<group>"; };
+		FE271F642082DBBE00A952D4 /* CSSPtrTag.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CSSPtrTag.h; sourceTree = "<group>"; };
 		FE36FD1116C7826400F887C1 /* ChangeVersionData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ChangeVersionData.h; sourceTree = "<group>"; };
 		FE36FD1216C7826400F887C1 /* SQLTransactionStateMachine.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = SQLTransactionStateMachine.cpp; sourceTree = "<group>"; };
 		FE36FD1316C7826400F887C1 /* SQLTransactionStateMachine.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SQLTransactionStateMachine.h; sourceTree = "<group>"; };
@@ -16177,6 +16179,7 @@
 		26B9998D1803ADFA00D01121 /* cssjit */ = {
 			isa = PBXGroup;
 			children = (
+				FE271F642082DBBE00A952D4 /* CSSPtrTag.h */,
 				E4451077202C7E0100657D33 /* CompiledSelector.h */,
 				26B999921803B9D900D01121 /* FunctionCall.h */,
 				26B9998E1803AE7200D01121 /* RegisterAllocator.h */,
@@ -27644,6 +27647,7 @@
 				6E0E569C183BFFE600E0E8D5 /* FloatRoundedRect.h in Headers */,
 				B275356D0B053814002CE64F /* FloatSize.h in Headers */,
 				58CD35CB18EB4C3900B9F3AC /* FloatSizeHash.h in Headers */,
+				FE271F672082DBE500A952D4 /* CSSPtrTag.h in Headers */,
 				14993BE60B2F2B1C0050497F /* FocusController.h in Headers */,
 				062287840B4DB322000C34DF /* FocusDirection.h in Headers */,
 				B6D9D23514EABD260090D75E /* FocusEvent.h in Headers */,
diff --git a/Source/WebCore/css/ElementRuleCollector.cpp b/Source/WebCore/css/ElementRuleCollector.cpp
index 8871be2..b309301 100644
--- a/Source/WebCore/css/ElementRuleCollector.cpp
+++ b/Source/WebCore/css/ElementRuleCollector.cpp
@@ -392,7 +392,7 @@
     }
 
     if (compiledSelectorChecker && compiledSelector.status == SelectorCompilationStatus::SimpleSelectorChecker) {
-        auto selectorChecker = SelectorCompiler::ruleCollectorSimpleSelectorCheckerFunction(compiledSelector.codeRef, compiledSelectorChecker, compiledSelector.status);
+        auto selectorChecker = SelectorCompiler::ruleCollectorSimpleSelectorCheckerFunction(compiledSelectorChecker, compiledSelector.status);
 #if !ASSERT_MSG_DISABLED
         unsigned ignoreSpecificity;
         ASSERT_WITH_MESSAGE(!selectorChecker(&m_element, &ignoreSpecificity) || m_pseudoStyleRequest.pseudoId == NOPSEUDO, "When matching pseudo elements, we should never compile a selector checker without context unless it cannot match anything.");
@@ -420,7 +420,7 @@
     if (compiledSelectorChecker) {
         ASSERT(compiledSelector.status == SelectorCompilationStatus::SelectorCheckerWithCheckingContext);
 
-        auto selectorChecker = SelectorCompiler::ruleCollectorSelectorCheckerFunctionWithCheckingContext(compiledSelector.codeRef, compiledSelectorChecker, compiledSelector.status);
+        auto selectorChecker = SelectorCompiler::ruleCollectorSelectorCheckerFunctionWithCheckingContext(compiledSelectorChecker, compiledSelector.status);
 
 #if CSS_SELECTOR_JIT_PROFILING
         compiledSelector.useCount++;
diff --git a/Source/WebCore/cssjit/CSSPtrTag.h b/Source/WebCore/cssjit/CSSPtrTag.h
new file mode 100644
index 0000000..0c9bb07
--- /dev/null
+++ b/Source/WebCore/cssjit/CSSPtrTag.h
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2018 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#pragma once
+
+#if ENABLE(CSS_SELECTOR_JIT)
+
+#include <JavaScriptCore/PtrTag.h>
+
+namespace WebCore {
+
+static constexpr JSC::PtrTag CSSSelectorPtrTag = static_cast<JSC::PtrTag>(0xc551);
+static constexpr JSC::PtrTag CSSOperationPtrTag = static_cast<JSC::PtrTag>(0xc552);
+
+} // namespace WebCore
+
+#endif // ENABLE(CSS_SELECTOR_JIT)
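
CSSPtrTag.h is a new file: it defines the two tags WebCore needs now that the JSC types carry a PtrTag template argument. Gathering how each one is used in the cssjit hunks that follow (expressions from those diffs, not new API):

    // CSSSelectorPtrTag labels the generated selector checker itself:
    //     JSC::MacroAssemblerCodeRef<CSSSelectorPtrTag> codeRef;                      // CompiledSelector.h
    //     codeRef = FINALIZE_CODE(linkBuffer, CSSSelectorPtrTag, "CSS Selector JIT"); // SelectorCompiler.cpp
    // CSSOperationPtrTag labels the C++ predicates the generated code calls back into:
    //     fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isAutofilled));
    //     JSC::MacroAssembler::Call call = m_assembler.call(CSSOperationPtrTag);      // FunctionCall.h
    //     linkBuffer.link(m_functionCalls[i].first, m_functionCalls[i].second);       // links each Call to its FunctionPtr<CSSOperationPtrTag>
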
diff --git a/Source/WebCore/cssjit/CompiledSelector.h b/Source/WebCore/cssjit/CompiledSelector.h
index 099ac93..7ea4850 100644
--- a/Source/WebCore/cssjit/CompiledSelector.h
+++ b/Source/WebCore/cssjit/CompiledSelector.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2018 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -27,6 +27,7 @@
 
 #if ENABLE(CSS_SELECTOR_JIT)
 
+#include "CSSPtrTag.h"
 #include <JavaScriptCore/MacroAssemblerCodeRef.h>
 
 namespace WebCore {
@@ -57,7 +58,7 @@
 struct CompiledSelector {
     WTF_MAKE_STRUCT_FAST_ALLOCATED;
     SelectorCompilationStatus status;
-    JSC::MacroAssemblerCodeRef codeRef;
+    JSC::MacroAssemblerCodeRef<CSSSelectorPtrTag> codeRef;
 #if defined(CSS_SELECTOR_JIT_PROFILING) && CSS_SELECTOR_JIT_PROFILING
     unsigned useCount { 0 };
 #endif
diff --git a/Source/WebCore/cssjit/FunctionCall.h b/Source/WebCore/cssjit/FunctionCall.h
index 9b5166b..346cf28 100644
--- a/Source/WebCore/cssjit/FunctionCall.h
+++ b/Source/WebCore/cssjit/FunctionCall.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -27,6 +27,7 @@
 
 #if ENABLE(CSS_SELECTOR_JIT)
 
+#include "CSSPtrTag.h"
 #include "RegisterAllocator.h"
 #include "StackAllocator.h"
 #include <JavaScriptCore/GPRInfo.h>
@@ -36,7 +37,7 @@
 
 class FunctionCall {
 public:
-    FunctionCall(JSC::MacroAssembler& assembler, RegisterAllocator& registerAllocator, StackAllocator& stackAllocator, Vector<std::pair<JSC::MacroAssembler::Call, JSC::FunctionPtr>, 32>& callRegistry)
+    FunctionCall(JSC::MacroAssembler& assembler, RegisterAllocator& registerAllocator, StackAllocator& stackAllocator, Vector<std::pair<JSC::MacroAssembler::Call, JSC::FunctionPtr<CSSOperationPtrTag>>, 32>& callRegistry)
         : m_assembler(assembler)
         , m_registerAllocator(registerAllocator)
         , m_stackAllocator(stackAllocator)
@@ -47,7 +48,7 @@
     {
     }
 
-    void setFunctionAddress(JSC::FunctionPtr functionAddress)
+    void setFunctionAddress(JSC::FunctionPtr<CSSOperationPtrTag> functionAddress)
     {
         m_functionAddress = functionAddress;
     }
@@ -147,7 +148,7 @@
                 m_assembler.move(m_firstArgument, JSC::GPRInfo::argumentGPR0);
         }
 
-        JSC::MacroAssembler::Call call = m_assembler.call(JSC::CFunctionPtrTag);
+        JSC::MacroAssembler::Call call = m_assembler.call(CSSOperationPtrTag);
         m_callRegistry.append(std::make_pair(call, m_functionAddress));
     }
 
@@ -178,12 +179,12 @@
     JSC::MacroAssembler& m_assembler;
     RegisterAllocator& m_registerAllocator;
     StackAllocator& m_stackAllocator;
-    Vector<std::pair<JSC::MacroAssembler::Call, JSC::FunctionPtr>, 32>& m_callRegistry;
+    Vector<std::pair<JSC::MacroAssembler::Call, JSC::FunctionPtr<CSSOperationPtrTag>>, 32>& m_callRegistry;
 
     RegisterVector m_savedRegisters;
     StackAllocator::StackReferenceVector m_savedRegisterStackReferences;
     
-    JSC::FunctionPtr m_functionAddress;
+    JSC::FunctionPtr<CSSOperationPtrTag> m_functionAddress;
     unsigned m_argumentCount;
     JSC::MacroAssembler::RegisterID m_firstArgument;
     JSC::MacroAssembler::RegisterID m_secondArgument;
diff --git a/Source/WebCore/cssjit/SelectorCompiler.cpp b/Source/WebCore/cssjit/SelectorCompiler.cpp
index fde0409..b77081d 100644
--- a/Source/WebCore/cssjit/SelectorCompiler.cpp
+++ b/Source/WebCore/cssjit/SelectorCompiler.cpp
@@ -201,8 +201,8 @@
     Vector<const Vector<AtomicString>*> languageArgumentsList;
     Vector<const AtomicStringImpl*, 8> classNames;
     HashSet<unsigned> pseudoClasses;
-    Vector<JSC::FunctionPtr, 4> unoptimizedPseudoClasses;
-    Vector<JSC::FunctionPtr, 4> unoptimizedPseudoClassesWithContext;
+    Vector<JSC::FunctionPtr<CSSOperationPtrTag>, 4> unoptimizedPseudoClasses;
+    Vector<JSC::FunctionPtr<CSSOperationPtrTag>, 4> unoptimizedPseudoClassesWithContext;
     Vector<AttributeMatchingInfo, 4> attributes;
     Vector<std::pair<int, int>, 2> nthChildFilters;
     Vector<NthChildOfSelectorInfo> nthChildOfFilters;
@@ -258,7 +258,7 @@
 class SelectorCodeGenerator {
 public:
     SelectorCodeGenerator(const CSSSelector*, SelectorContext);
-    SelectorCompilationStatus compile(JSC::MacroAssemblerCodeRef&);
+    SelectorCompilationStatus compile(JSC::MacroAssemblerCodeRef<CSSSelectorPtrTag>&);
 
 private:
     static const Assembler::RegisterID returnRegister;
@@ -292,8 +292,8 @@
     void generateElementMatching(Assembler::JumpList& matchingTagNameFailureCases, Assembler::JumpList& matchingPostTagNameFailureCases, const SelectorFragment&);
     void generateElementDataMatching(Assembler::JumpList& failureCases, const SelectorFragment&);
     void generateElementLinkMatching(Assembler::JumpList& failureCases, const SelectorFragment&);
-    void generateElementFunctionCallTest(Assembler::JumpList& failureCases, JSC::FunctionPtr);
-    void generateContextFunctionCallTest(Assembler::JumpList& failureCases, JSC::FunctionPtr);
+    void generateElementFunctionCallTest(Assembler::JumpList& failureCases, JSC::FunctionPtr<CSSOperationPtrTag>);
+    void generateContextFunctionCallTest(Assembler::JumpList& failureCases, JSC::FunctionPtr<CSSOperationPtrTag>);
     void generateElementIsActive(Assembler::JumpList& failureCases, const SelectorFragment&);
     void generateElementIsEmpty(Assembler::JumpList& failureCases);
     void generateElementIsFirstChild(Assembler::JumpList& failureCases);
@@ -309,7 +309,7 @@
     void generateElementAttributeMatching(Assembler::JumpList& failureCases, Assembler::RegisterID currentAttributeAddress, Assembler::RegisterID decIndexRegister, const AttributeMatchingInfo& attributeInfo);
     void generateElementAttributeValueMatching(Assembler::JumpList& failureCases, Assembler::RegisterID currentAttributeAddress, const AttributeMatchingInfo& attributeInfo);
     void generateElementAttributeValueExactMatching(Assembler::JumpList& failureCases, Assembler::RegisterID currentAttributeAddress, const AtomicString& expectedValue, AttributeCaseSensitivity valueCaseSensitivity);
-    void generateElementAttributeFunctionCallValueMatching(Assembler::JumpList& failureCases, Assembler::RegisterID currentAttributeAddress, const AtomicString& expectedValue, AttributeCaseSensitivity valueCaseSensitivity, JSC::FunctionPtr caseSensitiveTest, JSC::FunctionPtr caseInsensitiveTest);
+    void generateElementAttributeFunctionCallValueMatching(Assembler::JumpList& failureCases, Assembler::RegisterID currentAttributeAddress, const AtomicString& expectedValue, AttributeCaseSensitivity valueCaseSensitivity, JSC::FunctionPtr<CSSOperationPtrTag> caseSensitiveTest, JSC::FunctionPtr<CSSOperationPtrTag> caseInsensitiveTest);
     void generateElementHasTagName(Assembler::JumpList& failureCases, const CSSSelector& tagMatchingSelector);
     void generateElementHasId(Assembler::JumpList& failureCases, const LocalRegister& elementDataAddress, const AtomicString& idToMatch);
     void generateElementHasClasses(Assembler::JumpList& failureCases, const LocalRegister& elementDataAddress, const Vector<const AtomicStringImpl*, 8>& classNames);
@@ -355,7 +355,7 @@
     Assembler m_assembler;
     RegisterAllocator m_registerAllocator;
     StackAllocator m_stackAllocator;
-    Vector<std::pair<Assembler::Call, JSC::FunctionPtr>, 32> m_functionCalls;
+    Vector<std::pair<Assembler::Call, JSC::FunctionPtr<CSSOperationPtrTag>>, 32> m_functionCalls;
 
     SelectorContext m_selectorContext;
     FunctionType m_functionType;
@@ -392,7 +392,7 @@
 
 static void computeBacktrackingInformation(SelectorFragmentList& selectorFragments, unsigned level = 0);
 
-SelectorCompilationStatus compileSelector(const CSSSelector* lastSelector, SelectorContext selectorContext, JSC::MacroAssemblerCodeRef& codeRef)
+SelectorCompilationStatus compileSelector(const CSSSelector* lastSelector, SelectorContext selectorContext, JSC::MacroAssemblerCodeRef<CSSSelectorPtrTag>& codeRef)
 {
     if (!JSC::VM::canUseJIT())
         return SelectorCompilationStatus::CannotCompile;
@@ -436,7 +436,7 @@
 
 FunctionType SelectorFragment::appendUnoptimizedPseudoClassWithContext(bool (*matcher)(const SelectorChecker::CheckingContext&))
 {
-    unoptimizedPseudoClassesWithContext.append(JSC::FunctionPtr(matcher));
+    unoptimizedPseudoClassesWithContext.append(JSC::FunctionPtr<CSSOperationPtrTag>(matcher));
     return FunctionType::SelectorCheckerWithCheckingContext;
 }
 
@@ -534,83 +534,83 @@
     switch (type) {
     // Unoptimized pseudo selector. They are just function call to a simple testing function.
     case CSSSelector::PseudoClassAutofill:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isAutofilled));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isAutofilled));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassAutofillStrongPassword:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isAutofilledStrongPassword));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isAutofilledStrongPassword));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassChecked:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isChecked));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isChecked));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassDefault:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesDefaultPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesDefaultPseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassDisabled:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesDisabledPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesDisabledPseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassEnabled:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesEnabledPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesEnabledPseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassDefined:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isDefinedElement));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isDefinedElement));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassFocus:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(SelectorChecker::matchesFocusPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(SelectorChecker::matchesFocusPseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassFullPageMedia:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isMediaDocument));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isMediaDocument));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassInRange:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isInRange));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isInRange));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassIndeterminate:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesIndeterminatePseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesIndeterminatePseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassInvalid:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isInvalid));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isInvalid));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassOptional:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isOptionalFormControl));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isOptionalFormControl));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassOutOfRange:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isOutOfRange));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isOutOfRange));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassReadOnly:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesReadOnlyPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesReadOnlyPseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassReadWrite:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesReadWritePseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesReadWritePseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassRequired:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isRequiredFormControl));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isRequiredFormControl));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassValid:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isValid));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isValid));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassWindowInactive:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(isWindowInactive));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isWindowInactive));
         return FunctionType::SimpleSelectorChecker;
 
 #if ENABLE(FULLSCREEN_API)
     case CSSSelector::PseudoClassFullScreen:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesFullScreenPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesFullScreenPseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassFullScreenDocument:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesFullScreenDocumentPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesFullScreenDocumentPseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassFullScreenAncestor:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesFullScreenAncestorPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesFullScreenAncestorPseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassAnimatingFullScreenTransition:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesFullScreenAnimatingFullScreenTransitionPseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesFullScreenAnimatingFullScreenTransitionPseudoClass));
         return FunctionType::SimpleSelectorChecker;
 #endif
 #if ENABLE(VIDEO_TRACK)
     case CSSSelector::PseudoClassFuture:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesFutureCuePseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesFutureCuePseudoClass));
         return FunctionType::SimpleSelectorChecker;
     case CSSSelector::PseudoClassPast:
-        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr(matchesPastCuePseudoClass));
+        fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(matchesPastCuePseudoClass));
         return FunctionType::SimpleSelectorChecker;
 #endif
 
@@ -1216,7 +1216,7 @@
     }
 }
 
-inline SelectorCompilationStatus SelectorCodeGenerator::compile(JSC::MacroAssemblerCodeRef& codeRef)
+inline SelectorCompilationStatus SelectorCodeGenerator::compile(JSC::MacroAssemblerCodeRef<CSSSelectorPtrTag>& codeRef)
 {
     switch (m_functionType) {
     case FunctionType::SimpleSelectorChecker:
@@ -1244,11 +1244,10 @@
     for (unsigned i = 0; i < m_functionCalls.size(); i++)
         linkBuffer.link(m_functionCalls[i].first, m_functionCalls[i].second);
 
-    JSC::PtrTag tag = ptrTag(&codeRef, m_selectorContext);
 #if CSS_SELECTOR_JIT_DEBUGGING
-    codeRef = linkBuffer.finalizeCodeWithDisassembly(tag, "CSS Selector JIT for \"%s\"", m_originalSelector->selectorText().utf8().data());
+    codeRef = linkBuffer.finalizeCodeWithDisassembly(CSSSelectorPtrTag, "CSS Selector JIT for \"%s\"", m_originalSelector->selectorText().utf8().data());
 #else
-    codeRef = FINALIZE_CODE(linkBuffer, tag, "CSS Selector JIT");
+    codeRef = FINALIZE_CODE(linkBuffer, CSSSelectorPtrTag, "CSS Selector JIT");
 #endif
 
     if (m_functionType == FunctionType::SimpleSelectorChecker || m_functionType == FunctionType::CannotMatchAnything)
@@ -3019,7 +3018,7 @@
     }
 }
 
-void SelectorCodeGenerator::generateElementAttributeFunctionCallValueMatching(Assembler::JumpList& failureCases, Assembler::RegisterID currentAttributeAddress, const AtomicString& expectedValue, AttributeCaseSensitivity valueCaseSensitivity, JSC::FunctionPtr caseSensitiveTest, JSC::FunctionPtr caseInsensitiveTest)
+void SelectorCodeGenerator::generateElementAttributeFunctionCallValueMatching(Assembler::JumpList& failureCases, Assembler::RegisterID currentAttributeAddress, const AtomicString& expectedValue, AttributeCaseSensitivity valueCaseSensitivity, JSC::FunctionPtr<CSSOperationPtrTag> caseSensitiveTest, JSC::FunctionPtr<CSSOperationPtrTag> caseInsensitiveTest)
 {
     LocalRegisterWithPreference expectedValueRegister(m_registerAllocator, JSC::GPRInfo::argumentGPR1);
     m_assembler.move(Assembler::TrustedImmPtr(expectedValue.impl()), expectedValueRegister);
@@ -3075,7 +3074,7 @@
     }
 }
 
-void SelectorCodeGenerator::generateElementFunctionCallTest(Assembler::JumpList& failureCases, JSC::FunctionPtr testFunction)
+void SelectorCodeGenerator::generateElementFunctionCallTest(Assembler::JumpList& failureCases, JSC::FunctionPtr<CSSOperationPtrTag> testFunction)
 {
     Assembler::RegisterID elementAddress = elementAddressRegister;
     FunctionCall functionCall(m_assembler, m_registerAllocator, m_stackAllocator, m_functionCalls);
@@ -3084,7 +3083,7 @@
     failureCases.append(functionCall.callAndBranchOnBooleanReturnValue(Assembler::Zero));
 }
 
-void SelectorCodeGenerator::generateContextFunctionCallTest(Assembler::JumpList& failureCases, JSC::FunctionPtr testFunction)
+void SelectorCodeGenerator::generateContextFunctionCallTest(Assembler::JumpList& failureCases, JSC::FunctionPtr<CSSOperationPtrTag> testFunction)
 {
     Assembler::RegisterID checkingContext = m_registerAllocator.allocateRegister();
     loadCheckingContext(checkingContext);
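
        For context, a minimal sketch of the pattern the SelectorCompiler.cpp changes above
        rely on, assuming a simplified FunctionPtr that carries its PtrTag as a template
        parameter (the real JavaScriptCore class does considerably more, including signing
        the pointer on hardware that supports pointer authentication):

            // Simplified sketch only; not the actual JSC::FunctionPtr implementation.
            enum PtrTag : uintptr_t { NoPtrTag, CSSOperationPtrTag, CSSSelectorPtrTag };

            template<PtrTag tag>
            class FunctionPtr {
            public:
                FunctionPtr() = default;

                template<typename ReturnType, typename... Arguments>
                explicit FunctionPtr(ReturnType (*function)(Arguments...))
                    : m_value(reinterpret_cast<void*>(function)) // a real implementation would also tag/sign here
                {
                }

                void* executableAddress() const { return m_value; }

            private:
                void* m_value { nullptr };
            };

            // Call sites now name the tag in the type, e.g.:
            //     fragment.unoptimizedPseudoClasses.append(JSC::FunctionPtr<CSSOperationPtrTag>(isValid));
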
diff --git a/Source/WebCore/cssjit/SelectorCompiler.h b/Source/WebCore/cssjit/SelectorCompiler.h
index 4971545..6099b86 100644
--- a/Source/WebCore/cssjit/SelectorCompiler.h
+++ b/Source/WebCore/cssjit/SelectorCompiler.h
@@ -53,34 +53,30 @@
 typedef unsigned (*RuleCollectorSelectorCheckerWithCheckingContext)(const Element*, SelectorChecker::CheckingContext*, unsigned*);
 typedef unsigned (*QuerySelectorSelectorCheckerWithCheckingContext)(const Element*, const SelectorChecker::CheckingContext*);
 
-SelectorCompilationStatus compileSelector(const CSSSelector*, SelectorContext, JSC::MacroAssemblerCodeRef& outputCodeRef);
+SelectorCompilationStatus compileSelector(const CSSSelector*, SelectorContext, JSC::MacroAssemblerCodeRef<CSSSelectorPtrTag>& outputCodeRef);
 
-inline RuleCollectorSimpleSelectorChecker ruleCollectorSimpleSelectorCheckerFunction(const JSC::MacroAssemblerCodeRef& codeRef, void* executableAddress, SelectorCompilationStatus compilationStatus)
+inline RuleCollectorSimpleSelectorChecker ruleCollectorSimpleSelectorCheckerFunction(void* executableAddress, SelectorCompilationStatus compilationStatus)
 {
     ASSERT_UNUSED(compilationStatus, compilationStatus == SelectorCompilationStatus::SimpleSelectorChecker);
-    JSC::PtrTag tag = JSC::ptrTag(&codeRef, SelectorContext::RuleCollector);
-    return JSC::untagCFunctionPtr<RuleCollectorSimpleSelectorChecker>(executableAddress, tag);
+    return JSC::untagCFunctionPtr<RuleCollectorSimpleSelectorChecker, CSSSelectorPtrTag>(executableAddress);
 }
 
-inline QuerySelectorSimpleSelectorChecker querySelectorSimpleSelectorCheckerFunction(const JSC::MacroAssemblerCodeRef& codeRef, void* executableAddress, SelectorCompilationStatus compilationStatus)
+inline QuerySelectorSimpleSelectorChecker querySelectorSimpleSelectorCheckerFunction(void* executableAddress, SelectorCompilationStatus compilationStatus)
 {
     ASSERT_UNUSED(compilationStatus, compilationStatus == SelectorCompilationStatus::SimpleSelectorChecker);
-    JSC::PtrTag tag = JSC::ptrTag(&codeRef, SelectorContext::QuerySelector);
-    return JSC::untagCFunctionPtr<QuerySelectorSimpleSelectorChecker>(executableAddress, tag);
+    return JSC::untagCFunctionPtr<QuerySelectorSimpleSelectorChecker, CSSSelectorPtrTag>(executableAddress);
 }
 
-inline RuleCollectorSelectorCheckerWithCheckingContext ruleCollectorSelectorCheckerFunctionWithCheckingContext(const JSC::MacroAssemblerCodeRef& codeRef, void* executableAddress, SelectorCompilationStatus compilationStatus)
+inline RuleCollectorSelectorCheckerWithCheckingContext ruleCollectorSelectorCheckerFunctionWithCheckingContext(void* executableAddress, SelectorCompilationStatus compilationStatus)
 {
     ASSERT_UNUSED(compilationStatus, compilationStatus == SelectorCompilationStatus::SelectorCheckerWithCheckingContext);
-    JSC::PtrTag tag = JSC::ptrTag(&codeRef, SelectorContext::RuleCollector);
-    return JSC::untagCFunctionPtr<RuleCollectorSelectorCheckerWithCheckingContext>(executableAddress, tag);
+    return JSC::untagCFunctionPtr<RuleCollectorSelectorCheckerWithCheckingContext, CSSSelectorPtrTag>(executableAddress);
 }
 
-inline QuerySelectorSelectorCheckerWithCheckingContext querySelectorSelectorCheckerFunctionWithCheckingContext(const JSC::MacroAssemblerCodeRef& codeRef, void* executableAddress, SelectorCompilationStatus compilationStatus)
+inline QuerySelectorSelectorCheckerWithCheckingContext querySelectorSelectorCheckerFunctionWithCheckingContext(void* executableAddress, SelectorCompilationStatus compilationStatus)
 {
     ASSERT_UNUSED(compilationStatus, compilationStatus == SelectorCompilationStatus::SelectorCheckerWithCheckingContext);
-    JSC::PtrTag tag = JSC::ptrTag(&codeRef, SelectorContext::QuerySelector);
-    return JSC::untagCFunctionPtr<QuerySelectorSelectorCheckerWithCheckingContext>(executableAddress, tag);
+    return JSC::untagCFunctionPtr<QuerySelectorSelectorCheckerWithCheckingContext, CSSSelectorPtrTag>(executableAddress);
 }
 
 } // namespace SelectorCompiler
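
        The SelectorCompiler.h helpers above drop their MacroAssemblerCodeRef parameter
        because the tag is now a compile-time template argument rather than a value derived
        from the codeRef at runtime. A rough sketch of the shape of such an untagging helper,
        under the simplifying assumption that no pointer-authentication hardware is involved
        (where it is, the real JSC::untagCFunctionPtr also strips the signature):

            // Sketch of the general shape; not the actual JSC implementation.
            template<typename FunctionType, PtrTag tag>
            FunctionType untagCFunctionPtr(void* executableAddress)
            {
                // The tag is baked into the template arguments, so the per-codeRef
                // runtime tag computation (the old JSC::ptrTag(&codeRef, context)
                // call) is no longer needed.
                return reinterpret_cast<FunctionType>(executableAddress);
            }
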
diff --git a/Source/WebCore/dom/SelectorQuery.cpp b/Source/WebCore/dom/SelectorQuery.cpp
index a8a5df9..79e862d 100644
--- a/Source/WebCore/dom/SelectorQuery.cpp
+++ b/Source/WebCore/dom/SelectorQuery.cpp
@@ -451,11 +451,11 @@
             bool matched = false;
             void* compiledSelectorChecker = selector.compiledSelectorCodeRef.code().executableAddress();
             if (selector.compilationStatus == SelectorCompilationStatus::SimpleSelectorChecker) {
-                auto selectorChecker = SelectorCompiler::querySelectorSimpleSelectorCheckerFunction(selector.compiledSelectorCodeRef, compiledSelectorChecker, selector.compilationStatus);
+                auto selectorChecker = SelectorCompiler::querySelectorSimpleSelectorCheckerFunction(compiledSelectorChecker, selector.compilationStatus);
                 matched = selectorChecker(&element);
             } else {
                 ASSERT(selector.compilationStatus == SelectorCompilationStatus::SelectorCheckerWithCheckingContext);
-                auto selectorChecker = SelectorCompiler::querySelectorSelectorCheckerFunctionWithCheckingContext(selector.compiledSelectorCodeRef, compiledSelectorChecker, selector.compilationStatus);
+                auto selectorChecker = SelectorCompiler::querySelectorSelectorCheckerFunctionWithCheckingContext(compiledSelectorChecker, selector.compilationStatus);
                 matched = selectorChecker(&element, &checkingContext);
             }
             if (matched) {
@@ -543,11 +543,11 @@
         const SelectorData& selectorData = m_selectors.first();
         void* compiledSelectorChecker = selectorData.compiledSelectorCodeRef.code().executableAddress();
         if (selectorData.compilationStatus == SelectorCompilationStatus::SimpleSelectorChecker) {
-            SelectorCompiler::QuerySelectorSimpleSelectorChecker selectorChecker = SelectorCompiler::querySelectorSimpleSelectorCheckerFunction(selectorData.compiledSelectorCodeRef, compiledSelectorChecker, selectorData.compilationStatus);
+            SelectorCompiler::QuerySelectorSimpleSelectorChecker selectorChecker = SelectorCompiler::querySelectorSimpleSelectorCheckerFunction(compiledSelectorChecker, selectorData.compilationStatus);
             executeCompiledSimpleSelectorChecker<SelectorQueryTrait>(*searchRootNode, selectorChecker, output, selectorData);
         } else {
             ASSERT(selectorData.compilationStatus == SelectorCompilationStatus::SelectorCheckerWithCheckingContext);
-            SelectorCompiler::QuerySelectorSelectorCheckerWithCheckingContext selectorChecker = SelectorCompiler::querySelectorSelectorCheckerFunctionWithCheckingContext(selectorData.compiledSelectorCodeRef, compiledSelectorChecker, selectorData.compilationStatus);
+            SelectorCompiler::QuerySelectorSelectorCheckerWithCheckingContext selectorChecker = SelectorCompiler::querySelectorSelectorCheckerFunctionWithCheckingContext(compiledSelectorChecker, selectorData.compilationStatus);
             executeCompiledSelectorCheckerWithCheckingContext<SelectorQueryTrait>(rootNode, *searchRootNode, selectorChecker, output, selectorData);
         }
         break;
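
        With the codeRef argument gone, the SelectorQuery.cpp call sites shrink to roughly
        the following shape (condensed from the hunks above):

            // Condensed example of the updated call pattern in SelectorQuery.cpp.
            void* compiledSelectorChecker = selectorData.compiledSelectorCodeRef.code().executableAddress();
            auto selectorChecker = SelectorCompiler::querySelectorSimpleSelectorCheckerFunction(
                compiledSelectorChecker, selectorData.compilationStatus);
            bool matched = selectorChecker(&element);
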
diff --git a/Source/WebCore/dom/SelectorQuery.h b/Source/WebCore/dom/SelectorQuery.h
index 197b10d..c6a4409 100644
--- a/Source/WebCore/dom/SelectorQuery.h
+++ b/Source/WebCore/dom/SelectorQuery.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011, 2013, 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -60,7 +60,7 @@
 
         const CSSSelector* selector;
 #if ENABLE(CSS_SELECTOR_JIT)
-        mutable JSC::MacroAssemblerCodeRef compiledSelectorCodeRef;
+        mutable JSC::MacroAssemblerCodeRef<CSSSelectorPtrTag> compiledSelectorCodeRef;
         mutable SelectorCompilationStatus compilationStatus;
 #if CSS_SELECTOR_JIT_PROFILING
         ~SelectorData()