@@ -16537,8 +16537,9 @@ Lowerer::GenerateFastElemIIntIndexCommon(
                 IR::BailOutConventionalNativeArrayAccessOnly |
                 IR::BailOutOnMissingValue |
                 (bailOutKind & IR::BailOutOnArrayAccessHelperCall ? IR::BailOutInvalid : IR::BailOutConvertedNativeArray)
-            )
-        ));
+            )
+        )
+    );

     if (bailOutKind & IR::BailOutOnArrayAccessHelperCall)
     {
@@ -16975,7 +16976,7 @@ Lowerer::GenerateFastElemIIntIndexCommon(

     IR::Opnd * tmpDst = nullptr;
     IR::Opnd * dst = instr->GetDst();
-    //Pop might not have a dst, if not don't worry about returning the last element. But we still have to
+    // Pop might not have a dst, if not don't worry about returning the last element. But we still have to
     // worry about gaps, because these force us to access the prototype chain, which may have side-effects.
     if (dst || !baseValueType.HasNoMissingValues())
     {
@@ -16989,6 +16990,77 @@ Lowerer::GenerateFastElemIIntIndexCommon(
         dst = tmpDst;
     }

+    {
+        // Use a mask to prevent arbitrary speculative reads
+        if (!headSegmentLengthOpnd)
+        {
+            headSegmentLengthOpnd =
+                IR::IndirOpnd::New(headSegmentOpnd, Js::SparseArraySegmentBase::GetOffsetOfLength(), TyUint32, m_func);
+            autoReuseHeadSegmentLengthOpnd.Initialize(headSegmentLengthOpnd, m_func);
+        }
+        IR::RegOpnd* localMaskOpnd = nullptr;
+#if TARGET_64
+        IR::Opnd* lengthOpnd = nullptr;
+        AnalysisAssert(headSegmentLengthOpnd != nullptr);
+        lengthOpnd = IR::RegOpnd::New(headSegmentLengthOpnd->GetType(), m_func);
+        {
+            IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, lengthOpnd, headSegmentLengthOpnd, m_func);
+            instr->InsertBefore(instrMov);
+            LowererMD::Legalize(instrMov);
+        }
+
+        if (lengthOpnd->GetSize() != MachPtr)
+        {
+            lengthOpnd = lengthOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
+        }
+
+        // MOV r1, [opnd + offset(type)]
+        IR::RegOpnd* indexValueRegOpnd = IR::RegOpnd::New(indexValueOpnd->GetType(), m_func);
+
+        {
+            IR::Instr * instrMov = IR::Instr::New(Js::OpCode::MOV_TRUNC, indexValueRegOpnd, indexValueOpnd, m_func);
+            instr->InsertBefore(instrMov);
+            LowererMD::Legalize(instrMov);
+        }
+
+        if (indexValueRegOpnd->GetSize() != MachPtr)
+        {
+            indexValueRegOpnd = indexValueRegOpnd->UseWithNewType(TyMachPtr, this->m_func)->AsRegOpnd();
+        }
+
+        localMaskOpnd = IR::RegOpnd::New(TyMachPtr, m_func);
+        InsertSub(false, localMaskOpnd, indexValueRegOpnd, lengthOpnd, instr);
+        InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(63, TyInt8, m_func), instr);
+#else
+        localMaskOpnd = IR::RegOpnd::New(TyInt32, m_func);
+        InsertSub(false, localMaskOpnd, indexValueOpnd, headSegmentLengthOpnd, instr);
+        InsertShift(Js::OpCode::Shr_A, false, localMaskOpnd, localMaskOpnd, IR::IntConstOpnd::New(31, TyInt8, m_func), instr);
+#endif
+
+        // for pop we always do the masking before the load in cases where we load a value
+        IR::RegOpnd* loadAddr = IR::RegOpnd::New(TyMachPtr, m_func);
+
+#if _M_ARM32_OR_ARM64
+        if (indirOpnd->GetIndexOpnd() != nullptr && indirOpnd->GetScale() > 0)
+        {
+            // We don't support encoding for LEA with scale on ARM/ARM64, so do the scale calculation as a separate instruction
+            IR::RegOpnd* fullIndexOpnd = IR::RegOpnd::New(indirOpnd->GetIndexOpnd()->GetType(), m_func);
+            InsertShift(Js::OpCode::Shl_A, false, fullIndexOpnd, indirOpnd->GetIndexOpnd(), IR::IntConstOpnd::New(indirOpnd->GetScale(), TyInt8, m_func), instr);
+            IR::IndirOpnd* newIndir = IR::IndirOpnd::New(indirOpnd->GetBaseOpnd(), fullIndexOpnd, indirType, m_func);
+            if (indirOpnd->GetOffset() != 0)
+            {
+                newIndir->SetOffset(indirOpnd->GetOffset());
+            }
+            indirOpnd = newIndir;
+        }
+#endif
+        IR::AutoReuseOpnd reuseIndir(indirOpnd, m_func);
+
+        InsertLea(loadAddr, indirOpnd, instr);
+        InsertAnd(loadAddr, loadAddr, localMaskOpnd, instr);
+        indirOpnd = IR::IndirOpnd::New(loadAddr, 0, indirType, m_func);
+    }
+
     // MOV dst, [head + offset]
     InsertMove(dst, indirOpnd, instr);

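The block added above emits a branchless bounds mask before the element load: it subtracts the head segment length from the index and arithmetic-shifts the difference by 63 (31 on 32-bit targets), producing an all-ones mask when the index is in bounds and zero otherwise, then ANDs the load address with that mask. A mispredicted out-of-bounds path therefore loads from address 0 instead of an attacker-controlled location. A minimal standalone sketch of the same idea, with illustrative names (SpeculationSafeLoad is not a ChakraCore function), assuming the usual arithmetic behavior of signed right shift:

    #include <cstdint>

    // Sketch of the speculative-load mask the lowered IR above computes.
    // The JIT'd code has already branched on index < length, so this load is
    // always architecturally in bounds; the mask only matters when the branch
    // is mispredicted and the load executes speculatively.
    int32_t SpeculationSafeLoad(const int32_t* segment, uint64_t index, uint64_t length)
    {
        // index <  length : index - length wraps negative, shift yields all ones
        // index >= length : difference is non-negative, shift yields zero
        int64_t mask = static_cast<int64_t>(index - length) >> 63;

        // In bounds the AND leaves the address unchanged; out of bounds it
        // forces the address to null, so speculation cannot read arbitrary memory.
        uintptr_t addr = reinterpret_cast<uintptr_t>(segment + index)
                       & static_cast<uintptr_t>(mask);
        return *reinterpret_cast<const int32_t*>(addr);
    }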