assert(RI.getOperand().isUse() &&
"The only use of the vreg must be a use, we haven't emitted the def!");
+ MachineInstr *User = &*RI;
+
+ // Set the insertion point properly. Folding the load can cause generation of
+ // other random instructions (like sign extends) for addressing modes, make
+ // sure they get inserted in a logical place before the new instruction.
+ FuncInfo->InsertPt = User;
+ FuncInfo->MBB = User->getParent();
+
// Ask the target to try folding the load.
- return FastIS->TryToFoldLoad(&*RI, RI.getOperandNo(), LI);
+ return FastIS->TryToFoldLoad(User, RI.getOperandNo(), LI);
}
#ifndef NDEBUG
if (Inst != Begin)
BeforeInst = llvm::prior(llvm::prior(BI));
if (BeforeInst && isa<LoadInst>(BeforeInst) &&
- BeforeInst->hasOneUse() && *BeforeInst->use_begin() == Inst) {
- FastIS->recomputeInsertPt();
- if (TryToFoldFastISelLoad(cast<LoadInst>(BeforeInst), FastIS))
- --BI; // If we succeeded, don't re-select the load.
- }
+ BeforeInst->hasOneUse() && *BeforeInst->use_begin() == Inst &&
+ TryToFoldFastISelLoad(cast<LoadInst>(BeforeInst), FastIS))
+ --BI; // If we succeeded, don't re-select the load.
continue;
}
XII.foldMemoryOperandImpl(*FuncInfo.MF, MI, OpNo, AddrOps, Size, Alignment);
if (Result == 0) return false;
- MI->getParent()->insert(MI, Result);
+ FuncInfo.MBB->insert(FuncInfo.InsertPt, Result);
MI->eraseFromParent();
return true;
}
; X64: test4:
; X64: 128(%r{{.*}},%r{{.*}},8)
}
+
+; PR8961 - Make sure the sext for the GEP addressing comes before the load that
+; is folded.
+define i64 @test5(i8* %A, i32 %I, i64 %B) nounwind {
+  ; GEP of an i8* by an i32 index: on x86-64 the 32-bit index has to be
+  ; sign-extended to 64 bits (the movslq checked below) before it can be
+  ; used in the load's addressing mode.
+  %v8 = getelementptr i8* %A, i32 %I
+  %v9 = bitcast i8* %v8 to i64*
+  ; The CHECK-NEXT chain below pins the order: the sext (movslq) must be
+  ; emitted BEFORE the load that uses it as an index register.
+  %v10 = load i64* %v9
+  %v11 = add i64 %B, %v10
+  ret i64 %v11
+; X64: test5:
+; X64: movslq %esi, %rax
+; X64-NEXT: movq (%rdi,%rax), %rax
+; X64-NEXT: addq %rdx, %rax
+; X64-NEXT: ret
+}
+
+