comparison lib/CodeGen/VirtRegMap.cpp @ 95:afa8332a0e37

LLVM 3.8
author Kaito Tokumori <e105711@ie.u-ryukyu.ac.jp>
date Tue, 13 Oct 2015 17:48:58 +0900
parents 60c9769439b8
children 7d135dc70f03
comparison
equal deleted inserted replaced
84:f3e34b893a5f 95:afa8332a0e37
161 const TargetInstrInfo *TII; 161 const TargetInstrInfo *TII;
162 MachineRegisterInfo *MRI; 162 MachineRegisterInfo *MRI;
163 SlotIndexes *Indexes; 163 SlotIndexes *Indexes;
164 LiveIntervals *LIS; 164 LiveIntervals *LIS;
165 VirtRegMap *VRM; 165 VirtRegMap *VRM;
166 SparseSet<unsigned> PhysRegs;
167 166
168 void rewrite(); 167 void rewrite();
169 void addMBBLiveIns(); 168 void addMBBLiveIns();
169 bool readsUndefSubreg(const MachineOperand &MO) const;
170 void addLiveInsForSubRanges(const LiveInterval &LI, unsigned PhysReg) const;
171
170 public: 172 public:
171 static char ID; 173 static char ID;
172 VirtRegRewriter() : MachineFunctionPass(ID) {} 174 VirtRegRewriter() : MachineFunctionPass(ID) {}
173 175
174 void getAnalysisUsage(AnalysisUsage &AU) const override; 176 void getAnalysisUsage(AnalysisUsage &AU) const override;
234 VRM->clearAllVirt(); 236 VRM->clearAllVirt();
235 MRI->clearVirtRegs(); 237 MRI->clearVirtRegs();
236 return true; 238 return true;
237 } 239 }
238 240
241 void VirtRegRewriter::addLiveInsForSubRanges(const LiveInterval &LI,
242 unsigned PhysReg) const {
243 assert(!LI.empty());
244 assert(LI.hasSubRanges());
245
246 typedef std::pair<const LiveInterval::SubRange *,
247 LiveInterval::const_iterator> SubRangeIteratorPair;
248 SmallVector<SubRangeIteratorPair, 4> SubRanges;
249 SlotIndex First;
250 SlotIndex Last;
251 for (const LiveInterval::SubRange &SR : LI.subranges()) {
252 SubRanges.push_back(std::make_pair(&SR, SR.begin()));
253 if (!First.isValid() || SR.segments.front().start < First)
254 First = SR.segments.front().start;
255 if (!Last.isValid() || SR.segments.back().end > Last)
256 Last = SR.segments.back().end;
257 }
258
259 // Check all mbb start positions between First and Last while
260 // simultaneously advancing an iterator for each subrange.
261 for (SlotIndexes::MBBIndexIterator MBBI = Indexes->findMBBIndex(First);
262 MBBI != Indexes->MBBIndexEnd() && MBBI->first <= Last; ++MBBI) {
263 SlotIndex MBBBegin = MBBI->first;
264 // Advance all subrange iterators so that their end position is just
265 // behind MBBBegin (or the iterator is at the end).
266 LaneBitmask LaneMask = 0;
267 for (auto &RangeIterPair : SubRanges) {
268 const LiveInterval::SubRange *SR = RangeIterPair.first;
269 LiveInterval::const_iterator &SRI = RangeIterPair.second;
270 while (SRI != SR->end() && SRI->end <= MBBBegin)
271 ++SRI;
272 if (SRI == SR->end())
273 continue;
274 if (SRI->start <= MBBBegin)
275 LaneMask |= SR->LaneMask;
276 }
277 if (LaneMask == 0)
278 continue;
279 MachineBasicBlock *MBB = MBBI->second;
280 MBB->addLiveIn(PhysReg, LaneMask);
281 }
282 }
283
239 // Compute MBB live-in lists from virtual register live ranges and their 284 // Compute MBB live-in lists from virtual register live ranges and their
240 // assignments. 285 // assignments.
241 void VirtRegRewriter::addMBBLiveIns() { 286 void VirtRegRewriter::addMBBLiveIns() {
242 SmallVector<MachineBasicBlock*, 16> LiveIn;
243 for (unsigned Idx = 0, IdxE = MRI->getNumVirtRegs(); Idx != IdxE; ++Idx) { 287 for (unsigned Idx = 0, IdxE = MRI->getNumVirtRegs(); Idx != IdxE; ++Idx) {
244 unsigned VirtReg = TargetRegisterInfo::index2VirtReg(Idx); 288 unsigned VirtReg = TargetRegisterInfo::index2VirtReg(Idx);
245 if (MRI->reg_nodbg_empty(VirtReg)) 289 if (MRI->reg_nodbg_empty(VirtReg))
246 continue; 290 continue;
247 LiveInterval &LI = LIS->getInterval(VirtReg); 291 LiveInterval &LI = LIS->getInterval(VirtReg);
251 // assigned PhysReg must be marked as live-in to those blocks. 295 // assigned PhysReg must be marked as live-in to those blocks.
252 unsigned PhysReg = VRM->getPhys(VirtReg); 296 unsigned PhysReg = VRM->getPhys(VirtReg);
253 assert(PhysReg != VirtRegMap::NO_PHYS_REG && "Unmapped virtual register."); 297 assert(PhysReg != VirtRegMap::NO_PHYS_REG && "Unmapped virtual register.");
254 298
255 if (LI.hasSubRanges()) { 299 if (LI.hasSubRanges()) {
256 for (LiveInterval::SubRange &S : LI.subranges()) { 300 addLiveInsForSubRanges(LI, PhysReg);
257 for (const auto &Seg : S.segments) { 301 } else {
258 if (!Indexes->findLiveInMBBs(Seg.start, Seg.end, LiveIn)) 302 // Go over MBB begin positions and see if we have segments covering them.
259 continue; 303 // The following works because segments and the MBBIndex list are both
260 for (MCSubRegIndexIterator SR(PhysReg, TRI); SR.isValid(); ++SR) { 304 // sorted by slot indexes.
261 unsigned SubReg = SR.getSubReg(); 305 SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin();
262 unsigned SubRegIndex = SR.getSubRegIndex(); 306 for (const auto &Seg : LI) {
263 unsigned SubRegLaneMask = TRI->getSubRegIndexLaneMask(SubRegIndex); 307 I = Indexes->advanceMBBIndex(I, Seg.start);
264 if ((SubRegLaneMask & S.LaneMask) == 0) 308 for (; I != Indexes->MBBIndexEnd() && I->first < Seg.end; ++I) {
265 continue; 309 MachineBasicBlock *MBB = I->second;
266 for (unsigned i = 0, e = LiveIn.size(); i != e; ++i) { 310 MBB->addLiveIn(PhysReg);
267 if (!LiveIn[i]->isLiveIn(SubReg))
268 LiveIn[i]->addLiveIn(SubReg);
269 }
270 }
271 LiveIn.clear();
272 } 311 }
273 } 312 }
274 } else {
275 // Scan the segments of LI.
276 for (const auto &Seg : LI.segments) {
277 if (!Indexes->findLiveInMBBs(Seg.start, Seg.end, LiveIn))
278 continue;
279 for (unsigned i = 0, e = LiveIn.size(); i != e; ++i)
280 if (!LiveIn[i]->isLiveIn(PhysReg))
281 LiveIn[i]->addLiveIn(PhysReg);
282 LiveIn.clear();
283 }
284 } 313 }
285 } 314 }
315
316 // Sort and unique MBB LiveIns as we've not checked if SubReg/PhysReg were in
317 // each MBB's LiveIns set before calling addLiveIn on them.
318 for (MachineBasicBlock &MBB : *MF)
319 MBB.sortUniqueLiveIns();
320 }
321
322 /// Returns true if the given machine operand \p MO only reads undefined lanes.
323 /// The function only works for use operands with a subregister set.
324 bool VirtRegRewriter::readsUndefSubreg(const MachineOperand &MO) const {
325 // Shortcut if the operand is already marked undef.
326 if (MO.isUndef())
327 return true;
328
329 unsigned Reg = MO.getReg();
330 const LiveInterval &LI = LIS->getInterval(Reg);
331 const MachineInstr &MI = *MO.getParent();
332 SlotIndex BaseIndex = LIS->getInstructionIndex(&MI);
333 // This code is only meant to handle reading undefined subregisters which
334 // we couldn't properly detect before.
335 assert(LI.liveAt(BaseIndex) &&
336 "Reads of completely dead register should be marked undef already");
337 unsigned SubRegIdx = MO.getSubReg();
338 LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(SubRegIdx);
339 // See if any of the relevant subregister liveranges is defined at this point.
340 for (const LiveInterval::SubRange &SR : LI.subranges()) {
341 if ((SR.LaneMask & UseMask) != 0 && SR.liveAt(BaseIndex))
342 return false;
343 }
344 return true;
286 } 345 }
287 346
288 void VirtRegRewriter::rewrite() { 347 void VirtRegRewriter::rewrite() {
289 bool NoSubRegLiveness = !MRI->tracksSubRegLiveness(); 348 bool NoSubRegLiveness = !MRI->subRegLivenessEnabled();
290 SmallVector<unsigned, 8> SuperDeads; 349 SmallVector<unsigned, 8> SuperDeads;
291 SmallVector<unsigned, 8> SuperDefs; 350 SmallVector<unsigned, 8> SuperDefs;
292 SmallVector<unsigned, 8> SuperKills; 351 SmallVector<unsigned, 8> SuperKills;
293 SmallPtrSet<const MachineInstr *, 4> NoReturnInsts;
294
295 // Here we have a SparseSet to hold which PhysRegs are actually encountered
296 // in the MF we are about to iterate over so that later when we call
297 // setPhysRegUsed, we are only doing it for physRegs that were actually found
298 // in the program and not for all of the possible physRegs for the given
299 // target architecture. If the target has a lot of physRegs, then for a small
300 // program there will be a significant compile time reduction here.
301 PhysRegs.clear();
302 PhysRegs.setUniverse(TRI->getNumRegs());
303
304 // The function with uwtable should guarantee that the stack unwinder
305 // can unwind the stack to the previous frame. Thus, we can't apply the
306 // noreturn optimization if the caller function has uwtable attribute.
307 bool HasUWTable = MF->getFunction()->hasFnAttribute(Attribute::UWTable);
308 352
309 for (MachineFunction::iterator MBBI = MF->begin(), MBBE = MF->end(); 353 for (MachineFunction::iterator MBBI = MF->begin(), MBBE = MF->end();
310 MBBI != MBBE; ++MBBI) { 354 MBBI != MBBE; ++MBBI) {
311 DEBUG(MBBI->print(dbgs(), Indexes)); 355 DEBUG(MBBI->print(dbgs(), Indexes));
312 bool IsExitBB = MBBI->succ_empty();
313 for (MachineBasicBlock::instr_iterator 356 for (MachineBasicBlock::instr_iterator
314 MII = MBBI->instr_begin(), MIE = MBBI->instr_end(); MII != MIE;) { 357 MII = MBBI->instr_begin(), MIE = MBBI->instr_end(); MII != MIE;) {
315 MachineInstr *MI = MII; 358 MachineInstr *MI = &*MII;
316 ++MII; 359 ++MII;
317
318 // Check if this instruction is a call to a noreturn function. If this
319 // is a call to noreturn function and we don't need the stack unwinding
320 // functionality (i.e. this function does not have uwtable attribute and
321 // the callee function has the nounwind attribute), then we can ignore
322 // the definitions set by this instruction.
323 if (!HasUWTable && IsExitBB && MI->isCall()) {
324 for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
325 MOE = MI->operands_end(); MOI != MOE; ++MOI) {
326 MachineOperand &MO = *MOI;
327 if (!MO.isGlobal())
328 continue;
329 const Function *Func = dyn_cast<Function>(MO.getGlobal());
330 if (!Func || !Func->hasFnAttribute(Attribute::NoReturn) ||
331 // We need to keep correct unwind information
332 // even if the function will not return, since the
333 // runtime may need it.
334 !Func->hasFnAttribute(Attribute::NoUnwind))
335 continue;
336 NoReturnInsts.insert(MI);
337 break;
338 }
339 }
340 360
341 for (MachineInstr::mop_iterator MOI = MI->operands_begin(), 361 for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
342 MOE = MI->operands_end(); MOI != MOE; ++MOI) { 362 MOE = MI->operands_end(); MOI != MOE; ++MOI) {
343 MachineOperand &MO = *MOI; 363 MachineOperand &MO = *MOI;
344 364
345 // Make sure MRI knows about registers clobbered by regmasks. 365 // Make sure MRI knows about registers clobbered by regmasks.
346 if (MO.isRegMask()) 366 if (MO.isRegMask())
347 MRI->addPhysRegsUsedFromRegMask(MO.getRegMask()); 367 MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
348
349 // If we encounter a VirtReg or PhysReg then get at the PhysReg and add
350 // it to the physreg bitset. Later we use only the PhysRegs that were
351 // actually encountered in the MF to populate the MRI's used physregs.
352 if (MO.isReg() && MO.getReg())
353 PhysRegs.insert(
354 TargetRegisterInfo::isVirtualRegister(MO.getReg()) ?
355 VRM->getPhys(MO.getReg()) :
356 MO.getReg());
357 368
358 if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg())) 369 if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
359 continue; 370 continue;
360 unsigned VirtReg = MO.getReg(); 371 unsigned VirtReg = MO.getReg();
361 unsigned PhysReg = VRM->getPhys(VirtReg); 372 unsigned PhysReg = VRM->getPhys(VirtReg);
362 assert(PhysReg != VirtRegMap::NO_PHYS_REG && 373 assert(PhysReg != VirtRegMap::NO_PHYS_REG &&
363 "Instruction uses unmapped VirtReg"); 374 "Instruction uses unmapped VirtReg");
364 assert(!MRI->isReserved(PhysReg) && "Reserved register assignment"); 375 assert(!MRI->isReserved(PhysReg) && "Reserved register assignment");
365 376
366 // Preserve semantics of sub-register operands. 377 // Preserve semantics of sub-register operands.
367 if (MO.getSubReg()) { 378 unsigned SubReg = MO.getSubReg();
368 // A virtual register kill refers to the whole register, so we may 379 if (SubReg != 0) {
369 // have to add <imp-use,kill> operands for the super-register. A 380 if (NoSubRegLiveness) {
370 // partial redef always kills and redefines the super-register. 381 // A virtual register kill refers to the whole register, so we may
371 if (NoSubRegLiveness && MO.readsReg() 382 // have to add <imp-use,kill> operands for the super-register. A
372 && (MO.isDef() || MO.isKill())) 383 // partial redef always kills and redefines the super-register.
373 SuperKills.push_back(PhysReg); 384 if (MO.readsReg() && (MO.isDef() || MO.isKill()))
374 385 SuperKills.push_back(PhysReg);
375 if (MO.isDef()) { 386
376 // The <def,undef> flag only makes sense for sub-register defs, and 387 if (MO.isDef()) {
377 // we are substituting a full physreg. An <imp-use,kill> operand 388 // Also add implicit defs for the super-register.
378 // from the SuperKills list will represent the partial read of the
379 // super-register.
380 MO.setIsUndef(false);
381
382 // Also add implicit defs for the super-register.
383 if (NoSubRegLiveness) {
384 if (MO.isDead()) 389 if (MO.isDead())
385 SuperDeads.push_back(PhysReg); 390 SuperDeads.push_back(PhysReg);
386 else 391 else
387 SuperDefs.push_back(PhysReg); 392 SuperDefs.push_back(PhysReg);
388 } 393 }
394 } else {
395 if (MO.isUse()) {
396 if (readsUndefSubreg(MO))
397 // We need to add an <undef> flag if the subregister is
398 // completely undefined (and we are not adding super-register
399 // defs).
400 MO.setIsUndef(true);
401 } else if (!MO.isDead()) {
402 assert(MO.isDef());
403 // Things get tricky when we ran out of lane mask bits and
404 // merged multiple lanes into the overflow bit: In this case
405 // our subregister liveness tracking isn't precise and we can't
406 // know what subregister parts are undefined, fall back to the
407 // implicit super-register def then.
408 LaneBitmask LaneMask = TRI->getSubRegIndexLaneMask(SubReg);
409 if (TargetRegisterInfo::isImpreciseLaneMask(LaneMask))
410 SuperDefs.push_back(PhysReg);
411 }
389 } 412 }
390 413
414 // The <def,undef> flag only makes sense for sub-register defs, and
415 // we are substituting a full physreg. An <imp-use,kill> operand
416 // from the SuperKills list will represent the partial read of the
417 // super-register.
418 if (MO.isDef())
419 MO.setIsUndef(false);
420
391 // PhysReg operands cannot have subregister indexes. 421 // PhysReg operands cannot have subregister indexes.
392 PhysReg = TRI->getSubReg(PhysReg, MO.getSubReg()); 422 PhysReg = TRI->getSubReg(PhysReg, SubReg);
393 assert(PhysReg && "Invalid SubReg for physical register"); 423 assert(PhysReg && "Invalid SubReg for physical register");
394 MO.setSubReg(0); 424 MO.setSubReg(0);
395 } 425 }
396 // Rewrite. Note we could have used MachineOperand::substPhysReg(), but 426 // Rewrite. Note we could have used MachineOperand::substPhysReg(), but
397 // we need the inlining here. 427 // we need the inlining here.
412 DEBUG(dbgs() << "> " << *MI); 442 DEBUG(dbgs() << "> " << *MI);
413 443
414 // Finally, remove any identity copies. 444 // Finally, remove any identity copies.
415 if (MI->isIdentityCopy()) { 445 if (MI->isIdentityCopy()) {
416 ++NumIdCopies; 446 ++NumIdCopies;
417 if (MI->getNumOperands() == 2) { 447 DEBUG(dbgs() << "Deleting identity copy.\n");
418 DEBUG(dbgs() << "Deleting identity copy.\n"); 448 if (Indexes)
419 if (Indexes) 449 Indexes->removeMachineInstrFromMaps(MI);
420 Indexes->removeMachineInstrFromMaps(MI); 450 // It's safe to erase MI because MII has already been incremented.
421 // It's safe to erase MI because MII has already been incremented. 451 MI->eraseFromParent();
422 MI->eraseFromParent();
423 } else {
424 // Transform identity copy to a KILL to deal with subregisters.
425 MI->setDesc(TII->get(TargetOpcode::KILL));
426 DEBUG(dbgs() << "Identity copy: " << *MI);
427 }
428 } 452 }
429 } 453 }
430 } 454 }
431 455 }
432 // Tell MRI about physical registers in use. 456
433 if (NoReturnInsts.empty()) {
434 for (SparseSet<unsigned>::iterator
435 RegI = PhysRegs.begin(), E = PhysRegs.end(); RegI != E; ++RegI)
436 if (!MRI->reg_nodbg_empty(*RegI))
437 MRI->setPhysRegUsed(*RegI);
438 } else {
439 for (SparseSet<unsigned>::iterator
440 I = PhysRegs.begin(), E = PhysRegs.end(); I != E; ++I) {
441 unsigned Reg = *I;
442 if (MRI->reg_nodbg_empty(Reg))
443 continue;
444 // Check if this register has a use that will impact the rest of the
445 // code. Uses in debug and noreturn instructions do not impact the
446 // generated code.
447 for (MachineInstr &It : MRI->reg_nodbg_instructions(Reg)) {
448 if (!NoReturnInsts.count(&It)) {
449 MRI->setPhysRegUsed(Reg);
450 break;
451 }
452 }
453 }
454 }
455 }
456