Skip to content

Commit

Permalink
[CodeGen] Really renumber slot indexes before register allocation (#67038)
Browse files Browse the repository at this point in the history

PR #66334 tried to renumber slot indexes before register allocation, but
the numbering was still affected by list entries for instructions which
had been erased. Fix this to make the register allocator's live range
length heuristics even less dependent on the history of how instructions
have been added to and removed from SlotIndexes's maps.
  • Loading branch information
jayfoad committed Oct 9, 2023
1 parent 648046d commit 2501ae5
Show file tree
Hide file tree
Showing 736 changed files with 250,920 additions and 259,056 deletions.
34 changes: 32 additions & 2 deletions llvm/lib/CodeGen/SlotIndexes.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -238,8 +238,38 @@ void SlotIndexes::repairIndexesInRange(MachineBasicBlock *MBB,
}

/// Renumber all slot index list entries so that consecutive live entries are
/// exactly SlotIndex::InstrDist apart, skipping (and erasing) entries for
/// instructions that have been deleted. Blocks are visited in slot index
/// order so the resulting numbering is monotonically increasing across the
/// function, which keeps the register allocator's live-range length
/// heuristics independent of the history of instruction insertions/removals.
void SlotIndexes::packIndexes() {
  unsigned Index = 0;
  // Check that the dummy entry for the start of the first block does not need
  // updating. It should always be 0.
  assert(idx2MBBMap[0].second->getNumber() == 0 &&
         "First MBB should be number 0!");
  assert(MBBRanges[0].first.getIndex() == Index && "First index should be 0!");
  Index += SlotIndex::InstrDist;
  // Iterate over basic blocks in slot index order (idx2MBBMap is sorted by
  // slot index, not by block number).
  for (MachineBasicBlock *MBB : make_second_range(idx2MBBMap)) {
    auto [MBBStartIdx, MBBEndIdx] = MBBRanges[MBB->getNumber()];
    auto Start = MBBStartIdx.listEntry()->getIterator();
    auto End = MBBEndIdx.listEntry()->getIterator();
    // Update entries for each instruction in the block. early_inc is required
    // because we may remove the current entry from the list as we go.
    for (auto &I : make_early_inc_range(make_range(std::next(Start), End))) {
      if (I.getInstr()) {
        I.setIndex(Index);
        Index += SlotIndex::InstrDist;
      } else {
        // Remove entries for deleted instructions so they do not affect the
        // numbering of the entries that follow.
        // FIXME: Eventually we want to remove them in
        // removeMachineInstrFromMaps but that is not currently possible because
        // some SlotIndexes API functions are called in a transiently broken
        // state where some live ranges still refer to indexes of deleted
        // instructions.
        // TODO: Add removed entries to a free list so they can be reused?
        indexList.remove(I);
      }
    }
    // Update the dummy entry for the end of the block.
    End->setIndex(Index);
    Index += SlotIndex::InstrDist;
  }
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -236,8 +236,8 @@ define dso_local i128 @load_atomic_i128_aligned_unordered(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_unordered:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr unordered, align 16
ret i128 %r
}
Expand All @@ -251,8 +251,8 @@ define dso_local i128 @load_atomic_i128_aligned_unordered_const(ptr readonly %pt
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_unordered_const:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr unordered, align 16
ret i128 %r
}
Expand All @@ -266,8 +266,8 @@ define dso_local i128 @load_atomic_i128_aligned_monotonic(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_monotonic:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr monotonic, align 16
ret i128 %r
}
Expand All @@ -281,8 +281,8 @@ define dso_local i128 @load_atomic_i128_aligned_monotonic_const(ptr readonly %pt
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_monotonic_const:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr monotonic, align 16
ret i128 %r
}
Expand All @@ -296,8 +296,8 @@ define dso_local i128 @load_atomic_i128_aligned_acquire(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_acquire:
; -O1: ldaxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr acquire, align 16
ret i128 %r
}
Expand All @@ -311,8 +311,8 @@ define dso_local i128 @load_atomic_i128_aligned_acquire_const(ptr readonly %ptr)
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_acquire_const:
; -O1: ldaxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr acquire, align 16
ret i128 %r
}
Expand All @@ -326,8 +326,8 @@ define dso_local i128 @load_atomic_i128_aligned_seq_cst(ptr %ptr) {
; -O0: stlxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_seq_cst:
; -O1: ldaxp x0, x1, [x8]
; -O1: stlxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stlxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr seq_cst, align 16
ret i128 %r
}
Expand All @@ -341,8 +341,8 @@ define dso_local i128 @load_atomic_i128_aligned_seq_cst_const(ptr readonly %ptr)
; -O0: stlxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_seq_cst_const:
; -O1: ldaxp x0, x1, [x8]
; -O1: stlxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stlxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr seq_cst, align 16
ret i128 %r
}
Expand Down
32 changes: 16 additions & 16 deletions llvm/test/CodeGen/AArch64/Atomics/aarch64-atomic-load-rcpc.ll
Original file line number Diff line number Diff line change
Expand Up @@ -236,8 +236,8 @@ define dso_local i128 @load_atomic_i128_aligned_unordered(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_unordered:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr unordered, align 16
ret i128 %r
}
Expand All @@ -251,8 +251,8 @@ define dso_local i128 @load_atomic_i128_aligned_unordered_const(ptr readonly %pt
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_unordered_const:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr unordered, align 16
ret i128 %r
}
Expand All @@ -266,8 +266,8 @@ define dso_local i128 @load_atomic_i128_aligned_monotonic(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_monotonic:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr monotonic, align 16
ret i128 %r
}
Expand All @@ -281,8 +281,8 @@ define dso_local i128 @load_atomic_i128_aligned_monotonic_const(ptr readonly %pt
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_monotonic_const:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr monotonic, align 16
ret i128 %r
}
Expand All @@ -296,8 +296,8 @@ define dso_local i128 @load_atomic_i128_aligned_acquire(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_acquire:
; -O1: ldaxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr acquire, align 16
ret i128 %r
}
Expand All @@ -311,8 +311,8 @@ define dso_local i128 @load_atomic_i128_aligned_acquire_const(ptr readonly %ptr)
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_acquire_const:
; -O1: ldaxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr acquire, align 16
ret i128 %r
}
Expand All @@ -326,8 +326,8 @@ define dso_local i128 @load_atomic_i128_aligned_seq_cst(ptr %ptr) {
; -O0: stlxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_seq_cst:
; -O1: ldaxp x0, x1, [x8]
; -O1: stlxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stlxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr seq_cst, align 16
ret i128 %r
}
Expand All @@ -341,8 +341,8 @@ define dso_local i128 @load_atomic_i128_aligned_seq_cst_const(ptr readonly %ptr)
; -O0: stlxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_seq_cst_const:
; -O1: ldaxp x0, x1, [x8]
; -O1: stlxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stlxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr seq_cst, align 16
ret i128 %r
}
Expand Down
32 changes: 16 additions & 16 deletions llvm/test/CodeGen/AArch64/Atomics/aarch64-atomic-load-v8a.ll
Original file line number Diff line number Diff line change
Expand Up @@ -236,8 +236,8 @@ define dso_local i128 @load_atomic_i128_aligned_unordered(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_unordered:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr unordered, align 16
ret i128 %r
}
Expand All @@ -251,8 +251,8 @@ define dso_local i128 @load_atomic_i128_aligned_unordered_const(ptr readonly %pt
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_unordered_const:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr unordered, align 16
ret i128 %r
}
Expand All @@ -266,8 +266,8 @@ define dso_local i128 @load_atomic_i128_aligned_monotonic(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_monotonic:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr monotonic, align 16
ret i128 %r
}
Expand All @@ -281,8 +281,8 @@ define dso_local i128 @load_atomic_i128_aligned_monotonic_const(ptr readonly %pt
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_monotonic_const:
; -O1: ldxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr monotonic, align 16
ret i128 %r
}
Expand All @@ -296,8 +296,8 @@ define dso_local i128 @load_atomic_i128_aligned_acquire(ptr %ptr) {
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_acquire:
; -O1: ldaxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr acquire, align 16
ret i128 %r
}
Expand All @@ -311,8 +311,8 @@ define dso_local i128 @load_atomic_i128_aligned_acquire_const(ptr readonly %ptr)
; -O0: stxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_acquire_const:
; -O1: ldaxp x0, x1, [x8]
; -O1: stxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr acquire, align 16
ret i128 %r
}
Expand All @@ -326,8 +326,8 @@ define dso_local i128 @load_atomic_i128_aligned_seq_cst(ptr %ptr) {
; -O0: stlxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_seq_cst:
; -O1: ldaxp x0, x1, [x8]
; -O1: stlxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stlxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr seq_cst, align 16
ret i128 %r
}
Expand All @@ -341,8 +341,8 @@ define dso_local i128 @load_atomic_i128_aligned_seq_cst_const(ptr readonly %ptr)
; -O0: stlxp w8, x0, x1, [x9]
;
; -O1-LABEL: load_atomic_i128_aligned_seq_cst_const:
; -O1: ldaxp x0, x1, [x8]
; -O1: stlxp w9, x0, x1, [x8]
; -O1: ldaxp x8, x1, [x0]
; -O1: stlxp w9, x8, x1, [x0]
%r = load atomic i128, ptr %ptr seq_cst, align 16
ret i128 %r
}
Expand Down
Loading

0 comments on commit 2501ae5

Please sign in to comment.