author    Daniel Dunbar <daniel@zuster.org>  2010-04-22 15:22:33 +0000
committer Daniel Dunbar <daniel@zuster.org>  2010-04-22 15:22:33 +0000
commit    52968a1c765e43000f904ecb27a6353b0185bcd6 (patch)
tree      86c5c79552c9e9815697ffb465a30b3181f98d95
parent    4651efb5ba5710c91b58c8b86872b264dd71f464 (diff)
IRgen: Fix another case where we generated an invalid access component when we
immediately narrowed the access size. Fix this (and the previous case) by just
choosing a better access size up-front.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@102068 91177308-0d34-0410-b5e6-96231b3b80d8
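The heart of the fix is small enough to restate in isolation. The sketch below is a hypothetical standalone helper, not Clang API; the loop body mirrors the lines this patch adds to ComputeBitFieldInfo, and the parameter names follow the diff.

#include <cstdint>

// Hypothetical restatement of the up-front narrowing this patch introduces.
// Given a bit-field at FieldOffset (in bits) inside a record of
// ContainingTypeSizeInBits, pick the widest access (never below one byte)
// whose aligned start keeps the access inside the record, and return that
// aligned start position.
static uint64_t ChooseInitialAccess(uint64_t FieldOffset,
                                    uint64_t ContainingTypeSizeInBits,
                                    uint64_t &AccessWidth) {
  // Round down from the field offset to the first offset that is aligned
  // for the current access width.
  uint64_t AccessStart = FieldOffset - (FieldOffset % AccessWidth);

  // Halve the access width until [AccessStart, AccessStart + AccessWidth)
  // fits within the record, stopping at 8 bits.
  while (AccessWidth > 8 &&
         AccessStart + AccessWidth > ContainingTypeSizeInBits) {
    AccessWidth >>= 1;
    AccessStart = FieldOffset - (FieldOffset % AccessWidth);
  }
  return AccessStart;
}

Because the width is narrowed before the main component loop runs, the first access is guaranteed to overlap the field, which is what makes the escape hatch deleted in the second hunk unnecessary.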
-rw-r--r--  lib/CodeGen/CGRecordLayoutBuilder.cpp  25
-rw-r--r--  test/CodeGen/bitfield-2.c              21
2 files changed, 33 insertions(+), 13 deletions(-)
diff --git a/lib/CodeGen/CGRecordLayoutBuilder.cpp b/lib/CodeGen/CGRecordLayoutBuilder.cpp
index ee8ae5224f..6302cf8d1f 100644
--- a/lib/CodeGen/CGRecordLayoutBuilder.cpp
+++ b/lib/CodeGen/CGRecordLayoutBuilder.cpp
@@ -191,7 +191,14 @@ static CGBitFieldInfo ComputeBitFieldInfo(CodeGenTypes &Types,
   // Round down from the field offset to find the first access position that is
   // at an aligned offset of the initial access type.
-  uint64_t AccessStart = FieldOffset - (FieldOffset % TypeSizeInBits);
+  uint64_t AccessStart = FieldOffset - (FieldOffset % AccessWidth);
+
+  // Adjust initial access size to fit within record.
+  while (AccessWidth > 8 &&
+         AccessStart + AccessWidth > ContainingTypeSizeInBits) {
+    AccessWidth >>= 1;
+    AccessStart = FieldOffset - (FieldOffset % AccessWidth);
+  }
 
   while (AccessedTargetBits < FieldSize) {
     // Check that we can access using a type of this size, without reading off
@@ -210,20 +217,12 @@ static CGBitFieldInfo ComputeBitFieldInfo(CodeGenTypes &Types,
     // target. We are reading bits [AccessStart, AccessStart + AccessWidth); the
     // intersection with [FieldOffset, FieldOffset + FieldSize) gives the bits
     // in the target that we are reading.
+    assert(FieldOffset < AccessStart + AccessWidth && "Invalid access start!");
+    assert(AccessStart < FieldOffset + FieldSize && "Invalid access start!");
     uint64_t AccessBitsInFieldStart = std::max(AccessStart, FieldOffset);
     uint64_t AccessBitsInFieldSize =
-      std::min(AccessWidth - (AccessBitsInFieldStart - AccessStart),
-               FieldSize - (AccessBitsInFieldStart-FieldOffset));
-
-    // If we haven't accessed any target bits yet and narrowed the access size,
-    // we might not have reached any target bits yet.
-    //
-    // FIXME: This test is unnecessarily once we choose the initial acccess size
-    // more intelligently.
-    if (!AccessedTargetBits && AccessBitsInFieldSize == 0) {
-      AccessStart += AccessWidth;
-      continue;
-    }
+      std::min(AccessWidth + AccessStart,
+               FieldOffset + FieldSize) - AccessBitsInFieldStart;
 
     assert(NumComponents < 3 && "Unexpected number of components!");
     CGBitFieldInfo::AccessInfo &AI = Components[NumComponents++];
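For reference, the rewritten size computation is a plain interval intersection, made safe by the two new asserts. A minimal restatement using the same names as the diff (the free function itself is illustrative, not Clang API):

#include <algorithm>
#include <cstdint>

// Illustrative restatement of the rewritten computation: the number of bits
// of the field [FieldOffset, FieldOffset + FieldSize) that fall inside the
// access [AccessStart, AccessStart + AccessWidth). The new asserts guarantee
// the two intervals overlap, so the result is always positive and the old
// "zero bits accessed" escape hatch is no longer needed.
static uint64_t AccessBitsInField(uint64_t AccessStart, uint64_t AccessWidth,
                                  uint64_t FieldOffset, uint64_t FieldSize) {
  uint64_t Start = std::max(AccessStart, FieldOffset);
  uint64_t End = std::min(AccessStart + AccessWidth, FieldOffset + FieldSize);
  return End - Start;
}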
diff --git a/test/CodeGen/bitfield-2.c b/test/CodeGen/bitfield-2.c
index 121bd7cd4f..e91859fb72 100644
--- a/test/CodeGen/bitfield-2.c
+++ b/test/CodeGen/bitfield-2.c
@@ -345,3 +345,24 @@ unsigned test_8() {
   res ^= g8.f0 ^ g8.f2 ^ g8.f3;
   return res;
 }
+
+/***/
+
+// This is another case where we narrow the access width immediately.
+//
+// <rdar://problem/7893760>
+
+struct __attribute__((packed)) s9 {
+  unsigned f0 : 7;
+  unsigned f1 : 7;
+  unsigned f2 : 7;
+  unsigned f3 : 7;
+  unsigned f4 : 7;
+  unsigned f5 : 7;
+  unsigned f6 : 7;
+  unsigned f7 : 7;
+};
+
+int f9_load(struct s9 *a0) {
+  return a0->f7;
+}
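Working the new test through the patched logic, assuming unsigned starts with a 32-bit access type: s9 is packed, so the record is 8 × 7 = 56 bits and f7 occupies bits [49, 56). A 32-bit access would start at bit 49 - (49 % 32) = 32 and extend to bit 64, past the record; a 16-bit access would start at bit 48 and likewise reach bit 64. The up-front loop therefore settles on an 8-bit access of bits [48, 56), which overlaps f7 from the first iteration, exactly the guarantee the deleted escape hatch existed to paper over.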