typedef __SIZE_TYPE__ size_t; extern size_t strlen (const char *);
struct iovec { void *iov_base; size_t iov_len; }; struct S { const char *abday[7]; const char *day[7]; const char *abmon[12]; const char *mon[12]; const char *am_pm[2]; }; extern void foo (size_t, struct iovec *); void bar (struct S *time) { struct iovec iov[43]; size_t cnt; iov[0].iov_base = (void *) "abc"; iov[0].iov_len = 3; iov[1].iov_base = (void *) "def"; iov[1].iov_len = 3; for (cnt = 0; cnt <= 7; ++cnt) { iov[2 + cnt].iov_base = (void *) (time->abday[cnt] ?: ""); iov[2 + cnt].iov_len = strlen (iov[2 + cnt].iov_base) + 1; } for (; cnt <= 14; ++cnt) { iov[2 + cnt].iov_base = (void *) (time->day[cnt - 7] ?: ""); iov[2 + cnt].iov_len = strlen (iov[2 + cnt].iov_base) + 1; } for (; cnt <= 26; ++cnt) { iov[2 + cnt].iov_base = (void *) (time->abmon[cnt - 14] ?: ""); iov[2 + cnt].iov_len = strlen (iov[2 + cnt].iov_base) + 1; } for (; cnt <= 38; ++cnt) { iov[2 + cnt].iov_base = (void *) (time->mon[cnt - 26] ?: ""); iov[2 + cnt].iov_len = strlen (iov[2 + cnt].iov_base) + 1; } for (; cnt <= 40; ++cnt) { iov[2 + cnt].iov_base = (void *) (time->am_pm[cnt - 38] ?: ""); iov[2 + cnt].iov_len = strlen (iov[2 + cnt].iov_base) + 1; } foo (2 + cnt, iov); } issues at -O2 -Wall bogus warnings: /tmp/w.c:24: warning: array subscript is above array bounds /tmp/w.c:24: warning: array subscript is above array bounds Apparently tree loop optimizations decide to use &time->abmon[14] and &time->mon[30] pointers (which are outside of the range of the arrays) and then dereferences that pointer - 112B (96B is the size of abmon array and 16B are extra 2 pointers because the other array is biased by 2). The testcase is extracted from glibc. -- Summary: [4.3 Regression] -Warray-bounds doesn't play together with loop optimizations Product: gcc Version: 4.3.0 Status: UNCONFIRMED Severity: normal Priority: P3 Component: tree-optimization AssignedTo: unassigned at gcc dot gnu dot org ReportedBy: jakub at gcc dot gnu dot org http://gcc.gnu.org/bugzilla/show_bug.cgi?id=31227