author     Shane Kearns <shane.kearns@accenture.com>    2009-12-07 14:26:39 (GMT)
committer  mread <qt-info@nokia.com>                    2010-09-30 14:48:28 (GMT)
commit     3579fd98fbaf95d7dfc4d05f34e00353a295d340 (patch)
tree       858247bc0ec77c324a2499c9ea26fd3fd51d53d8 /src
parent     f47f2e3200103e042ae6af4918c93308b4ec014d (diff)
Enable call stack tracing of allocs, for memory leak debugging
Task-number: QT-3967
Reviewed-by: mread
Diffstat (limited to 'src')
-rw-r--r--  src/corelib/arch/symbian/arch.pri           |  5
-rw-r--r--  src/corelib/arch/symbian/newallocator.cpp   | 70
-rw-r--r--  src/corelib/arch/symbian/newallocator_p.h   |  1
3 files changed, 55 insertions(+), 21 deletions(-)
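
Before the individual diffs: in outline, the patch threads a single new hook, TraceCallStack(), into the allocator's existing trace points in Alloc(), Free() and TraceReAlloc(). The following is a minimal, platform-neutral sketch of that hook pattern only, not Qt or Symbian code; TracingAllocator and the Emit*() helpers are illustrative stand-ins for RNewAllocator, iFlags & ETraceAllocs and the BTrace calls that appear in the diff.

#include <cstdio>
#include <cstdlib>
#include <cstddef>

struct TracingAllocator
{
    bool traceAllocs = true;                  // stand-in for iFlags & ETraceAllocs

    void* Alloc(std::size_t size)
    {
        void* cell = std::malloc(size);       // stand-in for the real allocation paths
        if (traceAllocs && cell) {
            EmitAllocRecord(cell, size);      // ~ BTraceContextN(EHeap, EHeapAlloc, ...)
            EmitCallStackRecord(cell);        // ~ TraceCallStack(), added by this patch
        }
        return cell;
    }

    void Free(void* cell)
    {
        if (traceAllocs && cell) {
            EmitFreeRecord(cell);             // ~ BTraceContextN(EHeap, EHeapFree, ...)
            EmitCallStackRecord(cell);        // the call stack is traced on frees too
        }
        std::free(cell);
    }

private:
    // Toy stand-ins for the BTrace output channel.
    void EmitAllocRecord(void* cell, std::size_t size) { std::printf("alloc %p size %zu\n", cell, size); }
    void EmitFreeRecord(void* cell)                    { std::printf("free  %p\n", cell); }
    void EmitCallStackRecord(void* cell)               { std::printf("stack for cell %p ...\n", cell); }
};

A leak-hunting tool on the host side can then pair EHeapAlloc and EHeapFree records by cell address; any allocation record left unpaired at the end of a run is a candidate leak, and the accompanying EHeapCallStack record shows where it came from.
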
diff --git a/src/corelib/arch/symbian/arch.pri b/src/corelib/arch/symbian/arch.pri
index 94b1caa..bab042c 100644
--- a/src/corelib/arch/symbian/arch.pri
+++ b/src/corelib/arch/symbian/arch.pri
@@ -2,8 +2,9 @@
# Symbian architecture
#
SOURCES += $$QT_ARCH_CPP/qatomic_symbian.cpp \
- $$QT_ARCH_CPP/newallocator.cpp \
- $$QT_ARCH_CPP/../generic/qatomic_generic_armv6.cpp
+ $$QT_ARCH_CPP/../armv6/qatomic_generic_armv6.cpp \
+ $$QT_ARCH_CPP/newallocator.cpp
HEADERS += $$QT_ARCH_CPP/dla_p.h \
$$QT_ARCH_CPP/newallocator_p.h
+exists($$EPOCROOT/epoc32/include/u32std.h):DEFINES += QT_SYMBIAN_HAVE_U32STD_H
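
The new exists() line above is a build-time feature test: QT_SYMBIAN_HAVE_U32STD_H gets defined only when the SDK in use actually ships the internal u32std.h header under $$EPOCROOT/epoc32/include. A short sketch of how such a define is consumed on the C++ side, assuming u32std.h provides the kernel thread-create structs when it is present (the real consumer is the #ifndef block in the newallocator.cpp diff below):

#ifdef QT_SYMBIAN_HAVE_U32STD_H
#include <u32std.h>    // SDK already declares SThreadCreateInfo and friends
#else
// SDK lacks u32std.h: declare local copies of SThreadCreateInfo /
// SStdEpocThreadCreateInfo, exactly as the next file's first hunk does
#endif

This replaces the earlier hard-coded RND_SDK switch, so the same source can build against both the internal and the public SDKs without manual editing.
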
diff --git a/src/corelib/arch/symbian/newallocator.cpp b/src/corelib/arch/symbian/newallocator.cpp
index 6250371..7025483 100644
--- a/src/corelib/arch/symbian/newallocator.cpp
+++ b/src/corelib/arch/symbian/newallocator.cpp
@@ -49,8 +49,8 @@
#include <e32cmn.h>
#include <hal.h>
#include <e32panic.h>
-#define RND_SDK
-#ifndef RND_SDK
+
+#ifndef QT_SYMBIAN_HAVE_U32STD_H
struct SThreadCreateInfo
{
TAny* iHandle;
@@ -93,11 +93,14 @@ struct SStdEpocThreadCreateInfo : public SThreadCreateInfo
//This debug flag uses BTRACE to emit debug traces to identify the heaps.
//Note that it uses the ETest1 trace category which is not reserved
-#define TRACING_HEAPS
+//#define TRACING_HEAPS
//This debug flag uses BTRACE to emit debug traces to aid with debugging
//allocs, frees & reallocs. It should be used together with the KUSERHEAPTRACE
//kernel trace flag to enable heap tracing.
-#define TRACING_ALLOCS
+//#define TRACING_ALLOCS
+//This debug flag turns on tracing of the call stack for each alloc trace.
+//It is dependent on TRACING_ALLOCS.
+//#define TRACING_CALLSTACKS
#if defined(TRACING_ALLOCS) || defined(TRACING_HEAPS)
#include <e32btrace.h>
@@ -129,6 +132,32 @@ LOCAL_C void Panic(TCdtPanic aPanic)
User::Panic(_L("USER"),aPanic);
}
+#define STACKSIZE 32
+inline void RNewAllocator::TraceCallStack()
+{
+#ifdef TRACING_CALLSTACKS
+ TUint32 filteredStack[STACKSIZE];
+ TThreadStackInfo info;
+ TUint32 *sp = (TUint32*)&sp;
+ RThread().StackInfo(info);
+ Lock();
+ TInt i;
+ for (i=0;i<STACKSIZE;i++) {
+ if ((TLinAddr)sp>=info.iBase) break;
+ while ((TLinAddr)sp < info.iBase) {
+ TUint32 cur = *sp++;
+ TUint32 range = cur & 0xF0000000;
+ if (range == 0x80000000 || range == 0x70000000) {
+ filteredStack[i] = cur;
+ break;
+ }
+ }
+ }
+ Unlock();
+ BTraceContextBig(BTrace::EHeap, BTrace::EHeapCallStack, (TUint32)this, filteredStack, i * 4);
+#endif
+}
+
size_t getpagesize()
{
TInt size;
@@ -313,6 +342,7 @@ TAny* RNewAllocator::Alloc(TInt aSize)
traceData[1] = aSize;
traceData[2] = aCnt;
BTraceContextN(BTrace::EHeap, BTrace::EHeapAlloc, (TUint32)this, (TUint32)addr, traceData, sizeof(traceData));
+ TraceCallStack();
}
#endif
@@ -382,6 +412,7 @@ void RNewAllocator::Free(TAny* aPtr)
TUint32 traceData;
traceData = aCnt;
BTraceContextN(BTrace::EHeap, BTrace::EHeapFree, (TUint32)this, (TUint32)aPtr, &traceData, sizeof(traceData));
+ TraceCallStack();
}
#endif
}
@@ -393,29 +424,30 @@ void RNewAllocator::Reset()
User::Panic(_L("RNewAllocator"), 1); //this should never be called
}
-inline void RNewAllocator::TraceReAlloc(TAny* aPtr, TInt aSize, TAny* aNewPtr, TInt aZone)
- {
#ifdef TRACING_ALLOCS
- if (aNewPtr && (iFlags & ETraceAllocs))
- {
+inline void RNewAllocator::TraceReAlloc(TAny* aPtr, TInt aSize, TAny* aNewPtr, TInt aZone)
+{
+ if (aNewPtr && (iFlags & ETraceAllocs)) {
TUint32 traceData[3];
traceData[0] = AllocLen(aNewPtr);
traceData[1] = aSize;
- traceData[2] = (TUint32)aPtr;
- BTraceContextN(BTrace::EHeap, BTrace::EHeapReAlloc,(TUint32)this, (TUint32)aNewPtr,traceData, sizeof(traceData));
-
+ traceData[2] = (TUint32) aPtr;
+ BTraceContextN(BTrace::EHeap, BTrace::EHeapReAlloc, (TUint32) this, (TUint32) aNewPtr,
+ traceData, sizeof(traceData));
+ TraceCallStack();
//workaround for SAW not handling reallocs properly
- if(aZone >= 0 && aPtr != aNewPtr) {
- BTraceContextN(BTrace::EHeap, BTrace::EHeapFree, (TUint32)this, (TUint32)aPtr, &aZone, sizeof(aZone));
- }
+ if (aZone >= 0 && aPtr != aNewPtr) {
+ BTraceContextN(BTrace::EHeap, BTrace::EHeapFree, (TUint32) this, (TUint32) aPtr,
+ &aZone, sizeof(aZone));
+ TraceCallStack();
}
+ }
+}
#else
- Q_UNUSED(aPtr);
- Q_UNUSED(aSize);
- Q_UNUSED(aNewPtr);
- Q_UNUSED(aZone);
+//Q_UNUSED generates code that prevents the compiler optimising out the empty inline function
+inline void RNewAllocator::TraceReAlloc(TAny* , TInt , TAny* , TInt )
+{}
#endif
- }
TAny* RNewAllocator::ReAlloc(TAny* aPtr, TInt aSize, TInt /*aMode = 0*/)
{
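
TraceCallStack(), added in the second hunk of this file, does not walk frame pointers or unwind tables; it simply scans raw words on the current thread's stack, from a local variable's address up towards the stack base (Symbian user stacks grow downwards, so TThreadStackInfo::iBase is the high end), and keeps up to 32 words whose top nibble marks them as likely code addresses. A simplified, platform-neutral sketch of that heuristic follows; the 0x70000000/0x80000000 ranges and the 32-entry limit are taken from the patch, the assumption that those ranges cover RAM-loaded code and ROM is mine, and everything else is illustrative.

#include <cstdint>
#include <cstddef>

const std::size_t KMaxFrames = 32;     // matches STACKSIZE in the patch

// aSp: current stack pointer (low address); aStackBase: high end of the
// downward-growing stack. Returns how many candidate return addresses
// were copied into aOut.
std::size_t CollectCallStack(const std::uint32_t* aSp,
                             const std::uint32_t* aStackBase,
                             std::uint32_t aOut[KMaxFrames])
{
    std::size_t count = 0;
    while (aSp < aStackBase && count < KMaxFrames) {
        std::uint32_t word = *aSp++;
        std::uint32_t range = word & 0xF0000000u;
        // Keep anything that lands in the address ranges the patch treats
        // as code; other stack words (locals, data pointers) are skipped.
        if (range == 0x80000000u || range == 0x70000000u)
            aOut[count++] = word;
    }
    return count;
}

The result is cheap but approximate: stale return addresses and code pointers held in locals show up as extra frames, which is usually acceptable for leak hunting. The real function also takes the allocator lock around the scan and ships the buffer out with BTraceContextBig() under BTrace::EHeapCallStack.
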
diff --git a/src/corelib/arch/symbian/newallocator_p.h b/src/corelib/arch/symbian/newallocator_p.h
index c72f96b..fd28f2d 100644
--- a/src/corelib/arch/symbian/newallocator_p.h
+++ b/src/corelib/arch/symbian/newallocator_p.h
@@ -248,6 +248,7 @@ private:
enum {npagecells=4};
pagecell pagelist[npagecells]; // descriptors for page-aligned large allocations
inline void TraceReAlloc(TAny* aPtr, TInt aSize, TAny* aNewPtr, TInt aZone);
+ inline void TraceCallStack();
// to track maximum used
//TInt iHighWaterMark;
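
Two usage notes. First, all of this is compiled out by default: to get call-stack records you have to re-enable TRACING_ALLOCS and additionally define TRACING_CALLSTACKS in newallocator.cpp, and, per the comment in that file, run with the KUSERHEAPTRACE kernel trace flag so that heap tracing is actually emitted. Second, the reworked #else branch of TraceReAlloc() shows the pattern now used for disabled hooks: an empty inline with unnamed parameters instead of a Q_UNUSED() body, because, according to the comment added in the patch, Q_UNUSED generates code that stops the compiler optimising the empty inline away. A minimal sketch of that pattern, with an illustrative name:

// When tracing is disabled the hook is an empty inline taking unnamed
// parameters, so calls to it cost nothing once inlined.
inline void TraceReAllocStub(void* /*ptr*/, int /*size*/, void* /*newPtr*/, int /*zone*/)
{
}

void Caller()
{
    TraceReAllocStub(nullptr, 64, nullptr, -1);   // compiles to no code
}
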