@@ -27,42 +27,155 @@ using __cxxabiv1::__guard;

// Debugging helper, last allocation which returned NULL
extern "C" void* _heap_abi_malloc(size_t size, bool unhandled, const void* const caller);
+#if UMM_ENABLE_MEMALIGN
+extern "C" void* _heap_abi_memalign(size_t alignment, size_t size, bool unhandled, const void* const caller);
+#endif

extern "C" void __cxa_pure_virtual(void) __attribute__((__noreturn__));
extern "C" void __cxa_deleted_virtual(void) __attribute__((__noreturn__));

-#if defined(__cpp_exceptions) && (defined(DEBUG_ESP_OOM) \
-    || defined(DEBUG_ESP_PORT) || defined(DEBUG_ESP_WITHINISR) || defined(MIN_ESP_OOM))
+
/*
-  When built with C++ Exceptions: "enabled", track caller address of Last OOM.
-  * For debug build, force enable Last OOM tracking.
-  * With the option "DEBUG_ESP_OOM," always do Last OOM tracking.
-  * Otherwise, disable Last OOM tracking. The build relies on the weak link to
-    the default C++ exception handler. This saves about 232 bytes of IROM, when
-    using C++ exceptions.
-
-  //C C++ Exception "enabled" already adds 28,868 bytes to the build does another
-  //C 232 matter that much? Added define MIN_ESP_OOM for experimention.
-
-  If desired, define MIN_ESP_OOM to continue with a minimum OOM tracking for
-  C++ exception builds.
+  This is what I perceive to be the intent of the original code.
+
+  Use the C++ "Replaceable allocation functions" to install debug wrappers that
+  capture additional information for debugging. The default C++ exception
+  handlers are weakly linked.
+
+  C++ Exceptions: "enabled" -
+    * With debug (e.g. "Debug port: Serial"), do full caller info capture and
+      Heap debug checks. The "Replaceable allocation functions" are in use by
+      the debugging code and are not available to the Sketch.
+    * Without debug, no OOM details are captured. The C++ "Replaceable
+      allocation functions" are available to the Sketch (a usage sketch
+      follows this comment).
+
+  C++ Exceptions: "disabled" -
+    * The C++ "Replaceable allocation functions" are always in use.
+    * With debug, do full caller info capture and Heap debug checks.
+    * Without debug, capture minimum OOM information: the calling address and
+      size of the last alloc failure.
*/
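/*
  A minimal sketch of the "available to the Sketch" case above (C++ exceptions
  enabled, non-debug build, so none of the replacements below are compiled in):
  the Sketch may define its own global allocation functions and the linker will
  prefer them over the weak libstdc++ definitions. The body shown is
  illustrative only and is not part of this file.

    #include <new>
    #include <cstdlib>

    void* operator new(std::size_t size)
    {
        void* p = std::malloc(size ? size : 1);   // hypothetical user replacement
        if (!p) {
            std::abort();   // or record the failure and throw std::bad_alloc()
        }
        return p;
    }
*/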

+#if defined(__cpp_exceptions) && \
+    (defined(DEBUG_ESP_OOM) || defined(DEBUG_ESP_PORT) || defined(DEBUG_ESP_WITHINISR) || defined(MIN_ESP_OOM))

// Debug replacement adaptation from ".../new_op.cc".
using std::new_handler;
using std::bad_alloc;

-static void* _heap_new(std::size_t size, const void* caller)
+#if defined(UMM_ENABLE_MEMALIGN)
+
+// Includes C++ exceptions
+// Includes C++17 operator new align variants
+
+static void* _heap_new_align(std::size_t size, std::size_t alignment, const void* caller)
{
+    /*
+      "Alignment must be a power of two."
+      The libstdc++ sample code did this:
+        if (__builtin_expect(!std::__has_single_bit(alignment), false)) throw(bad_alloc());
+
+      From https://en.cppreference.com/w/cpp/memory/c/aligned_alloc
+        "alignment - specifies the alignment. Must be a valid alignment
+        supported by the implementation."
+
+      I leave the validation to the umm_malloc library. See umm_memalign() for
+      details (a sketch of the check follows this function). Generally speaking,
+      zero is handled as the default, and the default is sizeof(umm_block),
+      8 bytes.
+    */
+
    void* p;

-    /* malloc (0) is unpredictable; avoid it. */
-    if (__builtin_expect(size == 0, false)) {
-        size = 1;
+    while (nullptr == (p = _heap_abi_memalign(alignment, size, false, caller))) {
+        new_handler handler = std::get_new_handler();
+        if (!handler) {
+            throw(bad_alloc());
+        }
+        handler();
+    }
+
+    return p;
+}
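/*
  A minimal sketch (illustrative only, not the umm_malloc implementation) of
  the power-of-two test that umm_memalign() is expected to apply; the libstdc++
  sample quoted above uses std::__has_single_bit() for the same purpose, and
  per the comment above, zero is additionally accepted as "use the default":

    static bool alignment_is_valid(std::size_t alignment)
    {
        // 0 is treated as the default alignment, sizeof(umm_block), 8 bytes.
        return alignment == 0 || (alignment & (alignment - 1)) == 0;
    }
*/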
+
+
+// new_opa
+void* operator new (std::size_t size, std::align_val_t alignment)
+{
+    return _heap_new_align(size, std::size_t(alignment), __builtin_return_address(0));
+}
+
+// new_opva
+void* operator new[] (std::size_t size, std::align_val_t alignment)
+{
+    return _heap_new_align(size, std::size_t(alignment), __builtin_return_address(0));
+}
+
+// new_opant
+void* operator new (std::size_t size, std::align_val_t alignment, const std::nothrow_t&) noexcept
+{
+    __try {
+        return _heap_new_align(size, std::size_t(alignment), __builtin_return_address(0));
+    }
+    __catch (...) {
+        return nullptr;
+    }
+}
+
+// new_opvant
+void* operator new[] (std::size_t size, std::align_val_t alignment, const std::nothrow_t&) noexcept
+{
+    __try {
+        return _heap_new_align(size, std::size_t(alignment), __builtin_return_address(0));
+    }
+    __catch (...) {
+        return nullptr;
    }
+}

-    while (0 == (p = _heap_abi_malloc(size, false, caller))) {
+// new_op
+void* operator new (std::size_t size)
+{
+    return _heap_new_align(size, __STDCPP_DEFAULT_NEW_ALIGNMENT__, __builtin_return_address(0));
+}
+
+// new_opv
+void* operator new[] (std::size_t size)
+{
+    return _heap_new_align(size, __STDCPP_DEFAULT_NEW_ALIGNMENT__, __builtin_return_address(0));
+}
+
+// new_opnt
+void* operator new (size_t size, const std::nothrow_t&) noexcept
+{
+    __try {
+        return _heap_new_align(size, __STDCPP_DEFAULT_NEW_ALIGNMENT__, __builtin_return_address(0));
+    }
+    __catch (...) {
+        return nullptr;
+    }
+}
+
+// new_opvnt
+void* operator new[] (size_t size, const std::nothrow_t&) noexcept
+{
+    __try {
+        return _heap_new_align(size, __STDCPP_DEFAULT_NEW_ALIGNMENT__, __builtin_return_address(0));
+    }
+    __catch (...) {
+        return nullptr;
+    }
+}
+
+#else  // !UMM_ENABLE_MEMALIGN
+
+// Includes C++ exceptions
+// Without C++17 operator new align variants
+
+static void* _heap_new(std::size_t size, const void* caller)
+{
+    void* p;
+
+    while (nullptr == (p = _heap_abi_malloc(size, false, caller))) {
        new_handler handler = std::get_new_handler();
        if (!handler) {
            throw(bad_alloc());
@@ -102,19 +215,64 @@ void* operator new[] (size_t size, const std::nothrow_t&) noexcept
        return nullptr;
    }
}
-/*
-  TODO:
-  Current master does not support "new" align operations. Compiler reports:
-  "/workdir/repo/gcc-gnu/libstdc++-v3/libsupc++/new_opa.cc:86: undefined reference to `memalign'"
-  Look at enhancement to umm_malloc for an alignment option.
-*/
+#endif  // #if UMM_ENABLE_MEMALIGN

#elif !defined(__cpp_exceptions)
// When doing builds with C++ Exceptions "disabled", always save details of
// the last OOM event.

// overwrite weak operators new/new[] definitions

+
+#if defined(UMM_ENABLE_MEMALIGN)
+
+// Without C++ exceptions
+// Includes C++17 operator new align variants
+void* operator new (size_t size, std::align_val_t alignment)
+{
+    return _heap_abi_memalign(std::size_t(alignment), size, true, __builtin_return_address(0));
+}
+
+void* operator new[] (size_t size, std::align_val_t alignment)
+{
+    return _heap_abi_memalign(std::size_t(alignment), size, true, __builtin_return_address(0));
+}
+
+void* operator new (size_t size, std::align_val_t alignment, const std::nothrow_t&)
+{
+    return _heap_abi_memalign(std::size_t(alignment), size, false, __builtin_return_address(0));
+}
+
+void* operator new[] (size_t size, std::align_val_t alignment, const std::nothrow_t&)
+{
+    return _heap_abi_memalign(std::size_t(alignment), size, false, __builtin_return_address(0));
+}
+
+void* operator new (size_t size)
+{
+    return _heap_abi_memalign(__STDCPP_DEFAULT_NEW_ALIGNMENT__, size, true, __builtin_return_address(0));
+}
+
+void* operator new[] (size_t size)
+{
+    return _heap_abi_memalign(__STDCPP_DEFAULT_NEW_ALIGNMENT__, size, true, __builtin_return_address(0));
+}
+
+void* operator new (size_t size, const std::nothrow_t&)
+{
+    return _heap_abi_memalign(__STDCPP_DEFAULT_NEW_ALIGNMENT__, size, false, __builtin_return_address(0));
+}
+
+void* operator new[] (size_t size, const std::nothrow_t&)
+{
+    return _heap_abi_memalign(__STDCPP_DEFAULT_NEW_ALIGNMENT__, size, false, __builtin_return_address(0));
+}
+
+#else
+
+// Without C++ exceptions
+// Without C++17 operator new align variants
+
void* operator new (size_t size)
{
    return _heap_abi_malloc(size, true, __builtin_return_address(0));
@@ -134,8 +292,27 @@ void* operator new[] (size_t size, const std::nothrow_t&)
{
    return _heap_abi_malloc(size, false, __builtin_return_address(0));
}
+#endif  // #elif !defined(__cpp_exceptions) #if defined(UMM_ENABLE_MEMALIGN)
+#else
+/*
+  Using weak-link C++ Exception handlers in libstdc++.
+  The "new" operators that express alignment should work through libstdc++ via
+  memalign() in the umm_malloc library (a usage sketch follows the #endif
+  below).
+
+  This saves 20 bytes in the UMM_ENABLE_MEMALIGN=1 case and 32 bytes when
+  UMM_ENABLE_MEMALIGN=0.
+*/
+// D <<
+//C temporary pragmas - remove before merge
+#pragma message("Using weak-link C++ Exception handlers in libstdc++")
+#if UMM_ENABLE_MEMALIGN
+#pragma message("The \"new\" operators that express alignment should work through libstdc++ via memalign() in the umm_malloc library.")
+// D >>
+#endif
+
+#endif  // #if defined(__cpp_exceptions)
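/*
  A usage sketch of the C++17 "operators that express alignment" referenced
  above (the type name is hypothetical; illustrative only):

    struct alignas(64) DmaBuffer {
        char data[512];
    };

    // Assuming 64 exceeds __STDCPP_DEFAULT_NEW_ALIGNMENT__, this new-expression
    // calls operator new(std::size_t, std::align_val_t{64}), which reaches
    // memalign()/umm_memalign() either through this file or through libstdc++.
    DmaBuffer* buf = new DmaBuffer;
    delete buf;   // calls the matching aligned operator delete
*/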

-#endif // !defined(__cpp_exceptions)

void __cxa_pure_virtual(void)
{