@@ -59,22 +59,35 @@ exception_ptr current_exception() noexcept
return active_exception;
}

- inline void capture_cpu_core(ke::cortex_m_cpu& p_cpu_core)
+ /**
+  * @brief Captures the CPU registers needed for unwinding
+  *
+  * Registers captured are R4-R12 and SP. The LR (link) register is copied into
+  * the PC register of the `ke::cortex_m_cpu&`. The LR member variable of the
+  * input parameter is left as-is and will be updated as the unwinding proceeds.
+  */
+ [[gnu::naked, gnu::noinline]] void capture_cpu_core(ke::cortex_m_cpu&)
{
- register std::uint32_t* res asm("r3") = &p_cpu_core.r4.data;
-
- // We only capture r4 to r12 because __cxa_throw & __cxa_rethrow should be
- // normal functions, meaning they will not utilize the `sp = r[nnnn]`
- // instruction, meaning that the callee unpreserved registers can be left
- // alone.
- asm volatile("mov r0, pc\n"  // Move PC to r0 (before pipeline)
- "stmia r3, {r4-r12}\n"  // Store r4 to r12 into the array @ &r4
- "str sp, [r3, #36]\n"  // Store SP @ 36
- "str lr, [r3, #40]\n"  // Store LR @ 40
- "str r0, [r3, #44]\n"  // Store PC @ 44
- :  // no output
- : "r"(res)  // input is the address of the array
- : "memory", "r0");
+ asm volatile(
+ // The ARM calling convention states that arguments are passed in R0-R3.
+ // The single reference parameter p_cpu_core is in R0.
+ //
+ // Add 16 bytes to the address within R0 to skip over R0-R3. We assume
+ // `__cxa_throw` & `__cxa_rethrow` will never utilize the `sp = r[nnnn]`
+ // instruction, meaning that the non-callee-preserved registers can be
+ // skipped. R0 now points to `ke::cortex_m_cpu::r4`.
+ "add r0, #16\n"
+ // Store R4 to R12 into `ke::cortex_m_cpu::r4` to `ke::cortex_m_cpu::r12`.
+ // The ! means: increment R<N> by the number of words stored. R0 now points
+ // at the `ke::cortex_m_cpu&`'s SP register.
+ "stmia r0!, {r4-r12}\n"
+ // Store SP into [R0] (`ke::cortex_m_cpu&`'s SP register)
+ "str sp, [r0]\n"
+ // Store LR into [R0 + 8] (`ke::cortex_m_cpu&`'s PC register)
+ "str lr, [r0, #8]\n"
+ "bx lr\n"
+ "\n");
}
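
The byte offsets used by `capture_cpu_core` above (and by `restore_cpu_core` further down) only line up if `ke::cortex_m_cpu` is laid out as sixteen contiguous 32-bit words. The real definition is not part of this diff; the struct below is a hypothetical stand-in that simply makes those assumed offsets explicit.

#include <cstddef>
#include <cstdint>

// Hypothetical stand-in for ke::cortex_m_cpu (not the project's definition),
// showing the layout the assembly above depends on.
struct cortex_m_cpu_layout
{
  std::uint32_t r0, r1, r2, r3;
  std::uint32_t r4, r5, r6, r7, r8, r9, r10, r11, r12;
  std::uint32_t sp, lr, pc;
};

static_assert(sizeof(cortex_m_cpu_layout) == 16 * sizeof(std::uint32_t));
static_assert(offsetof(cortex_m_cpu_layout, r4) == 16);  // matches `add r0, #16`
static_assert(offsetof(cortex_m_cpu_layout, sp) == 52);  // where R0 points after the stmia writeback
static_assert(offsetof(cortex_m_cpu_layout, pc) == 60);  // the slot written by `str lr, [r0, #8]`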

struct index_less_than
@@ -92,26 +105,22 @@ struct index_less_than
{
return left < right.function();
}
+ bool operator()(std::uint32_t left, std::uint32_t right)
+ {
+ return left < right;
+ }
};

std::span<index_entry_t const> get_arm_exception_index()
{
- return { reinterpret_cast<index_entry_t const*>(&__exidx_start),
- reinterpret_cast<index_entry_t const*>(&__exidx_end) };
+ return { &__exidx_start, &__exidx_end };
}

- // [[gnu::used]] std::span<std::uint32_t const> get_arm_exception_table()
- // {
- //   return { &__extab_start, &__extab_end };
- // }
-
index_entry_t const& get_index_entry(std::uint32_t p_program_counter)
{
auto const index_table = get_arm_exception_index();
- auto const& index = std::upper_bound(index_table.begin(),
- index_table.end(),
- p_program_counter,
- index_less_than{});
+ auto const& index =
+ std::ranges::upper_bound(index_table, p_program_counter, index_less_than{});

if (index == index_table.begin()) {
return *index;
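
For context, the exception index is sorted by function start address, so `upper_bound` returns the first entry that starts after the program counter, and the entry just before it is the one that covers the PC (with the `begin()` check above guarding the very first entry). A self-contained sketch of the lookup pattern this function appears to use, with an invented `fake_entry` standing in for `index_entry_t`:

#include <algorithm>
#include <cstdint>
#include <iterator>
#include <span>

struct fake_entry
{
  std::uint32_t function_start;  // stand-in for index_entry_t::function()
};

fake_entry const& entry_covering(std::span<fake_entry const> p_table,
                                 std::uint32_t p_program_counter)
{
  // First entry whose start address is strictly greater than the PC.
  auto const it = std::ranges::upper_bound(
    p_table, p_program_counter, {}, &fake_entry::function_start);
  if (it == p_table.begin()) {
    return *it;  // PC precedes the first function; mirrors the guard above
  }
  return *std::prev(it);  // the previous entry's range contains the PC
}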
@@ -176,7 +185,8 @@ index_entry_t const& get_index_entry(std::uint32_t p_program_counter)
if ((sleb128 & 0x80) == 0x00) {
auto const bytes_consumed = i + 1;
auto const loaded_bits = bytes_consumed * leb128_bits;
- auto const ext_shift_amount = (32 - loaded_bits);
+ auto const ext_shift_amount =
+ static_cast<std::int32_t>(32L - loaded_bits);
// Shift to the left up to the signed MSB bit
result <<= ext_shift_amount;
// Arithmetic shift right to sign extend number
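
The shift pair above is the usual trick for sign-extending a value that only occupies `loaded_bits` of a 32-bit word: move the encoded sign bit up to bit 31, then arithmetic-shift it back down. A small standalone illustration (hypothetical helper name; both shifts on a signed 32-bit value are well defined in C++20):

#include <cstdint>

// Sign-extend the low (7 * p_bytes_consumed) bits of an SLEB128 accumulator.
constexpr std::int32_t sign_extend_sleb128(std::int32_t p_result,
                                           std::int32_t p_bytes_consumed)
{
  auto const loaded_bits = p_bytes_consumed * 7;
  auto const shift = 32 - loaded_bits;
  p_result <<= shift;  // encoded sign bit now sits at bit 31
  p_result >>= shift;  // arithmetic shift right smears it back down
  return p_result;
}

// The single byte 0x7E is the SLEB128 encoding of -2.
static_assert(sign_extend_sleb128(0x7E, 1) == -2);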
@@ -322,6 +332,7 @@ template<lsda_encoding encoding>

// Handle indirection GCC extension
if constexpr (static_cast<bool>(encoding & 0x80)) {
+ // NOLINTNEXTLINE(performance-no-int-to-ptr)
result = *reinterpret_cast<std::uintptr_t const*>(result);
}

@@ -377,42 +388,40 @@ template<lsda_encoding encoding>

// Handle indirection GCC extension
if (static_cast<bool>(p_encoding & 0x80)) {
+ // NOLINTNEXTLINE(performance-no-int-to-ptr)
result = *reinterpret_cast<std::uintptr_t const*>(result);
}

return result;
}

- inline void restore_cpu_core(ke::cortex_m_cpu& p_cpu_core)
+ [[gnu::naked]] [[noreturn]] inline void restore_cpu_core(cortex_m_cpu&)
{
- // Skip R2 because it is not used in the exception unwinding
- // Skip R3 because we are using it
- asm volatile("ldmia.w %[reg], {r0, r1}\n"  // R3 is incremented by 8
- "add %[reg], #16\n"  // Skip Past R2 + R3
- "ldmia.w %[reg], {r4, r5, r6, r7, r8, r9, r10, r11, r12}\n"
- "ldr sp, [%[reg], #36]\n"  // Load SP
- "ldr lr, [%[reg], #40]\n"  // Load LR
- "ldr pc, [%[reg], #44]\n"  // Load PC
- :
- : [reg] "r"(&p_cpu_core)
- : "memory",
- "r0",
- "r1",
- "r2",
- // skip r3 & use it as the offset register
- "r4",
- "r5",
- "r6",
- "r7",
- "fp",
- "r8",
- "r9",
- "r10",
- "r11",
- "r12",
- // sp skipped here as it is deprecated
- "lr",
- "pc");
+ asm volatile(
+ "\n"
+ // The ARM calling convention states that arguments are passed in R0-R3.
+ // The single reference parameter p_cpu_core is in R0.
+ // We move the address to R2, since we need to set R0 and R1 to the
+ // exception object's address and filter number, respectively.
+ "mov r2, r0\n"
+ // Since our cortex_m_cpu object has the layout of 16x u32 values, we can
+ // just iterate through the struct like an array.
+ //
+ // This instruction loads R0 and R1 with the contents addressed by R2.
+ "ldmia.w r2, {r0, r1}\n"
+ // Move the address forward by 16 bytes (4x 32-bit words), which skips past
+ // R0 to R3. R2 now points to R4 within the `cortex_m_cpu` object.
+ "add r2, #16\n"
+ // Load R4 to R12 from `cortex_m_cpu` and increment the address as we go.
+ // R2 now points to the SP register.
+ "ldmia.w r2!, {r4, r5, r6, r7, r8, r9, r10, r11, r12}\n"
+ // Unfortunately SP cannot be in the list above, so we set it here.
+ "ldr sp, [r2]\n"
+ // Load LR
+ "ldr lr, [r2, #4]\n"
+ // Load PC which will jump us to the landing pad
+ "ldr pc, [r2, #8]\n"
+ "");
}

inline void skip_dwarf_info(std::uint8_t const** p_lsda)
@@ -534,16 +543,19 @@ inline call_site_info parse_uleb128_call_site(
class action_decoder
{
public:
+ // NOLINTBEGIN(bugprone-easily-swappable-parameters)
action_decoder(std::uint8_t const* p_type_table_end,
std::uint8_t const* p_end_of_callsite,
std::uint32_t p_action)
+ // NOLINTEND(bugprone-easily-swappable-parameters)
: m_type_table_end(p_type_table_end)
, m_action_position(p_end_of_callsite + (p_action - 1))
{
}

static std::type_info const* to_type_info(void const* p_type_info_address)
{
+ // NOLINTNEXTLINE(performance-no-int-to-ptr)
return reinterpret_cast<std::type_info const*>(
to_absolute_address(p_type_info_address));
}
@@ -563,11 +575,12 @@ class action_decoder
// prel31_offsets
auto const* current_type = &type_table[-m_filter];

- if (*current_type == 0x0) {
+ if (*current_type == nullptr) {
return install_context_type();
}

- auto const* test = to_absolute_address_ptr(current_type);
+ auto const* test =
+ to_absolute_address_ptr(static_cast<void const*>(current_type));
return reinterpret_cast<std::type_info const*>(test);
}
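
For intuition, the LSDA type table is indexed backward from its end: filter value N selects the Nth entry before the table end, and a zero entry is conventionally how a catch-all is encoded, which is why a null `current_type` maps to `install_context_type()` above. A tiny self-contained illustration with invented data (nothing from a real table):

#include <array>
#include <cstdint>

constexpr std::array<std::uint32_t, 3> fake_type_table{
  0x1000,  // selected by filter 3
  0x2000,  // selected by filter 2
  0x0000,  // selected by filter 1: a null entry, i.e. the catch-all case
};
constexpr std::uint32_t const* fake_table_end =
  fake_type_table.data() + fake_type_table.size();

static_assert(fake_table_end[-1] == 0x0000);
static_assert(fake_table_end[-2] == 0x2000);
static_assert(fake_table_end[-3] == 0x1000);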
@@ -694,11 +707,10 @@ inline void enter_function(exception_object& p_exception_object)
type_info = a_decoder.get_next_catch_type()) {

// This is our dynamic cast :P
- auto position = std::find_if(p_exception_object.type_info.begin(),
- p_exception_object.type_info.end(),
- [&type_info](auto const& element) -> bool {
- return element.type_info == type_info;
- });
+ auto position = std::ranges::find_if(
+ p_exception_object.type_info, [&type_info](auto const& element) -> bool {
+ return element.type_info == type_info;
+ });

if (position == p_exception_object.type_info.end() &&
type_info != action_decoder::install_context_type()) {
@@ -728,7 +740,7 @@ constexpr std::uint32_t vsp_deallocate_amount()
return Amount + 1;
}

- enum class pop_lr
+ enum class pop_lr : std::uint8_t
{
skip = 0,
do_it = 1,
@@ -1778,6 +1790,8 @@ void raise_exception(exception_object& p_exception_object)
}
} // namespace ke

+ // NOLINTBEGIN(bugprone-reserved-identifier)
+ // NOLINTBEGIN(readability-identifier-naming)
extern "C"
{
// mangled name for vtable for __cxxabiv1::__class_type_info
@@ -1787,9 +1801,11 @@ extern "C"
// mangled name for vtable for __cxxabiv1::__vmi_class_type_info
extern void* _ZTVN10__cxxabiv121__vmi_class_type_infoE[];
}
+ // NOLINTEND(readability-identifier-naming)
+ // NOLINTEND(bugprone-reserved-identifier)

namespace ke {
- enum class rtti_type
+ enum class rtti_type : std::uint8_t
{
class_type,
single_inheritance,
@@ -1803,6 +1819,7 @@ rtti_type get_rtti_type(void const* p_type_info)
reinterpret_cast<std::uint32_t const*>(p_type_info);
auto const vtable_method_location = word_pointer[0];
auto const vtable_start = vtable_method_location - 8;
+ // NOLINTNEXTLINE(performance-no-int-to-ptr)
auto const* vtable_address = reinterpret_cast<void const*>(vtable_start);

if (vtable_address == &_ZTVN10__cxxabiv117__class_type_infoE) {
@@ -1825,6 +1842,7 @@ std::type_info const* extract_si_parent_info(void const* p_info)
auto const* word_pointer = reinterpret_cast<std::uint32_t const*>(p_info);
auto const address = word_pointer[parent_info_address];

+ // NOLINTNEXTLINE(performance-no-int-to-ptr)
return reinterpret_cast<std::type_info const*>(address);
}
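
The word index used here reflects the Itanium C++ ABI layout of `__si_class_type_info`: a `std::type_info` subobject (vtable pointer, then name pointer) followed directly by the pointer to the base class's `type_info`. A hypothetical mirror of that 32-bit word layout, purely for illustration and not the ABI's actual class definition:

struct si_class_type_info_words
{
  void const* vtable_pointer;   // word 0: std::type_info vptr
  char const* name;             // word 1: std::type_info name pointer
  void const* base_type_info;   // word 2: pointer to the parent's type_info
};

static_assert(sizeof(si_class_type_info_words) == 3 * sizeof(void*));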
@@ -1856,6 +1874,7 @@ void push_vmi_info(ke::exception_ptr p_thrown_exception,
}

auto const offset = offset_flags >> 8;
+ // NOLINTNEXTLINE(performance-no-int-to-ptr)
parent_info.type_info = reinterpret_cast<void*>(parent_address);
if (offset >= 0) {
// Shift the lower 8-bits of flag information
@@ -1927,15 +1946,18 @@ namespace {
bool const volatile libhal_convince_compiler_to_emit_metadata = false;
}

+ // NOLINTBEGIN(bugprone-reserved-identifier)
extern "C"
{
void _exit([[maybe_unused]] int rc) // NOLINT
{
std::terminate();
}
- // TODO(#42): Use the application's polymorphic allocator, not our own space.
+ // NOLINTNEXTLINE(readability-identifier-naming)
void* __wrap___cxa_allocate_exception(size_t p_thrown_size)
{
+ // TODO(#42): Use the application's polymorphic allocator, not our own
+ // space.
if (p_thrown_size >
ke::exception_buffer.size() + sizeof(ke::exception_object)) {
std::terminate();
@@ -1944,16 +1966,19 @@ extern "C"
return ke::exception_buffer.data() + sizeof(ke::exception_object);
}

+ // NOLINTNEXTLINE(readability-identifier-naming)
void __wrap___cxa_free_exception([[maybe_unused]] void* p_thrown_exception)
{
ke::exception_buffer.fill(0);
}

- void __wrap___cxa_call_unexpected(void*) // NOLINT
+ // NOLINTNEXTLINE(readability-identifier-naming)
+ void __wrap___cxa_call_unexpected(void*)
{
std::terminate();
}

+ // NOLINTNEXTLINE(readability-identifier-naming)
void __wrap___cxa_end_catch()
{
auto& exception_object = ke::extract_exception_object(ke::active_exception);
@@ -1964,13 +1989,15 @@ extern "C"
}
}

+ // NOLINTNEXTLINE(readability-identifier-naming)
void* __wrap___cxa_begin_catch(void* p_exception_object)
{
auto* eo = reinterpret_cast<ke::exception_object*>(p_exception_object);
auto* thrown_object = ke::extract_thrown_object(eo);
return thrown_object;
}

+ // NOLINTNEXTLINE(readability-identifier-naming)
void __wrap___cxa_end_cleanup()
{
auto& exception_object = ke::extract_exception_object(ke::active_exception);
@@ -1980,12 +2007,15 @@ extern "C"
std::terminate();
}

+ // NOLINTBEGIN(readability-identifier-naming)
[[gnu::used]]
void __wrap__Unwind_Resume(void*)
+ // NOLINTEND(readability-identifier-naming)
{
__wrap___cxa_end_cleanup();
}

+ // NOLINTNEXTLINE(readability-identifier-naming)
void __wrap___cxa_rethrow() noexcept(false)
{
auto& exception_object = ke::extract_exception_object(ke::active_exception);
@@ -2012,6 +2042,7 @@ extern "C"
std::terminate();
}

+ // NOLINTNEXTLINE(readability-identifier-naming)
void __wrap___cxa_throw(ke::exception_ptr p_thrown_exception,
std::type_info* p_type_info,
ke::destructor_t p_destructor) noexcept(false)
@@ -2020,6 +2051,7 @@ extern "C"
auto& exception_object = ke::extract_exception_object(p_thrown_exception);
exception_object.destructor = p_destructor;
ke::capture_cpu_core(exception_object.cpu);
+
ke::flatten_rtti<12>(
p_thrown_exception, exception_object.type_info, p_type_info);

@@ -2040,3 +2072,4 @@ extern "C"
std::terminate();
}
} // extern "C"
+ // NOLINTEND(bugprone-reserved-identifier)