@@ -139,8 +139,8 @@ typedef struct {
     uint8_t buffer[NUMBER_OF_BANKS][DMA_BUFFER_SIZE];
     uint32_t usage_counter;
     uint8_t tx_data;
-    volatile uint8_t tx_in_progress;
-    volatile uint8_t rx_in_progress;
+    bool tx_in_progress;
+    bool rx_in_progress;
    bool tx_asynch;
    bool rx_asynch;
    bool callback_posted;
@@ -253,7 +253,7 @@ static void nordic_nrf5_uart_callback_handler(uint32_t instance)
 static void nordic_nrf5_uart_event_handler_endtx(int instance)
 {
     /* Release mutex. As the owner this call is safe. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, false);

     /* Check if callback handler and Tx event mask is set. */
     uart_irq_handler callback = (uart_irq_handler) nordic_nrf5_uart_state[instance].owner->handler;
@@ -276,8 +276,8 @@ static void nordic_nrf5_uart_event_handler_endtx(int instance)
 static void nordic_nrf5_uart_event_handler_endtx_asynch(int instance)
 {
     /* Set Tx done and reset Tx mode to be not asynchronous. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
     nordic_nrf5_uart_state[instance].tx_asynch = false;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, false);

     /* Cast handler to callback function pointer. */
     void (*callback)(void) = (void (*)(void)) nordic_nrf5_uart_state[instance].owner->tx_handler;
@@ -483,8 +483,8 @@ static void nordic_nrf5_uart_event_handler_rxstarted(int instance)
 static void nordic_nrf5_uart_event_handler_endrx_asynch(int instance)
 {
     /* Set Rx done and reset Rx mode to be not asynchronous. */
-    nordic_nrf5_uart_state[instance].rx_in_progress = 0;
     nordic_nrf5_uart_state[instance].rx_asynch = false;
+    core_util_atomic_store_bool(&nordic_nrf5_uart_state[instance].rx_in_progress, false);

     /* Cast handler to callback function pointer. */
     void (*callback)(void) = (void (*)(void)) nordic_nrf5_uart_state[instance].owner->rx_handler;
@@ -1411,7 +1411,7 @@ int serial_writable(serial_t *obj)

     int instance = uart_object->instance;

-    return ((nordic_nrf5_uart_state[instance].tx_in_progress == 0) &&
+    return (!core_util_atomic_load_bool(&nordic_nrf5_uart_state[instance].tx_in_progress) &&
            (nrf_uarte_event_extra_check(nordic_nrf5_uart_register[instance], NRF_UARTE_EVENT_TXDRDY)));
 }

@@ -1470,16 +1470,14 @@ int serial_tx_asynch(serial_t *obj, const void *tx, size_t tx_length, uint8_t tx

     /**
      * tx_in_progress acts like a mutex to ensure only one transmission can be active at a time.
-     * The flag is modified using the atomic compare-and-set function.
+     * The flag is modified using the atomic exchange function - only proceed when we see the
+     * flag clear and we set it to true.
      */
-    bool mutex = false;
+    bool old_mutex;

     do {
-        uint8_t expected = 0;
-        uint8_t desired = 1;
-
-        mutex = core_util_atomic_cas_u8((uint8_t *) &nordic_nrf5_uart_state[instance].tx_in_progress, &expected, desired);
-    } while (mutex == false);
+        old_mutex = core_util_atomic_exchange_bool(&nordic_nrf5_uart_state[instance].tx_in_progress, true);
+    } while (old_mutex == true);

     /* State variables. */
     int result = 0;
@@ -1596,16 +1594,14 @@ void serial_rx_asynch(serial_t *obj, void *rx, size_t rx_length, uint8_t rx_widt

     /**
      * rx_in_progress acts like a mutex to ensure only one asynchronous reception can be active at a time.
-     * The flag is modified using the atomic compare-and-set function.
+     * The flag is modified using the atomic exchange function - only proceed when we see the
+     * flag clear and we set it to true.
      */
-    bool mutex = false;
+    bool old_mutex;

     do {
-        uint8_t expected = 0;
-        uint8_t desired = 1;
-
-        mutex = core_util_atomic_cas_u8((uint8_t *) &nordic_nrf5_uart_state[instance].rx_in_progress, &expected, desired);
-    } while (mutex == false);
+        old_mutex = core_util_atomic_exchange_bool(&nordic_nrf5_uart_state[instance].rx_in_progress, true);
+    } while (old_mutex == true);

     /* Store callback handler, mask and reset event value. */
     obj->serial.rx_handler = handler;
@@ -1684,8 +1680,8 @@ void serial_tx_abort_asynch(serial_t *obj)
     nrf_uarte_event_clear(nordic_nrf5_uart_register[instance], NRF_UARTE_EVENT_ENDTX);

     /* Reset Tx flags. */
-    nordic_nrf5_uart_state[instance].tx_in_progress = 0;
     nordic_nrf5_uart_state[instance].tx_asynch = false;
+    nordic_nrf5_uart_state[instance].tx_in_progress = false;

     /* Force reconfiguration. */
     obj->serial.update = true;
@@ -1712,8 +1708,8 @@ void serial_rx_abort_asynch(serial_t *obj)
     core_util_critical_section_enter();

     /* Reset Rx flags. */
-    nordic_nrf5_uart_state[obj->serial.instance].rx_in_progress = 0;
     nordic_nrf5_uart_state[obj->serial.instance].rx_asynch = false;
+    nordic_nrf5_uart_state[obj->serial.instance].rx_in_progress = false;
     obj->serial.rx_asynch = false;

     /* Force reconfiguration. */
0 commit comments