ports/psoc6/machine_i2s.c: WIP refactoring irq buffer transfers.
Signed-off-by: enriquezgarc <enriquezgarcia.external@infineon.com>
jaenrig-ifx committed Mar 18, 2024
1 parent adbaf3e commit 34980f0
Showing 1 changed file with 139 additions and 61 deletions.
ports/psoc6/machine_i2s.c
@@ -146,7 +146,43 @@ void clock_init(void) {
}
}

static void i2s_dma_rx_empty(machine_i2s_obj_t *self) {
static inline bool i2s_dma_is_tx_complete(cyhal_i2s_event_t event) {
return 0u != (event & CYHAL_I2S_ASYNC_TX_COMPLETE);
}

static inline bool i2s_dma_is_rx_complete(cyhal_i2s_event_t event) {
return 0u != (event & CYHAL_I2S_ASYNC_RX_COMPLETE);
}

static inline void i2s_dma_rx(machine_i2s_obj_t *self) {
cy_rslt_t result = cyhal_i2s_read_async(&self->i2s_obj, self->dma_buffer + self->dma_active_half_index, SIZEOF_HALF_DMA_BUFFER_IN_BYTES);
i2s_assert_raise_val("I2S DMA read failed with return code %lx !", result);
}

static inline void i2s_dma_tx(machine_i2s_obj_t *self) {
cy_rslt_t result = cyhal_i2s_write_async(&self->i2s_obj, self->dma_buffer + self->dma_active_half_index, SIZEOF_HALF_DMA_BUFFER_IN_BYTES);
i2s_assert_raise_val("I2S DMA write configure failed with return code %lx !", result);
}

static inline void i2s_dma_swap_active_dmabuf(machine_i2s_obj_t *self) {
if (self->dma_active_half_index == 0) {
self->dma_active_half_index = SIZEOF_HALF_DMA_BUFFER_IN_BYTES;
} else if (self->dma_active_half_index == SIZEOF_HALF_DMA_BUFFER_IN_BYTES) {
self->dma_active_half_index = 0;
}
}

static inline uint8_t *i2s_get_dma_idle_buffer(machine_i2s_obj_t *self) {
if (self->dma_active_half_index == 0) {
return &(self->dma_buffer[SIZEOF_HALF_DMA_BUFFER_IN_BYTES]);
} else if (self->dma_active_half_index == SIZEOF_HALF_DMA_BUFFER_IN_BYTES) {
return &(self->dma_buffer[0]);
} else {
return NULL; // should never get here.
}
}
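
These four helpers implement a ping-pong scheme over a single dma_buffer split into two halves: while the DMA engine works on the active half, the CPU fills or drains the idle half, and the two swap on every completion event. A minimal standalone sketch of that index alternation (HALF and the function names below are illustrative stand-ins, not part of this port):

#include <stddef.h>
#include <stdint.h>

#define HALF 32 // stand-in for SIZEOF_HALF_DMA_BUFFER_IN_BYTES

static size_t active_offset; // byte offset of the half the DMA currently owns

// Same toggle as i2s_dma_swap_active_dmabuf: 0 <-> HALF.
static void swap_halves(void) {
    active_offset = (active_offset == 0) ? HALF : 0;
}

// Same idea as i2s_get_dma_idle_buffer: the CPU-owned half is always the other one.
static uint8_t *idle_half(uint8_t *dma_buffer) {
    return dma_buffer + ((active_offset == 0) ? HALF : 0);
}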

static void i2s_dma_from_dmabuf_to_ringbuf(machine_i2s_obj_t *self) {

// TODO: convert the raw DMA data to the extmod 8-byte frame format:
// little-endian, taking into account the channel width in bits and the word width (sample resolution)
@@ -166,7 +202,7 @@ static void i2s_dma_rx_empty(machine_i2s_obj_t *self) {
// 0, 1, -1, -1, -1, -1, -1, -1
// //if 32 bits
// 0, 1, 2, 3, -1, -1, -1 -1
uint8_t *dma_buff_p = self->dma_buffer + self->dma_active_half_index;
uint8_t *dma_buff_p = i2s_get_dma_idle_buffer(self);
uint32_t num_bytes_needed_from_ringbuf = SIZEOF_HALF_DMA_BUFFER_IN_BYTES * (I2S_RX_FRAME_SIZE_IN_BYTES / dma_sample_size_in_bytes);

// when space exists, copy samples into ring buffer
@@ -175,9 +211,9 @@ static void i2s_dma_tx_feed(machine_i2s_obj_t *self) {
for (uint32_t i = 0; i < SIZEOF_HALF_DMA_BUFFER_IN_BYTES; i += dma_sample_size_in_bytes) {
for (uint8_t j = 0; j < I2S_RX_FRAME_SIZE_IN_BYTES; j++) {
int8_t r_to_a_mapping = i2s_frame_map[f_index][j];
if (r_to_a_mapping != -1) { // r_a_mapping == -1
if (r_to_a_mapping != -1) {
ringbuf_push(&self->ring_buffer, dma_buff_p[i + r_to_a_mapping]);
} else {
} else { // r_a_mapping == -1
ringbuf_push(&self->ring_buffer, 0);
}
}
@@ -186,77 +222,119 @@
}
}
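
The TODO comment in the function above describes a per-sample-width byte map: each row lists, for every byte of the 8-byte extmod frame, which byte of the raw DMA sample to copy, with -1 meaning pad with zero. Purely for illustration, a table shaped like the rows quoted in that comment could look as follows (an assumption modeled on the i2s_frame_map tables of other MicroPython i2s ports, not this port's final definition):

// Hypothetical example table; -1 entries become zero padding in the ring buffer.
#define EXAMPLE_FRAME_SIZE_IN_BYTES 8

static const int8_t i2s_frame_map_example[2][EXAMPLE_FRAME_SIZE_IN_BYTES] = {
    { 0, 1, -1, -1, -1, -1, -1, -1 }, // 16-bit samples: 2 valid bytes, 6 pad bytes
    { 0, 1,  2,  3, -1, -1, -1, -1 }, // 32-bit samples: 4 valid bytes, 4 pad bytes
};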

static void i2s_dma_tx_feed(machine_i2s_obj_t *self) {
// mono has to copy the left channel into the right

// stereo
static void i2s_dma_from_ringbuf_to_dmabuf(machine_i2s_obj_t *self) {
if (ringbuf_available_data(&self->ring_buffer) >= SIZEOF_HALF_DMA_BUFFER_IN_BYTES) {
uint8_t *dma_buffer_p = i2s_get_dma_idle_buffer(self);
// Mono samples duplicate the sample in the right channel
if (self->mode == MONO) {
uint8_t bytes_per_sample = (self->bits == 16 ? 2 : 4);
for (uint32_t i = 0; i < SIZEOF_HALF_DMA_BUFFER_IN_BYTES; i += (2 * bytes_per_sample)) {
for (uint8_t b = 0; b < bytes_per_sample; b++) {
ringbuf_pop(&self->ring_buffer, &dma_buffer_p[i + b]);
dma_buffer_p[i + b + bytes_per_sample] = dma_buffer_p[i + b]; // duplicate left sample into the right channel slot
}
}
}
} else if (self->mode == STEREO) {
for (uint32_t i = 0; i < SIZEOF_HALF_DMA_BUFFER_IN_BYTES; i++) {
ringbuf_pop(&self->ring_buffer, &dma_buffer_p[i]);
}
}
}
}
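
The MONO branch above pops each sample once from the ring buffer and mirrors it into the adjacent right-channel slot of the same frame. A standalone sketch of the resulting layout for 16-bit samples (the function below is illustrative, not part of the port):

#include <stddef.h>
#include <stdint.h>

// Expand n 16-bit mono samples into interleaved stereo frames:
// in:  s0 s1 s2 ...    out: s0 s0 s1 s1 s2 s2 ...
static void mono16_to_stereo_frames(const int16_t *mono, int16_t *frames, size_t n) {
    for (size_t i = 0; i < n; i++) {
        frames[2 * i] = mono[i];     // left channel slot
        frames[2 * i + 1] = mono[i]; // right channel slot (duplicate)
    }
}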

static void i2s_dma_irq_handler(void *arg, cyhal_i2s_event_t event) {
machine_i2s_obj_t *self = (machine_i2s_obj_t *)arg;
if (self->mode == TX /*&& event*/) {

// for non-blocking operation handle the write() method requests.
static void i2s_dma_tx_event_process(machine_i2s_obj_t *self, cyhal_i2s_event_t event) {
if (i2s_dma_is_tx_complete(event)) {
if ((self->io_mode == NON_BLOCKING) && (self->non_blocking_descriptor.copy_in_progress)) {
copy_appbuf_to_ringbuf_non_blocking(self);
}

// TX
// if (i2s_dma_is_tx_complete_event())
// {

// uint8_t dma_tx_emptied_index_p = i2s_dma_get_emptied_dmabuf_index();
// i2s_dma_swap_active_dmabuf();
// i2s_dma_from_ringbuf_to_dmabuf();
// }
if (0u != (event & CYHAL_I2S_ASYNC_TX_COMPLETE)) {

// Swap the active buffer half where the dma transfers
if (self->dma_active_half_index == 0) {
self->dma_active_half_index = SIZEOF_HALF_DMA_BUFFER_IN_BYTES;
} else if (self->dma_active_half_index == SIZEOF_HALF_DMA_BUFFER_IN_BYTES) {
self->dma_active_half_index = 0;
}

// dma has emptied the tx buffer
// update the ringbuffer pos where to keep writing

// for non_blocking we feed the ringbuffer first
// feed_dma()
}
} else if (1 /*&& event*/) {
// RX
// dma has filled the rx buffer
// update the ringbuffer pos where to keep reading
// empty_dma()
// if (i2s_dma_is_rx_complete_event())
// {
// uint8_t dma_rx_filled_index_p = i2s_dma_get_filled_dmabuf_index();
// i2s_dma_swap_active_dmabuf();
// i2s_dma_from_dmabuf_to_ringbuf();
// }
if (0u != (event & CYHAL_I2S_ASYNC_RX_COMPLETE)) {
// uint8_t dma_rx_filled_index_p = self->dma_active_half_index;
// Swap the active buffer half where the dma transfers
if (self->dma_active_half_index == 0) {
self->dma_active_half_index = SIZEOF_HALF_DMA_BUFFER_IN_BYTES;
} else if (self->dma_active_half_index == SIZEOF_HALF_DMA_BUFFER_IN_BYTES) {
self->dma_active_half_index = 0;
}
i2s_dma_swap_active_dmabuf(self);
i2s_dma_tx(self);
i2s_dma_from_ringbuf_to_dmabuf(self);
}
}

cy_rslt_t result = cyhal_i2s_read_async(&self->i2s_obj, self->dma_buffer + self->dma_active_half_index, SIZEOF_HALF_DMA_BUFFER_IN_BYTES);
i2s_assert_raise_val("I2S DMA read configure failed with return code %lx !", result);
}
static void i2s_dma_rx_event_process(machine_i2s_obj_t *self, cyhal_i2s_event_t event) {
if (i2s_dma_is_rx_complete(event)) {
i2s_dma_swap_active_dmabuf(self);
i2s_dma_rx(self);
i2s_dma_from_dmabuf_to_ringbuf(self);

i2s_dma_rx_empty(self);
// for non_blocking we already feed the app_buff
// for non-blocking operation handle the readinto() method requests.
if ((self->io_mode == NON_BLOCKING) && (self->non_blocking_descriptor.copy_in_progress)) {
fill_appbuf_from_ringbuf_non_blocking(self);
}
}
}
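
fill_appbuf_from_ringbuf_non_blocking is defined elsewhere in this file and does not appear in this diff. As a rough sketch only, a drain of that shape in the shared extmod i2s pattern might look like the following, where every non_blocking_descriptor field name is an assumption carried over from other MicroPython ports rather than from this diff:

// Assumed descriptor fields: appbuf (mp_buffer_info_t), index, copy_in_progress.
static void example_drain_to_appbuf(machine_i2s_obj_t *self) {
    uint8_t *app = (uint8_t *)self->non_blocking_descriptor.appbuf.buf;
    while (self->non_blocking_descriptor.index < self->non_blocking_descriptor.appbuf.len &&
           ringbuf_available_data(&self->ring_buffer) > 0) {
        // Move one byte per iteration from the ring buffer into the app buffer.
        ringbuf_pop(&self->ring_buffer, &app[self->non_blocking_descriptor.index]);
        self->non_blocking_descriptor.index++;
    }
    if (self->non_blocking_descriptor.index >= self->non_blocking_descriptor.appbuf.len) {
        self->non_blocking_descriptor.copy_in_progress = false; // readinto() request satisfied
    }
}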

static void i2s_dma_irq_handler(void *arg, cyhal_i2s_event_t event) {
machine_i2s_obj_t *self = (machine_i2s_obj_t *)arg;

void (*i2s_dma_event_process_func)(machine_i2s_obj_t *, cyhal_i2s_event_t) = NULL;
if (self->mode == TX) {
i2s_dma_event_process_func = i2s_dma_tx_event_process;
} else if (self->mode == RX) {
i2s_dma_event_process_func = i2s_dma_rx_event_process;
}

if (i2s_dma_event_process_func != NULL) {
i2s_dma_event_process_func(self, event);
}

// // for non-blocking operation handle the write() method requests.

// i2s_dma_tx_event_process(event)
// // TX
// if (i2s_dma_is_tx_complete(event))
// {
// i2s_dma_swap_active_dmabuf();
// i2s_dma_from_ringbuf_to_dmabuf();

// // }
// // if (0u != (event & CYHAL_I2S_ASYNC_TX_COMPLETE)) {

// // // Swap the active buffer half where the dma transfers
// // if (self->dma_active_half_index == 0) {
// // self->dma_active_half_index = SIZEOF_HALF_DMA_BUFFER_IN_BYTES;
// // } else if (self->dma_active_half_index == SIZEOF_HALF_DMA_BUFFER_IN_BYTES) {
// // self->dma_active_half_index = 0;
// // }

// // dma has emptied the tx buffer
// // update the ringbuffer pos where to keep writing

// // for non_blocking we feed the ringbuffer first
// // feed_dma()
// } else if (1 /*&& event*/) {
// // RX
// // dma has filled the rx buffer
// // update the ringbuffer pos where to keep reading
// // empty_dma()
// // if (i2s_dma_is_rx_complete_event())
// // {
// // uint8_t dma_rx_filled_index_p = i2s_dma_get_filled_dmabuf_index();
// // i2s_dma_swap_active_dmabuf();
// // i2s_dma_from_dmabuf_to_ringbuf();
// // }
// if (0u != (event & CYHAL_I2S_ASYNC_RX_COMPLETE)) {
// // uint8_t dma_rx_filled_index_p = self->dma_active_half_index;
// // Swap the active buffer half where the dma transfers
// if (self->dma_active_half_index == 0) {
// self->dma_active_half_index = SIZEOF_HALF_DMA_BUFFER_IN_BYTES;
// } else if (self->dma_active_half_index == SIZEOF_HALF_DMA_BUFFER_IN_BYTES) {
// self->dma_active_half_index = 0;
// }

// cy_rslt_t result = cyhal_i2s_read_async(&self->i2s_obj, self->dma_buffer + self->dma_active_half_index, SIZEOF_HALF_DMA_BUFFER_IN_BYTES);
// i2s_assert_raise_val("I2S DMA read configure failed with return code %lx !", result);
// }

// i2s_dma_rx_empty(self);
// // for non_blocking we already feed the app_buff
// // for non-blocking operation handle the readinto() method requests.
// if ((self->io_mode == NON_BLOCKING) && (self->non_blocking_descriptor.copy_in_progress)) {
// fill_appbuf_from_ringbuf_non_blocking(self);
// }
// }
}
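
A handler with this signature is registered through the HAL event API; the actual registration happens in this port's init code outside this diff. A hedged sketch of that wiring, with the event mask and priority chosen here as assumptions:

static void i2s_dma_irq_setup(machine_i2s_obj_t *self) {
    // Route I2S async events to the handler above, passing self back as context.
    cyhal_i2s_register_callback(&self->i2s_obj, &i2s_dma_irq_handler, self);
    // Enable only the TX/RX completion events the handler consumes.
    cyhal_i2s_enable_event(&self->i2s_obj,
        (cyhal_i2s_event_t)(CYHAL_I2S_ASYNC_TX_COMPLETE | CYHAL_I2S_ASYNC_RX_COMPLETE),
        CYHAL_ISR_PRIORITY_DEFAULT, true);
}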

static void i2s_init(machine_i2s_obj_t *self, cyhal_clock_t *clock) {
cyhal_i2s_pins_t pins = { .sck = self->sck, .ws = self->ws, .data = self->sd, .mclk = NC };
cyhal_i2s_config_t config =
