3#if SOC_GDMA_SUPPORTED && SOC_UHCI_SUPPORTED
10#include "esp_heap_caps.h"
11#include "esp_memory_utils.h"
12#include "esp_private/periph_ctrl.h"
13#include "hal/uhci_ll.h"
14#include "soc/ext_mem_defs.h"
// --- Tuning constants for the UHCI/GDMA UART DMA backend. ---
// Number of RX descriptor nodes in the circular GDMA link list.
20constexpr uint32_t kDmaRxNodeCount = 8;
// Largest buffer a single GDMA link item may describe (4095 bytes).
21constexpr size_t kDmaMaxBufferSizePerLinkItem = 4095U;
// NOTE(review): the enclosing GDMA descriptor struct definition is only
// partially visible in this excerpt; the two fields below are reserved
// bit-field padding inside it.
29 uint32_t reserved24 : 4;
31 uint32_t reserved29 : 1;
// Values for the descriptor `owner` bit: CPU-owned vs DMA-owned
// (StartDmaTx writes kGdmaOwnerDma before kicking off a transfer).
39constexpr uint32_t kGdmaOwnerCpu = 0U;
40constexpr uint32_t kGdmaOwnerDma = 1U;
// Rounds `value` up to the nearest multiple of `align`.
//
// @param value  The size to round up.
// @param align  The alignment granule. A value of 0 previously caused a
//               division by zero (UB); it is now treated as "no alignment"
//               and returns `value` unchanged.
// @return The smallest multiple of `align` that is >= `value`.
//
// Note: `value + align - 1` can wrap for values near SIZE_MAX; callers in
// this file only pass small buffer/chunk sizes.
size_t AlignUp(
    size_t value,
    size_t align)
{
    if (align == 0U)
    {
        return value;
    }
    return ((value + align - 1) / align) * align;
}
// Maps a cacheable data address to its non-cacheable alias so DMA descriptors
// can be written without going through the data cache.
// NOTE(review): the #else fallback / #endif for targets without
// SOC_NON_CACHEABLE_OFFSET lies outside this excerpt.
51uintptr_t CacheAddrToNonCache(uintptr_t addr)
53#if SOC_NON_CACHEABLE_OFFSET
54 return addr + SOC_NON_CACHEABLE_OFFSET;
60GdmaLinkItem* LinkItemFromHeadAddr(uintptr_t head_addr)
62 return reinterpret_cast<GdmaLinkItem*
>(CacheAddrToNonCache(head_addr));
// Cache maintenance is only required on targets where DMA buffers can sit
// behind a cache (internal memory via L1 cache, or DMA-capable PSRAM).
65#if SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE || SOC_PSRAM_DMA_CAPABLE
// Forward declaration of the ESP-IDF cache synchronization primitive.
66extern "C" esp_err_t esp_cache_msync(
void* addr,
size_t size,
int flags);
// NOTE(review): these mirror the ESP_CACHE_MSYNC_FLAG_* bit values — confirm
// they match the ESP-IDF version this file builds against.
68constexpr int kCacheSyncFlagUnaligned = (1 << 1);
69constexpr int kCacheSyncFlagDirC2M = (1 << 2);
70constexpr int kCacheSyncFlagDirM2C = (1 << 3);
// Synchronizes a DMA buffer with the cache. `cache_to_mem` selects the sync
// direction: cache-to-memory (before TX) vs memory-to-cache (after RX).
// NOTE(review): the bodies of the early-return guards below are outside this
// excerpt.
72bool CacheSyncDmaBuffer(
const void* addr,
size_t size,
bool cache_to_mem)
74 if ((addr ==
nullptr) || (size == 0U))
// On targets where only PSRAM is cached, internal-RAM buffers need no sync.
79#if SOC_PSRAM_DMA_CAPABLE && !SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE
80 if (!esp_ptr_external_ram(addr))
86 int flags = cache_to_mem ? kCacheSyncFlagDirC2M : kCacheSyncFlagDirM2C;
// Buffers here are not required to be cache-line aligned.
87 flags |= kCacheSyncFlagUnaligned;
89 const esp_err_t ret = esp_cache_msync(
const_cast<void*
>(addr), size, flags);
// NOTE(review): ESP_ERR_INVALID_ARG is deliberately treated as success —
// presumably for address ranges that are not cached at all; confirm.
91 return (ret == ESP_OK) || (ret == ESP_ERR_INVALID_ARG);
// ISR-context trampoline for GDMA TX end-of-frame events; recovers the owning
// driver instance from `user_data`.
// NOTE(review): the rest of the parameter list and the function body fall
// outside this excerpt.
99bool IRAM_ATTR ESP32UART::DmaTxEofCallback(gdma_channel_handle_t, gdma_event_data_t*,
102 auto* uart =
static_cast<ESP32UART*
>(user_data);
// ISR-context trampoline for GDMA TX descriptor-error events; recovers the
// owning driver instance from `user_data` and dispatches to
// HandleDmaTxError().
// NOTE(review): the rest of the parameter list and the return statement fall
// outside this excerpt.
110bool IRAM_ATTR ESP32UART::DmaTxDescrErrCallback(gdma_channel_handle_t, gdma_event_data_t*,
113 auto* uart =
static_cast<ESP32UART*
>(user_data);
116 uart->HandleDmaTxError();
// ISR-context trampoline for GDMA "receive done" events; forwards the event
// data to HandleDmaRxDone() on the owning driver instance.
// NOTE(review): the rest of the parameter list and the return statement fall
// outside this excerpt.
121bool IRAM_ATTR ESP32UART::DmaRxDoneCallback(gdma_channel_handle_t,
122 gdma_event_data_t* event_data,
125 auto* uart =
static_cast<ESP32UART*
>(user_data);
128 uart->HandleDmaRxDone(event_data);
// ISR-context trampoline for GDMA RX descriptor-error events; dispatches to
// HandleDmaRxError() on the owning driver instance.
// NOTE(review): the rest of the parameter list and the return statement fall
// outside this excerpt.
133bool IRAM_ATTR ESP32UART::DmaRxDescrErrCallback(gdma_channel_handle_t, gdma_event_data_t*,
136 auto* uart =
static_cast<ESP32UART*
>(user_data);
139 uart->HandleDmaRxError();
// NOTE(review): this excerpt begins mid-function — the enclosing method's
// signature (the DMA backend bring-up for this UART) is above the visible
// region, and the error-handling branch bodies between the fused original
// line numbers are also not visible here. The visible flow: bring up UHCI,
// create and configure the TX and RX GDMA channels, build descriptor link
// lists, register ISR callbacks, and start the RX channel.
146 if (dma_backend_enabled_)
// Power up and reset the UHCI peripheral, then bind it to this UART port.
151 periph_module_enable(PERIPH_UHCI0_MODULE);
152 periph_module_reset(PERIPH_UHCI0_MODULE);
154 uhci_hal_init(&uhci_hal_, 0);
155 uhci_ll_attach_uart_port(uhci_hal_.dev, uart_num_);
// Disable separator-character framing; RX frames end on line idle instead.
157 uhci_seper_chr_t sep_chr = {};
158 sep_chr.sub_chr_en = 0;
159 uhci_ll_set_seper_chr(uhci_hal_.dev, &sep_chr);
160 uhci_ll_rx_set_eof_mode(uhci_hal_.dev, UHCI_RX_IDLE_EOF);
// --- TX channel: allocate an AHB GDMA channel and connect it to UHCI0. ---
162 gdma_channel_alloc_config_t tx_cfg = {
163 .sibling_chan =
nullptr,
164 .direction = GDMA_CHANNEL_DIRECTION_TX,
167 if (gdma_new_ahb_channel(&tx_cfg, &tx_dma_channel_) != ESP_OK)
172 if (gdma_connect(tx_dma_channel_, GDMA_MAKE_TRIGGER(GDMA_TRIG_PERIPH_UHCI, 0)) !=
// Shared transfer config (reused later for RX): allow external-memory access.
178 gdma_transfer_config_t transfer_cfg = {
179 .max_data_burst_size = 0,
180 .access_ext_mem =
true,
182 if (gdma_config_transfer(tx_dma_channel_, &transfer_cfg) != ESP_OK)
// Record the strictest alignment the TX channel needs for buffers.
187 size_t tx_int_alignment = 1;
188 size_t tx_ext_alignment = 1;
189 if (gdma_get_alignment_constraints(tx_dma_channel_, &tx_int_alignment,
190 &tx_ext_alignment) != ESP_OK)
194 tx_dma_alignment_ = std::max<size_t>(1, std::max(tx_int_alignment, tx_ext_alignment));
196 gdma_strategy_config_t tx_strategy = {
198 .auto_update_desc =
true,
199 .eof_till_data_popped =
true,
201 if (gdma_apply_strategy(tx_dma_channel_, &tx_strategy) != ESP_OK)
// Two single-node TX link lists, one per ping-pong buffer.
206 gdma_link_list_config_t tx_link_cfg = {
211 tx_dma_buffer_addr_[0] = tx_active_buffer_;
212 tx_dma_buffer_addr_[1] = tx_pending_buffer_;
214 for (
int i = 0; i < 2; ++i)
216 if (gdma_new_link_list(&tx_link_cfg, &tx_dma_links_[i]) != ESP_OK)
221 gdma_buffer_mount_config_t tx_mount = {
222 .buffer = tx_dma_buffer_addr_[i],
223 .buffer_alignment = tx_dma_alignment_,
229 .bypass_buffer_align_check = 0,
233 if (gdma_link_mount_buffers(tx_dma_links_[i], 0, &tx_mount, 1,
nullptr) != ESP_OK)
// Cache the head descriptor address; StartDmaTx() rewrites it per transfer.
238 tx_dma_head_addr_[i] = gdma_link_get_head_addr(tx_dma_links_[i]);
239 if (tx_dma_head_addr_[i] == 0U)
245 gdma_tx_event_callbacks_t tx_callbacks = {
246 .on_trans_eof = DmaTxEofCallback,
247 .on_descr_err = DmaTxDescrErrCallback,
249 if (gdma_register_tx_event_callbacks(tx_dma_channel_, &tx_callbacks,
this) != ESP_OK)
// --- RX channel: same sequence as TX (allocate, connect, configure). ---
254 gdma_channel_alloc_config_t rx_cfg = {
255 .sibling_chan =
nullptr,
256 .direction = GDMA_CHANNEL_DIRECTION_RX,
259 if (gdma_new_ahb_channel(&rx_cfg, &rx_dma_channel_) != ESP_OK)
264 if (gdma_connect(rx_dma_channel_, GDMA_MAKE_TRIGGER(GDMA_TRIG_PERIPH_UHCI, 0)) !=
270 if (gdma_config_transfer(rx_dma_channel_, &transfer_cfg) != ESP_OK)
275 size_t rx_int_alignment = 1;
276 size_t rx_ext_alignment = 1;
277 if (gdma_get_alignment_constraints(rx_dma_channel_, &rx_int_alignment,
278 &rx_ext_alignment) != ESP_OK)
282 rx_dma_alignment_ = std::max<size_t>(1, std::max(rx_int_alignment, rx_ext_alignment));
// One circular link list with kDmaRxNodeCount nodes for RX.
284 gdma_link_list_config_t rx_link_cfg = {
285 .num_items = kDmaRxNodeCount,
289 if (gdma_new_link_list(&rx_link_cfg, &rx_dma_link_) != ESP_OK)
// Pick a per-node chunk size: ISR buffer split across the nodes, clamped to
// [32, 512] bytes and rounded up to a 4-byte multiple.
295 const size_t rx_chunk_target =
296 std::min<size_t>(std::max<size_t>(32, rx_isr_buffer_size_ / kDmaRxNodeCount), 512);
297 rx_dma_chunk_size_ = std::max<size_t>(AlignUp(rx_chunk_target, 4), 32);
298 rx_dma_node_count_ = kDmaRxNodeCount;
299 const size_t rx_storage_alignment = std::max<size_t>(4, rx_dma_alignment_);
300 const size_t rx_storage_bytes =
301 AlignUp(rx_dma_chunk_size_ * rx_dma_node_count_, rx_storage_alignment);
// Backing storage must be internal, DMA-capable, byte-addressable memory.
303 rx_dma_storage_ =
static_cast<uint8_t*
>(
304 heap_caps_aligned_alloc(rx_storage_alignment, rx_storage_bytes,
305 MALLOC_CAP_INTERNAL | MALLOC_CAP_DMA | MALLOC_CAP_8BIT));
306 if (rx_dma_storage_ ==
nullptr)
// Mount one chunk-sized slice of the storage into each RX node.
311 std::array<gdma_buffer_mount_config_t, kDmaRxNodeCount> rx_mount = {};
312 for (uint32_t i = 0; i < kDmaRxNodeCount; ++i)
314 rx_mount[i] = gdma_buffer_mount_config_t{
315 .buffer = rx_dma_storage_ + (
static_cast<size_t>(i) * rx_dma_chunk_size_),
316 .buffer_alignment = rx_dma_alignment_,
317 .length = rx_dma_chunk_size_,
322 .bypass_buffer_align_check = 0,
327 if (gdma_link_mount_buffers(rx_dma_link_, 0, rx_mount.data(), kDmaRxNodeCount,
// Per-node completion is reported via on_recv_done; no EOF callback needed.
333 gdma_rx_event_callbacks_t rx_callbacks = {
334 .on_recv_eof =
nullptr,
335 .on_descr_err = DmaRxDescrErrCallback,
336 .on_recv_done = DmaRxDoneCallback,
338 if (gdma_register_rx_event_callbacks(rx_dma_channel_, &rx_callbacks,
this) != ESP_OK)
// Reset then start RX from the head of the link list; reception is now live.
343 if (gdma_reset(rx_dma_channel_) != ESP_OK)
348 if (gdma_start(rx_dma_channel_, gdma_link_get_head_addr(rx_dma_link_)) != ESP_OK)
353 rx_dma_node_index_ = 0;
354 dma_backend_enabled_ =
true;
// Submits the currently active TX buffer to the GDMA engine.
// Returns false when the DMA backend is unusable or the buffer is invalid.
// NOTE(review): several guard/branch bodies — including the declaration and
// assignment of `link_index` — fall between the visible lines of this
// excerpt.
358bool IRAM_ATTR ESP32UART::StartDmaTx()
360 if ((tx_dma_channel_ ==
nullptr) || !tx_active_valid_)
365 uint8_t*
const active_buffer = tx_active_buffer_;
366 const size_t active_len = tx_active_length_;
// A single link item caps the transfer at kDmaMaxBufferSizePerLinkItem bytes.
367 if ((active_buffer ==
nullptr) || (active_len == 0) ||
368 (active_len > kDmaMaxBufferSizePerLinkItem))
// Map the active buffer back to its pre-built link list (one per ping-pong
// buffer set up during backend init).
374 if (active_buffer == tx_dma_buffer_addr_[0])
378 else if (active_buffer == tx_dma_buffer_addr_[1])
387 if ((tx_dma_links_[link_index] ==
nullptr) || (tx_dma_head_addr_[link_index] == 0U))
// Rewrite the head descriptor through its non-cached alias.
392 auto* desc = LinkItemFromHeadAddr(tx_dma_head_addr_[link_index]);
399 desc->buffer = active_buffer;
400 desc->dw0.size =
static_cast<uint32_t
>(active_len);
401 desc->dw0.length =
static_cast<uint32_t
>(active_len);
402 desc->dw0.err_eof = 0U;
// Single-descriptor frame: mark EOF and hand ownership to the DMA engine.
403 desc->dw0.suc_eof = 1U;
404 desc->dw0.owner = kGdmaOwnerDma;
405 desc->next =
nullptr;
// Ensure descriptor writes are visible before the DMA engine is started.
406 std::atomic_thread_fence(std::memory_order_release);
// Write the payload back from cache so the DMA engine reads current data.
408#if SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE || SOC_PSRAM_DMA_CAPABLE
409 if (!CacheSyncDmaBuffer(active_buffer, active_len,
true))
415 return gdma_start(tx_dma_channel_, tx_dma_head_addr_[link_index]) == ESP_OK;
// Drains up to one full window of received data from the circular RX chunk
// storage into the driver's RX path via PushRxBytes().
// NOTE(review): the update of `remaining` (presumably `remaining -= chunk;`)
// and the guard/branch bodies fall outside this excerpt.
418void IRAM_ATTR ESP32UART::PushDmaRxData(
size_t recv_size,
bool in_isr)
420 if ((rx_dma_storage_ ==
nullptr) || (rx_dma_chunk_size_ == 0) ||
421 (rx_dma_node_count_ == 0))
// Never consume more than one full ring of chunks in a single call.
426 const size_t max_window = rx_dma_chunk_size_ * rx_dma_node_count_;
427 size_t remaining = std::min(recv_size, max_window);
429 while (remaining > 0)
// Locate the current node's slice within the contiguous storage block.
431 const size_t offset =
static_cast<size_t>(rx_dma_node_index_) * rx_dma_chunk_size_;
432 const size_t chunk = std::min(remaining, rx_dma_chunk_size_);
433 auto* chunk_ptr = rx_dma_storage_ + offset;
// Invalidate the cache for this slice before the CPU reads DMA-written data.
435#if SOC_CACHE_INTERNAL_MEM_VIA_L1CACHE || SOC_PSRAM_DMA_CAPABLE
436 if (!CacheSyncDmaBuffer(chunk_ptr, chunk,
false))
442 PushRxBytes(chunk_ptr, chunk, in_isr);
// Advance around the circular node ring.
444 rx_dma_node_index_ = (rx_dma_node_index_ + 1U) % rx_dma_node_count_;
// ISR handler for GDMA "receive done": determines how many bytes the finished
// node(s) hold and forwards them to PushDmaRxData().
// NOTE(review): the guard/branch bodies (e.g. for abnormal EOF) fall outside
// this excerpt.
448void IRAM_ATTR ESP32UART::HandleDmaRxDone(gdma_event_data_t* event_data)
450 if ((rx_dma_storage_ ==
nullptr) || (rx_dma_chunk_size_ == 0) ||
451 (rx_dma_node_count_ == 0))
456 if ((event_data !=
nullptr) && event_data->flags.abnormal_eof)
// Default assumption: one fully filled chunk completed.
462 size_t recv_size = rx_dma_chunk_size_;
463 if ((event_data !=
nullptr) && event_data->flags.normal_eof)
// Normal EOF: ask the link list how many bytes were actually written from
// the current node up to the EOF descriptor.
465 const size_t eof_size = gdma_link_count_buffer_size_till_eof(
466 rx_dma_link_,
static_cast<int>(rx_dma_node_index_));
469 recv_size = eof_size;
// Running in ISR context, hence in_isr = true.
473 PushDmaRxData(recv_size,
true);
// ISR handler for RX descriptor errors: stop and reset the RX channel, then
// restart it from the head of the (still mounted) link list so reception
// resumes.
// NOTE(review): the guard's early-return body falls outside this excerpt.
476void IRAM_ATTR ESP32UART::HandleDmaRxError()
478 if ((rx_dma_channel_ ==
nullptr) || (rx_dma_link_ ==
nullptr))
483 gdma_stop(rx_dma_channel_);
484 gdma_reset(rx_dma_channel_);
// Restart consuming from node 0 to match the restarted descriptor chain.
485 rx_dma_node_index_ = 0;
486 (void)gdma_start(rx_dma_channel_, gdma_link_get_head_addr(rx_dma_link_));
// ISR handler for TX descriptor errors: quiesce the TX channel by stopping
// and resetting it.
// NOTE(review): the remainder of this function falls outside this excerpt.
489void IRAM_ATTR ESP32UART::HandleDmaTxError()
491 if (tx_dma_channel_ !=
nullptr)
493 gdma_stop(tx_dma_channel_);
494 gdma_reset(tx_dma_channel_);
@ INIT_ERR
初始化错误 | Initialization error
@ NO_MEM
内存不足 | Insufficient memory
@ FAILED
操作失败 | Operation failed
@ OK
操作成功 | Operation successful