/* ... */
#include <string.h>
#include <sys/param.h>
#include <sys/lock.h>
#include "esp_types.h"
#include "esp_attr.h"
#include "esp_intr_alloc.h"
#include "esp_log.h"
#include "esp_err.h"
#include "esp_check.h"
#include "freertos/FreeRTOS.h"
#include "freertos/queue.h"
#include "freertos/semphr.h"
#include "freertos/ringbuf.h"
#include "freertos/idf_additions.h"
#include "esp_private/critical_section.h"
#include "hal/uart_hal.h"
#include "hal/gpio_hal.h"
#include "soc/uart_periph.h"
#include "soc/soc_caps.h"
#include "driver/uart.h"
#include "driver/gpio.h"
#include "driver/uart_select.h"
#include "esp_private/esp_clk_tree_common.h"
#include "esp_private/gpio.h"
#include "esp_private/esp_gpio_reserve.h"
#include "esp_private/uart_share_hw_ctrl.h"
#include "esp_clk_tree.h"
#include "sdkconfig.h"
#include "esp_rom_gpio.h"29 includes
#if (SOC_UART_LP_NUM >= 1)
#include "driver/rtc_io.h"
#include "hal/rtc_io_ll.h"
#include "driver/lp_io.h"/* ... */
#endif
#include "clk_ctrl_os.h"
#include "esp_pm.h"
#include "esp_private/sleep_retention.h"
#ifdef CONFIG_UART_ISR_IN_IRAM
#define UART_ISR_ATTR IRAM_ATTR
#define UART_MALLOC_CAPS (MALLOC_CAP_INTERNAL | MALLOC_CAP_8BIT)
#else
#define UART_ISR_ATTR
#define UART_MALLOC_CAPS MALLOC_CAP_DEFAULT
#endif
#define PROTECT_APB (CONFIG_PM_ENABLE)
#define XOFF (0x13)
#define XON (0x11)
static const char *UART_TAG = "uart";
#define UART_EMPTY_THRESH_DEFAULT (10)
#define LP_UART_EMPTY_THRESH_DEFAULT (4)
#define UART_FULL_THRESH_DEFAULT (120)
#define LP_UART_FULL_THRESH_DEFAULT (10)
#define UART_TOUT_THRESH_DEFAULT (10)
#define UART_CLKDIV_FRAG_BIT_WIDTH (3)
#define UART_TX_IDLE_NUM_DEFAULT (0)
#define UART_PATTERN_DET_QLEN_DEFAULT (10)
#define UART_MIN_WAKEUP_THRESH (UART_LL_MIN_WAKEUP_THRESH)
#if (SOC_UART_LP_NUM >= 1)
#define UART_THRESHOLD_NUM(uart_num, field_name) ((uart_num < SOC_UART_HP_NUM) ? field_name : LP_##field_name)
#define TO_LP_UART_NUM(uart_num) (uart_num - SOC_UART_HP_NUM)
#else
#define UART_THRESHOLD_NUM(uart_num, field_name) (field_name)
#endif
#if SOC_UART_SUPPORT_WAKEUP_INT
#define UART_INTR_CONFIG_FLAG ((UART_INTR_RXFIFO_FULL) \
| (UART_INTR_RXFIFO_TOUT) \
| (UART_INTR_RXFIFO_OVF) \
| (UART_INTR_BRK_DET) \
| (UART_INTR_PARITY_ERR) \
| (UART_INTR_WAKEUP))
#else
#define UART_INTR_CONFIG_FLAG ((UART_INTR_RXFIFO_FULL) \
| (UART_INTR_RXFIFO_TOUT) \
| (UART_INTR_RXFIFO_OVF) \
| (UART_INTR_BRK_DET) \
| (UART_INTR_PARITY_ERR))
#endif
#define UART_ENTER_CRITICAL_SAFE(spinlock) esp_os_enter_critical_safe(spinlock)
#define UART_EXIT_CRITICAL_SAFE(spinlock) esp_os_exit_critical_safe(spinlock)
#define UART_ENTER_CRITICAL_ISR(spinlock) esp_os_enter_critical_isr(spinlock)
#define UART_EXIT_CRITICAL_ISR(spinlock) esp_os_exit_critical_isr(spinlock)
#define UART_ENTER_CRITICAL(spinlock) esp_os_enter_critical(spinlock)
#define UART_EXIT_CRITICAL(spinlock) esp_os_exit_critical(spinlock)
#define UART_IS_MODE_SET(uart_number, mode) ((p_uart_obj[uart_number]->uart_mode == mode))
#define UART_CONTEXT_INIT_DEF(uart_num) { \
.port_id = uart_num, \
.hal.dev = UART_LL_GET_HW(uart_num), \
INIT_CRIT_SECTION_LOCK_IN_STRUCT(spinlock) \
.hw_enabled = false, \
}
typedef struct {
uart_event_type_t type;
struct {
int brk_len;
size_t size;
uint8_t data[0];
} tx_data;
} uart_tx_data_t;
typedef struct {
int wr;
int rd;
int len;
int *data;
} uart_pat_rb_t;
typedef struct {
uart_port_t uart_num;
int event_queue_size;
intr_handle_t intr_handle;
uart_mode_t uart_mode;
bool coll_det_flg;
bool rx_always_timeout_flg;
int rx_buffered_len;
int rx_buf_size;
bool rx_buffer_full_flg;
uint8_t *rx_data_buf;
uint8_t rx_stash_len;
uint32_t rx_int_usr_mask;
uart_pat_rb_t rx_pattern_pos;
int tx_buf_size;
bool tx_waiting_fifo;
uint8_t *tx_ptr;
uart_tx_data_t *tx_head;
uint32_t tx_len_tot;
uint32_t tx_len_cur;
uint8_t tx_brk_flg;
uint8_t tx_brk_len;
uint8_t tx_waiting_brk;
uart_select_notif_callback_t uart_select_notif_callback;
QueueHandle_t event_queue;
RingbufHandle_t rx_ring_buf;
RingbufHandle_t tx_ring_buf;
SemaphoreHandle_t rx_mux;
SemaphoreHandle_t tx_mux;
SemaphoreHandle_t tx_fifo_sem;
SemaphoreHandle_t tx_done_sem;
SemaphoreHandle_t tx_brk_sem;
#if PROTECT_APB
esp_pm_lock_handle_t pm_lock;
#endif
} uart_obj_t;
typedef struct {
_lock_t mutex;
uart_port_t port_id;
uart_hal_context_t hal;
DECLARE_CRIT_SECTION_LOCK_IN_STRUCT(spinlock)
bool hw_enabled;
} uart_context_t;
static uart_obj_t *p_uart_obj[UART_NUM_MAX] = {0};
static uart_context_t uart_context[UART_NUM_MAX] = {
UART_CONTEXT_INIT_DEF(UART_NUM_0),
UART_CONTEXT_INIT_DEF(UART_NUM_1),
#if SOC_UART_HP_NUM > 2
UART_CONTEXT_INIT_DEF(UART_NUM_2),
#endif
#if SOC_UART_HP_NUM > 3
UART_CONTEXT_INIT_DEF(UART_NUM_3),
#endif
#if SOC_UART_HP_NUM > 4
UART_CONTEXT_INIT_DEF(UART_NUM_4),
#endif
#if (SOC_UART_LP_NUM >= 1)
UART_CONTEXT_INIT_DEF(LP_UART_NUM_0),
#endif
};
static portMUX_TYPE uart_selectlock = portMUX_INITIALIZER_UNLOCKED;
#if CONFIG_PM_POWER_DOWN_PERIPHERAL_IN_LIGHT_SLEEP
static esp_err_t uart_create_sleep_retention_link_cb(void *arg);
#endif
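/* Lazily turn a UART's bus/source clocks on and off. The console UART is left
 * untouched so logging keeps working, and (when light-sleep power-down of
 * peripherals is enabled) a sleep-retention module is registered here so the
 * register state can be backed up before the power domain is switched off. */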
static void uart_module_enable(uart_port_t uart_num)
{
_lock_acquire(&(uart_context[uart_num].mutex));
if (uart_context[uart_num].hw_enabled != true) {
if (uart_num < SOC_UART_HP_NUM) {
HP_UART_BUS_CLK_ATOMIC() {
uart_ll_enable_bus_clock(uart_num, true);
}
if (uart_num != CONFIG_ESP_CONSOLE_UART_NUM) {
HP_UART_BUS_CLK_ATOMIC() {
uart_ll_reset_register(uart_num);
}
HP_UART_SRC_CLK_ATOMIC() {
uart_ll_sclk_enable(uart_context[uart_num].hal.dev);
}
}
#if CONFIG_PM_POWER_DOWN_PERIPHERAL_IN_LIGHT_SLEEP
if (uart_num != CONFIG_ESP_CONSOLE_UART_NUM) {
sleep_retention_module_t module = uart_reg_retention_info[uart_num].module;
assert(!sleep_retention_is_module_inited(module));
sleep_retention_module_init_param_t init_param = {
.cbs = {
.create = {
.handle = uart_create_sleep_retention_link_cb,
.arg = &uart_context[uart_num],
},
},
.depends = RETENTION_MODULE_BITMAP_INIT(CLOCK_SYSTEM)
};
if (sleep_retention_module_init(module, &init_param) != ESP_OK) {
ESP_LOGW(UART_TAG, "init sleep retention failed for uart%d, power domain may be turned off during sleep", uart_num);
}
}
#endif
}
#if (SOC_UART_LP_NUM >= 1)
else {
LP_UART_BUS_CLK_ATOMIC() {
lp_uart_ll_enable_bus_clock(TO_LP_UART_NUM(uart_num), true);
lp_uart_ll_reset_register(TO_LP_UART_NUM(uart_num));
}
lp_uart_ll_sclk_enable(TO_LP_UART_NUM(uart_num));
}
#endif
uart_context[uart_num].hw_enabled = true;
}
_lock_release(&(uart_context[uart_num].mutex));
}
static void uart_module_disable(uart_port_t uart_num)
{
_lock_acquire(&(uart_context[uart_num].mutex));
if (uart_context[uart_num].hw_enabled != false) {
if (uart_num != CONFIG_ESP_CONSOLE_UART_NUM && uart_num < SOC_UART_HP_NUM) {
#if CONFIG_PM_POWER_DOWN_PERIPHERAL_IN_LIGHT_SLEEP
sleep_retention_module_t module = uart_reg_retention_info[uart_num].module;
assert(!sleep_retention_is_module_created(module));
if (sleep_retention_is_module_inited(module)) {
sleep_retention_module_deinit(module);
}
#endif
HP_UART_SRC_CLK_ATOMIC() {
uart_ll_sclk_disable(uart_context[uart_num].hal.dev);
}
HP_UART_BUS_CLK_ATOMIC() {
uart_ll_enable_bus_clock(uart_num, false);
}
}
#if (SOC_UART_LP_NUM >= 1)
else if (uart_num >= SOC_UART_HP_NUM) {
lp_uart_ll_sclk_disable(TO_LP_UART_NUM(uart_num));
LP_UART_BUS_CLK_ATOMIC() {
lp_uart_ll_enable_bus_clock(TO_LP_UART_NUM(uart_num), false);
}
}
#endif
uart_context[uart_num].hw_enabled = false;
}
_lock_release(&(uart_context[uart_num].mutex));
}
esp_err_t uart_get_sclk_freq(uart_sclk_t sclk, uint32_t *out_freq_hz)
{
return esp_clk_tree_src_get_freq_hz((soc_module_clk_t)sclk, ESP_CLK_TREE_SRC_FREQ_PRECISION_CACHED, out_freq_hz);
}
esp_err_t uart_set_word_length(uart_port_t uart_num, uart_word_length_t data_bit)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((data_bit < UART_DATA_BITS_MAX), ESP_FAIL, UART_TAG, "data bit error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_data_bit_num(&(uart_context[uart_num].hal), data_bit);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_get_word_length(uart_port_t uart_num, uart_word_length_t *data_bit)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
uart_hal_get_data_bit_num(&(uart_context[uart_num].hal), data_bit);
return ESP_OK;
}
esp_err_t uart_set_stop_bits(uart_port_t uart_num, uart_stop_bits_t stop_bit)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((stop_bit < UART_STOP_BITS_MAX), ESP_FAIL, UART_TAG, "stop bit error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_stop_bits(&(uart_context[uart_num].hal), stop_bit);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_get_stop_bits(uart_port_t uart_num, uart_stop_bits_t *stop_bit)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_get_stop_bits(&(uart_context[uart_num].hal), stop_bit);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_parity(uart_port_t uart_num, uart_parity_t parity_mode)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_parity(&(uart_context[uart_num].hal), parity_mode);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_get_parity(uart_port_t uart_num, uart_parity_t *parity_mode)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_get_parity(&(uart_context[uart_num].hal), parity_mode);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
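/* Baud rate is derived from the currently selected source clock: the clock-tree
 * frequency is queried first, then the divider is programmed. HP ports go through
 * the shared HP_UART_SRC_CLK_ATOMIC section, LP ports use the LP-specific LL call. */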
esp_err_t uart_set_baudrate(uart_port_t uart_num, uint32_t baud_rate)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
soc_module_clk_t src_clk;
uint32_t sclk_freq;
uart_hal_get_sclk(&(uart_context[uart_num].hal), &src_clk);
ESP_RETURN_ON_ERROR(esp_clk_tree_src_get_freq_hz(src_clk, ESP_CLK_TREE_SRC_FREQ_PRECISION_CACHED, &sclk_freq), UART_TAG, "Invalid src_clk");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
if (uart_num < SOC_UART_HP_NUM) {
HP_UART_SRC_CLK_ATOMIC() {
uart_hal_set_baudrate(&(uart_context[uart_num].hal), baud_rate, sclk_freq);
}
}
#if (SOC_UART_LP_NUM >= 1)
else {
lp_uart_ll_set_baudrate(uart_context[uart_num].hal.dev, baud_rate, sclk_freq);
}
#endif
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_get_baudrate(uart_port_t uart_num, uint32_t *baudrate)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
soc_module_clk_t src_clk;
uint32_t sclk_freq;
uart_hal_get_sclk(&(uart_context[uart_num].hal), &src_clk);
ESP_RETURN_ON_ERROR(esp_clk_tree_src_get_freq_hz(src_clk, ESP_CLK_TREE_SRC_FREQ_PRECISION_CACHED, &sclk_freq), UART_TAG, "Invalid src_clk");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_get_baudrate(&(uart_context[uart_num].hal), baudrate, sclk_freq);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_line_inverse(uart_port_t uart_num, uint32_t inverse_mask)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_inverse_signal(&(uart_context[uart_num].hal), inverse_mask);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_sw_flow_ctrl(uart_port_t uart_num, bool enable, uint8_t rx_thresh_xon, uint8_t rx_thresh_xoff)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((rx_thresh_xon < UART_HW_FIFO_LEN(uart_num)), ESP_FAIL, UART_TAG, "rx flow xon thresh error");
ESP_RETURN_ON_FALSE((rx_thresh_xoff < UART_HW_FIFO_LEN(uart_num)), ESP_FAIL, UART_TAG, "rx flow xoff thresh error");
uart_sw_flowctrl_t sw_flow_ctl = {
.xon_char = XON,
.xoff_char = XOFF,
.xon_thrd = rx_thresh_xon,
.xoff_thrd = rx_thresh_xoff,
};
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_sw_flow_ctrl(&(uart_context[uart_num].hal), &sw_flow_ctl, enable);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_hw_flow_ctrl(uart_port_t uart_num, uart_hw_flowcontrol_t flow_ctrl, uint8_t rx_thresh)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((rx_thresh < UART_HW_FIFO_LEN(uart_num)), ESP_FAIL, UART_TAG, "rx flow thresh error");
ESP_RETURN_ON_FALSE((flow_ctrl < UART_HW_FLOWCTRL_MAX), ESP_FAIL, UART_TAG, "hw_flowctrl mode error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_hw_flow_ctrl(&(uart_context[uart_num].hal), flow_ctrl, rx_thresh);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_get_hw_flow_ctrl(uart_port_t uart_num, uart_hw_flowcontrol_t *flow_ctrl)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_get_hw_flow_ctrl(&(uart_context[uart_num].hal), flow_ctrl);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t UART_ISR_ATTR uart_clear_intr_status(uart_port_t uart_num, uint32_t clr_mask)
{
ESP_RETURN_ON_FALSE_ISR((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), clr_mask);
return ESP_OK;
}
esp_err_t uart_enable_intr_mask(uart_port_t uart_num, uint32_t enable_mask)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
/* ... */
p_uart_obj[uart_num]->rx_int_usr_mask |= enable_mask;
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), enable_mask);
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), enable_mask);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
/* ... */
static esp_err_t uart_reenable_intr_mask(uart_port_t uart_num, uint32_t enable_mask)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
/* ... */
uint32_t mask = p_uart_obj[uart_num]->rx_int_usr_mask & enable_mask;
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), mask);
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), mask);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_disable_intr_mask(uart_port_t uart_num, uint32_t disable_mask)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
p_uart_obj[uart_num]->rx_int_usr_mask &= ~disable_mask;
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), disable_mask);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
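/* Pattern-detection bookkeeping: detected pattern positions (offsets into the RX
 * ring buffer) are stored in a small software ring buffer (uart_pat_rb_t). The
 * helpers below allocate/free that buffer and enqueue, dequeue and re-base the
 * recorded positions as data is consumed by uart_read_bytes(). */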
static esp_err_t uart_pattern_link_free(uart_port_t uart_num)
{
int *pdata = NULL;
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
if (p_uart_obj[uart_num]->rx_pattern_pos.data != NULL) {
pdata = p_uart_obj[uart_num]->rx_pattern_pos.data;
p_uart_obj[uart_num]->rx_pattern_pos.data = NULL;
p_uart_obj[uart_num]->rx_pattern_pos.wr = 0;
p_uart_obj[uart_num]->rx_pattern_pos.rd = 0;
}
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
free(pdata);
return ESP_OK;
}
static esp_err_t UART_ISR_ATTR uart_pattern_enqueue(uart_port_t uart_num, int pos)
{
esp_err_t ret = ESP_OK;
uart_pat_rb_t *p_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
int next = p_pos->wr + 1;
if (next >= p_pos->len) {
next = 0;
}
if (next == p_pos->rd) {
#ifndef CONFIG_UART_ISR_IN_IRAM
ESP_EARLY_LOGW(UART_TAG, "Fail to enqueue pattern position, pattern queue is full.");
#endif
ret = ESP_FAIL;
} else {
p_pos->data[p_pos->wr] = pos;
p_pos->wr = next;
ret = ESP_OK;
}
return ret;
}
static esp_err_t uart_pattern_dequeue(uart_port_t uart_num)
{
if (p_uart_obj[uart_num]->rx_pattern_pos.data == NULL) {
return ESP_ERR_INVALID_STATE;
} else {
esp_err_t ret = ESP_OK;
uart_pat_rb_t *p_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
if (p_pos->rd == p_pos->wr) {
ret = ESP_FAIL;
} else {
p_pos->rd++;
}
if (p_pos->rd >= p_pos->len) {
p_pos->rd = 0;
}
return ret;
}
}
static esp_err_t uart_pattern_queue_update(uart_port_t uart_num, int diff_len)
{
uart_pat_rb_t *p_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
int rd = p_pos->rd;
while (rd != p_pos->wr) {
p_pos->data[rd] -= diff_len;
int rd_rec = rd;
rd ++;
if (rd >= p_pos->len) {
rd = 0;
}
if (p_pos->data[rd_rec] < 0) {
p_pos->rd = rd;
}
}
return ESP_OK;
}
int uart_pattern_pop_pos(uart_port_t uart_num)
{
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), (-1), UART_TAG, "uart driver error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_pat_rb_t *pat_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
int pos = -1;
if (pat_pos != NULL && pat_pos->rd != pat_pos->wr) {
pos = pat_pos->data[pat_pos->rd];
uart_pattern_dequeue(uart_num);
}
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return pos;
}
int uart_pattern_get_pos(uart_port_t uart_num)
{
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), (-1), UART_TAG, "uart driver error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_pat_rb_t *pat_pos = &p_uart_obj[uart_num]->rx_pattern_pos;
int pos = -1;
if (pat_pos != NULL && pat_pos->rd != pat_pos->wr) {
pos = pat_pos->data[pat_pos->rd];
}
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return pos;
}
esp_err_t uart_pattern_queue_reset(uart_port_t uart_num, int queue_length)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), ESP_ERR_INVALID_STATE, UART_TAG, "uart driver error");
int *pdata = (int *)heap_caps_malloc(queue_length * sizeof(int), UART_MALLOC_CAPS);
if (pdata == NULL) {
return ESP_ERR_NO_MEM;
}
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
int *ptmp = p_uart_obj[uart_num]->rx_pattern_pos.data;
p_uart_obj[uart_num]->rx_pattern_pos.data = pdata;
p_uart_obj[uart_num]->rx_pattern_pos.len = queue_length;
p_uart_obj[uart_num]->rx_pattern_pos.rd = 0;
p_uart_obj[uart_num]->rx_pattern_pos.wr = 0;
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
free(ptmp);
return ESP_OK;
}
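/* Configure AT-command-style pattern detection. On ESP32 the gap/idle thresholds
 * are converted from bit times into APB clock cycles; newer targets take them in
 * bit times directly.
 *
 * Illustrative usage (values are an example, not taken from this file):
 *
 *     // detect "+++" with a 9-bit-time character gap and no pre/post idle
 *     uart_enable_pattern_det_baud_intr(UART_NUM_1, '+', 3, 9, 0, 0);
 *     uart_pattern_queue_reset(UART_NUM_1, 20);   // room for 20 recorded positions
 */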
esp_err_t uart_enable_pattern_det_baud_intr(uart_port_t uart_num, char pattern_chr, uint8_t chr_num, int chr_tout, int post_idle, int pre_idle)
{
ESP_RETURN_ON_FALSE(uart_num < UART_NUM_MAX, ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE(chr_tout >= 0 && chr_tout <= UART_THRESHOLD_NUM(uart_num, UART_RX_GAP_TOUT_V), ESP_FAIL, UART_TAG, "uart pattern set error\n");
ESP_RETURN_ON_FALSE(post_idle >= 0 && post_idle <= UART_THRESHOLD_NUM(uart_num, UART_POST_IDLE_NUM_V), ESP_FAIL, UART_TAG, "uart pattern set error\n");
ESP_RETURN_ON_FALSE(pre_idle >= 0 && pre_idle <= UART_THRESHOLD_NUM(uart_num, UART_PRE_IDLE_NUM_V), ESP_FAIL, UART_TAG, "uart pattern set error\n");
uart_at_cmd_t at_cmd = {0};
at_cmd.cmd_char = pattern_chr;
at_cmd.char_num = chr_num;
#if CONFIG_IDF_TARGET_ESP32
uint32_t apb_clk_freq = 0;
uint32_t uart_baud = 0;
uint32_t uart_div = 0;
uart_get_baudrate(uart_num, &uart_baud);
esp_clk_tree_src_get_freq_hz((soc_module_clk_t)UART_SCLK_APB, ESP_CLK_TREE_SRC_FREQ_PRECISION_EXACT, &apb_clk_freq);
uart_div = apb_clk_freq / uart_baud;
at_cmd.gap_tout = chr_tout * uart_div;
at_cmd.pre_idle = pre_idle * uart_div;
at_cmd.post_idle = post_idle * uart_div;
#else
at_cmd.gap_tout = chr_tout;
at_cmd.pre_idle = pre_idle;
at_cmd.post_idle = post_idle;
#endif
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_at_cmd_char(&(uart_context[uart_num].hal), &at_cmd);
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_disable_pattern_det_intr(uart_port_t uart_num)
{
return uart_disable_intr_mask(uart_num, UART_INTR_CMD_CHAR_DET);
}
esp_err_t uart_enable_rx_intr(uart_port_t uart_num)
{
return uart_enable_intr_mask(uart_num, UART_INTR_RXFIFO_FULL | UART_INTR_RXFIFO_TOUT);
}
esp_err_t uart_disable_rx_intr(uart_port_t uart_num)
{
return uart_disable_intr_mask(uart_num, UART_INTR_RXFIFO_FULL | UART_INTR_RXFIFO_TOUT);
}
esp_err_t uart_disable_tx_intr(uart_port_t uart_num)
{
return uart_disable_intr_mask(uart_num, UART_INTR_TXFIFO_EMPTY);
}
esp_err_t uart_enable_tx_intr(uart_port_t uart_num, int enable, int thresh)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((thresh < UART_HW_FIFO_LEN(uart_num)), ESP_FAIL, UART_TAG, "empty intr threshold error");
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_txfifo_empty_thr(&(uart_context[uart_num].hal), thresh);
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
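/* Try to route a UART signal through the IO MUX directly. This only succeeds when
 * the requested GPIO is the default IOMUX pin for that signal; otherwise the caller
 * falls back to the GPIO (or LP GPIO) matrix. */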
static bool uart_try_set_iomux_pin(uart_port_t uart_num, int io_num, uint32_t idx)
{
const uart_periph_sig_t *upin = &uart_periph_signal[uart_num].pins[idx];
/* ... */
if (upin->iomux_func == -1 || upin->default_gpio == -1 || upin->default_gpio != io_num) {
return false;
}
assert(upin->iomux_func != -1);
if (uart_num < SOC_UART_HP_NUM) {
gpio_iomux_out(io_num, upin->iomux_func, false);
/* ... */
if (upin->input) {
gpio_iomux_in(io_num, upin->signal);
}
}
#if (SOC_UART_LP_NUM >= 1) && (SOC_RTCIO_PIN_COUNT >= 1)
else {
if (upin->input) {
rtc_gpio_set_direction(io_num, RTC_GPIO_MODE_INPUT_ONLY);
} else {
rtc_gpio_set_direction(io_num, RTC_GPIO_MODE_OUTPUT_ONLY);
}
rtc_gpio_init(io_num);
rtc_gpio_iomux_func_sel(io_num, upin->iomux_func);
}
#endif
return true;
}
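/* Pin assignment: a negative pin number leaves that signal untouched. Pins are
 * reserved via esp_gpio_reserve() (matrix-routed pure inputs are dropped from the
 * reservation mask first), and a warning is logged for every pin that some other
 * driver had already claimed. */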
esp_err_t uart_set_pin(uart_port_t uart_num, int tx_io_num, int rx_io_num, int rts_io_num, int cts_io_num)
{
ESP_RETURN_ON_FALSE((uart_num >= 0), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
if (uart_num < SOC_UART_HP_NUM) {
ESP_RETURN_ON_FALSE((tx_io_num < 0 || (GPIO_IS_VALID_OUTPUT_GPIO(tx_io_num))), ESP_FAIL, UART_TAG, "tx_io_num error");
ESP_RETURN_ON_FALSE((rx_io_num < 0 || (GPIO_IS_VALID_GPIO(rx_io_num))), ESP_FAIL, UART_TAG, "rx_io_num error");
ESP_RETURN_ON_FALSE((rts_io_num < 0 || (GPIO_IS_VALID_OUTPUT_GPIO(rts_io_num))), ESP_FAIL, UART_TAG, "rts_io_num error");
ESP_RETURN_ON_FALSE((cts_io_num < 0 || (GPIO_IS_VALID_GPIO(cts_io_num))), ESP_FAIL, UART_TAG, "cts_io_num error");
}
#if (SOC_UART_LP_NUM >= 1)
else {
#if !SOC_LP_GPIO_MATRIX_SUPPORTED
const uart_periph_sig_t *pins = uart_periph_signal[uart_num].pins;
ESP_RETURN_ON_FALSE((tx_io_num < 0 || (tx_io_num == pins[SOC_UART_TX_PIN_IDX].default_gpio)), ESP_FAIL, UART_TAG, "tx_io_num error");
ESP_RETURN_ON_FALSE((rx_io_num < 0 || (rx_io_num == pins[SOC_UART_RX_PIN_IDX].default_gpio)), ESP_FAIL, UART_TAG, "rx_io_num error");
ESP_RETURN_ON_FALSE((rts_io_num < 0 || (rts_io_num == pins[SOC_UART_RTS_PIN_IDX].default_gpio)), ESP_FAIL, UART_TAG, "rts_io_num error");
ESP_RETURN_ON_FALSE((cts_io_num < 0 || (cts_io_num == pins[SOC_UART_CTS_PIN_IDX].default_gpio)), ESP_FAIL, UART_TAG, "cts_io_num error");
#else
ESP_RETURN_ON_FALSE((tx_io_num < 0 || rtc_gpio_is_valid_gpio(tx_io_num)), ESP_FAIL, UART_TAG, "tx_io_num error");
ESP_RETURN_ON_FALSE((rx_io_num < 0 || rtc_gpio_is_valid_gpio(rx_io_num)), ESP_FAIL, UART_TAG, "rx_io_num error");
ESP_RETURN_ON_FALSE((rts_io_num < 0 || rtc_gpio_is_valid_gpio(rts_io_num)), ESP_FAIL, UART_TAG, "rts_io_num error");
ESP_RETURN_ON_FALSE((cts_io_num < 0 || rtc_gpio_is_valid_gpio(cts_io_num)), ESP_FAIL, UART_TAG, "cts_io_num error");
/* ... */
#endif
}
#endif
uint64_t io_reserve_mask = 0;
io_reserve_mask |= (tx_io_num > 0 ? BIT64(tx_io_num) : 0);
io_reserve_mask |= (rx_io_num > 0 ? BIT64(rx_io_num) : 0);
io_reserve_mask |= (rts_io_num > 0 ? BIT64(rts_io_num) : 0);
io_reserve_mask |= (cts_io_num > 0 ? BIT64(cts_io_num) : 0);
bool tx_rx_same_io = (tx_io_num == rx_io_num);
if (tx_io_num >= 0 && (tx_rx_same_io || !uart_try_set_iomux_pin(uart_num, tx_io_num, SOC_UART_TX_PIN_IDX))) {
if (uart_num < SOC_UART_HP_NUM) {
gpio_func_sel(tx_io_num, PIN_FUNC_GPIO);
esp_rom_gpio_connect_out_signal(tx_io_num, UART_PERIPH_SIGNAL(uart_num, SOC_UART_TX_PIN_IDX), 0, 0);
}
#if SOC_LP_GPIO_MATRIX_SUPPORTED
else {
rtc_gpio_init(tx_io_num);
lp_gpio_connect_out_signal(tx_io_num, UART_PERIPH_SIGNAL(uart_num, SOC_UART_TX_PIN_IDX), 0, 0);
}
#endif
}
if (rx_io_num >= 0 && (tx_rx_same_io || !uart_try_set_iomux_pin(uart_num, rx_io_num, SOC_UART_RX_PIN_IDX))) {
io_reserve_mask &= ~BIT64(rx_io_num);
if (uart_num < SOC_UART_HP_NUM) {
gpio_func_sel(rx_io_num, PIN_FUNC_GPIO);
gpio_input_enable(rx_io_num);
esp_rom_gpio_connect_in_signal(rx_io_num, UART_PERIPH_SIGNAL(uart_num, SOC_UART_RX_PIN_IDX), 0);
}
#if SOC_LP_GPIO_MATRIX_SUPPORTED
else {
rtc_gpio_mode_t mode = (tx_rx_same_io ? RTC_GPIO_MODE_INPUT_OUTPUT : RTC_GPIO_MODE_INPUT_ONLY);
rtc_gpio_set_direction(rx_io_num, mode);
if (!tx_rx_same_io) {
rtc_gpio_init(rx_io_num);
}
lp_gpio_connect_in_signal(rx_io_num, UART_PERIPH_SIGNAL(uart_num, SOC_UART_RX_PIN_IDX), 0);
}
#endif
}
if (rts_io_num >= 0 && !uart_try_set_iomux_pin(uart_num, rts_io_num, SOC_UART_RTS_PIN_IDX)) {
if (uart_num < SOC_UART_HP_NUM) {
gpio_func_sel(rts_io_num, PIN_FUNC_GPIO);
esp_rom_gpio_connect_out_signal(rts_io_num, UART_PERIPH_SIGNAL(uart_num, SOC_UART_RTS_PIN_IDX), 0, 0);
}
#if SOC_LP_GPIO_MATRIX_SUPPORTED
else {
rtc_gpio_init(rts_io_num);
lp_gpio_connect_out_signal(rts_io_num, UART_PERIPH_SIGNAL(uart_num, SOC_UART_RTS_PIN_IDX), 0, 0);
}
#endif
}
if (cts_io_num >= 0 && !uart_try_set_iomux_pin(uart_num, cts_io_num, SOC_UART_CTS_PIN_IDX)) {
io_reserve_mask &= ~BIT64(cts_io_num);
if (uart_num < SOC_UART_HP_NUM) {
gpio_func_sel(cts_io_num, PIN_FUNC_GPIO);
gpio_pullup_en(cts_io_num);
gpio_input_enable(cts_io_num);
esp_rom_gpio_connect_in_signal(cts_io_num, UART_PERIPH_SIGNAL(uart_num, SOC_UART_CTS_PIN_IDX), 0);
}
#if SOC_LP_GPIO_MATRIX_SUPPORTED
else {
rtc_gpio_set_direction(cts_io_num, RTC_GPIO_MODE_INPUT_ONLY);
rtc_gpio_init(cts_io_num);
lp_gpio_connect_in_signal(cts_io_num, UART_PERIPH_SIGNAL(uart_num, SOC_UART_CTS_PIN_IDX), 0);
}
#endif
}
uint64_t old_busy_mask = esp_gpio_reserve(io_reserve_mask);
uint64_t conflict_mask = old_busy_mask & io_reserve_mask;
while (conflict_mask > 0) {
uint8_t pos = __builtin_ctzll(conflict_mask);
conflict_mask &= ~(1ULL << pos);
ESP_LOGW(UART_TAG, "GPIO %d is not usable, maybe used by others", pos);
}
return ESP_OK;
}
esp_err_t uart_set_rts(uart_port_t uart_num, int level)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((!uart_hal_is_hw_rts_en(&(uart_context[uart_num].hal))), ESP_FAIL, UART_TAG, "disable hw flowctrl before using sw control");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_rts(&(uart_context[uart_num].hal), level);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_dtr(uart_port_t uart_num, int level)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_dtr(&(uart_context[uart_num].hal), level);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_tx_idle_num(uart_port_t uart_num, uint16_t idle_num)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((idle_num <= UART_THRESHOLD_NUM(uart_num, UART_TX_IDLE_NUM_V)), ESP_FAIL, UART_TAG, "uart idle num error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_tx_idle_num(&(uart_context[uart_num].hal), idle_num);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
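/* Apply the basic line parameters (clock source, baud rate, data/stop bits, parity,
 * flow control) and reset both FIFOs.
 *
 * Illustrative call sequence for bringing up a port (uart_driver_install() is defined
 * elsewhere in this driver; the pin numbers and sizes are just an example):
 *
 *     uart_config_t cfg = {
 *         .baud_rate = 115200,
 *         .data_bits = UART_DATA_8_BITS,
 *         .parity = UART_PARITY_DISABLE,
 *         .stop_bits = UART_STOP_BITS_1,
 *         .flow_ctrl = UART_HW_FLOWCTRL_DISABLE,
 *         .source_clk = UART_SCLK_DEFAULT,
 *     };
 *     ESP_ERROR_CHECK(uart_param_config(UART_NUM_1, &cfg));
 *     ESP_ERROR_CHECK(uart_set_pin(UART_NUM_1, 4, 5, UART_PIN_NO_CHANGE, UART_PIN_NO_CHANGE));
 *     ESP_ERROR_CHECK(uart_driver_install(UART_NUM_1, 2048, 2048, 10, NULL, 0));
 */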
esp_err_t uart_param_config(uart_port_t uart_num, const uart_config_t *uart_config)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((uart_config), ESP_FAIL, UART_TAG, "param null");
ESP_RETURN_ON_FALSE((uart_config->rx_flow_ctrl_thresh < UART_HW_FIFO_LEN(uart_num)), ESP_FAIL, UART_TAG, "rx flow thresh error");
ESP_RETURN_ON_FALSE((uart_config->flow_ctrl < UART_HW_FLOWCTRL_MAX), ESP_FAIL, UART_TAG, "hw_flowctrl mode error");
ESP_RETURN_ON_FALSE((uart_config->data_bits < UART_DATA_BITS_MAX), ESP_FAIL, UART_TAG, "data bit error");
bool allow_pd __attribute__((unused)) = (uart_config->flags.allow_pd || uart_config->flags.backup_before_sleep);
#if !SOC_UART_SUPPORT_SLEEP_RETENTION
ESP_RETURN_ON_FALSE(allow_pd == 0, ESP_ERR_NOT_SUPPORTED, UART_TAG, "not able to power down in light sleep");
#endif
uart_module_enable(uart_num);
#if SOC_UART_SUPPORT_SLEEP_RETENTION && CONFIG_PM_POWER_DOWN_PERIPHERAL_IN_LIGHT_SLEEP
if (uart_num != CONFIG_ESP_CONSOLE_UART_NUM && uart_num < SOC_UART_HP_NUM) {
_lock_acquire(&(uart_context[uart_num].mutex));
sleep_retention_module_t module = uart_reg_retention_info[uart_num].module;
if (allow_pd && !sleep_retention_is_module_created(module)) {
if (sleep_retention_is_module_inited(module)) {
if (sleep_retention_module_allocate(module) != ESP_OK) {
ESP_LOGW(UART_TAG, "create retention module failed, power domain can't turn off");
}
} else {
ESP_LOGW(UART_TAG, "retention module not initialized first, unable to create retention module");
}
} else if (!allow_pd && sleep_retention_is_module_created(module)) {
assert(sleep_retention_is_module_inited(module));
sleep_retention_module_free(module);
}
_lock_release(&(uart_context[uart_num].mutex));
}
#endif
soc_module_clk_t uart_sclk_sel = 0;
if (uart_num < SOC_UART_HP_NUM) {
uart_sclk_sel = (soc_module_clk_t)((uart_config->source_clk) ? uart_config->source_clk : UART_SCLK_DEFAULT);
}
#if (SOC_UART_LP_NUM >= 1)
else {
uart_sclk_sel = (soc_module_clk_t)((uart_config->lp_source_clk) ? uart_config->lp_source_clk : LP_UART_SCLK_DEFAULT);
}
#endif
#if SOC_UART_SUPPORT_RTC_CLK
if (uart_sclk_sel == (soc_module_clk_t)UART_SCLK_RTC) {
periph_rtc_dig_clk8m_enable();
}
#endif
uint32_t sclk_freq;
ESP_RETURN_ON_ERROR(esp_clk_tree_src_get_freq_hz(uart_sclk_sel, ESP_CLK_TREE_SRC_FREQ_PRECISION_CACHED, &sclk_freq), UART_TAG, "Invalid src_clk");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_init(&(uart_context[uart_num].hal), uart_num);
if (uart_num < SOC_UART_HP_NUM) {
esp_clk_tree_enable_src((soc_module_clk_t)uart_sclk_sel, true);
HP_UART_SRC_CLK_ATOMIC() {
uart_hal_set_sclk(&(uart_context[uart_num].hal), uart_sclk_sel);
uart_hal_set_baudrate(&(uart_context[uart_num].hal), uart_config->baud_rate, sclk_freq);
}
}
#if (SOC_UART_LP_NUM >= 1)
else {
LP_UART_SRC_CLK_ATOMIC() {
lp_uart_ll_set_source_clk(uart_context[uart_num].hal.dev, (soc_periph_lp_uart_clk_src_t)uart_sclk_sel);
}
lp_uart_ll_set_baudrate(uart_context[uart_num].hal.dev, uart_config->baud_rate, sclk_freq);
}
#endif
uart_hal_set_parity(&(uart_context[uart_num].hal), uart_config->parity);
uart_hal_set_data_bit_num(&(uart_context[uart_num].hal), uart_config->data_bits);
uart_hal_set_stop_bits(&(uart_context[uart_num].hal), uart_config->stop_bits);
uart_hal_set_tx_idle_num(&(uart_context[uart_num].hal), UART_TX_IDLE_NUM_DEFAULT);
uart_hal_set_hw_flow_ctrl(&(uart_context[uart_num].hal), uart_config->flow_ctrl, uart_config->rx_flow_ctrl_thresh);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
uart_hal_txfifo_rst(&(uart_context[uart_num].hal));
return ESP_OK;
}
esp_err_t uart_intr_config(uart_port_t uart_num, const uart_intr_config_t *intr_conf)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((intr_conf), ESP_FAIL, UART_TAG, "param null");
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_LL_INTR_MASK);
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
if (intr_conf->intr_enable_mask & UART_INTR_RXFIFO_TOUT) {
uart_hal_set_rx_timeout(&(uart_context[uart_num].hal), intr_conf->rx_timeout_thresh);
} else {
uart_hal_set_rx_timeout(&(uart_context[uart_num].hal), 0);
}
if (intr_conf->intr_enable_mask & UART_INTR_RXFIFO_FULL) {
uart_hal_set_rxfifo_full_thr(&(uart_context[uart_num].hal), intr_conf->rxfifo_full_thresh);
}
if (intr_conf->intr_enable_mask & UART_INTR_TXFIFO_EMPTY) {
uart_hal_set_txfifo_empty_thr(&(uart_context[uart_num].hal), intr_conf->txfifo_empty_intr_thresh);
}
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), intr_conf->intr_enable_mask);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
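/* Scan backwards through `buf` (starting at index `length`) for `pat_num` consecutive
 * occurrences of `pat_chr`; returns the index of the first character of the rightmost
 * such run, or -1 if none is found. */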
static int UART_ISR_ATTR uart_find_pattern_from_last(uint8_t *buf, int length, uint8_t pat_chr, uint8_t pat_num)
{
int cnt = 0;
int len = length;
while (len >= 0) {
if (buf[len] == pat_chr) {
cnt++;
} else {
cnt = 0;
}
if (cnt >= pat_num) {
break;
}
len --;
}
return len;
}
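/* Copy up to `len` bytes into the TX FIFO. In RS485 half-duplex mode the RTS line is
 * driven for transmit and the TX_DONE interrupt is armed first so the transceiver can
 * be turned around once the FIFO has drained. Returns the number of bytes written. */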
static uint32_t UART_ISR_ATTR uart_enable_tx_write_fifo(uart_port_t uart_num, const uint8_t *pbuf, uint32_t len)
{
uint32_t sent_len = 0;
UART_ENTER_CRITICAL_SAFE(&(uart_context[uart_num].spinlock));
if (UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX)) {
uart_hal_set_rts(&(uart_context[uart_num].hal), 0);
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
}
uart_hal_write_txfifo(&(uart_context[uart_num].hal), pbuf, len, &sent_len);
UART_EXIT_CRITICAL_SAFE(&(uart_context[uart_num].spinlock));
return sent_len;
}
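/* Default UART interrupt handler. Each loop iteration reads the interrupt status,
 * services one condition (TX FIFO refill, RX data / pattern detection, errors, break
 * and TX-done handling, wakeup) and, when relevant, posts a uart_event_t to the
 * application's event queue. Runs until the status register reads zero. */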
static void UART_ISR_ATTR uart_rx_intr_handler_default(void *param)
{
uart_obj_t *p_uart = (uart_obj_t *) param;
uint8_t uart_num = p_uart->uart_num;
int rx_fifo_len = 0;
uint32_t uart_intr_status = 0;
BaseType_t HPTaskAwoken = 0;
bool need_yield = false;
static uint8_t pat_flg = 0;
BaseType_t sent = pdFALSE;
while (1) {
uart_intr_status = uart_hal_get_intsts_mask(&(uart_context[uart_num].hal));
if (uart_intr_status == 0) {
break;
}
uart_event_t uart_event = {
.type = UART_EVENT_MAX,
};
if (uart_intr_status & UART_INTR_TXFIFO_EMPTY) {
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
if (p_uart->tx_waiting_brk) {
continue;
}
if (p_uart->tx_waiting_fifo == true && p_uart->tx_buf_size == 0) {
p_uart->tx_waiting_fifo = false;
xSemaphoreGiveFromISR(p_uart->tx_fifo_sem, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
} else {
if (p_uart->tx_buf_size == 0) {
continue;
}
bool en_tx_flg = false;
uint32_t tx_fifo_rem = uart_hal_get_txfifo_len(&(uart_context[uart_num].hal));
while (tx_fifo_rem) {
if (p_uart->tx_len_tot == 0 || p_uart->tx_ptr == NULL || p_uart->tx_len_cur == 0) {
size_t size;
p_uart->tx_head = (uart_tx_data_t *) xRingbufferReceiveFromISR(p_uart->tx_ring_buf, &size);
if (p_uart->tx_head) {
if (p_uart->tx_len_tot == 0) {
p_uart->tx_ptr = NULL;
p_uart->tx_len_tot = p_uart->tx_head->tx_data.size;
if (p_uart->tx_head->type == UART_DATA_BREAK) {
p_uart->tx_brk_flg = 1;
p_uart->tx_brk_len = p_uart->tx_head->tx_data.brk_len;
}
vRingbufferReturnItemFromISR(p_uart->tx_ring_buf, p_uart->tx_head, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
} else if (p_uart->tx_ptr == NULL) {
p_uart->tx_ptr = (uint8_t *)p_uart->tx_head;
en_tx_flg = true;
p_uart->tx_len_cur = size;
}
} else {
break;
}
}
if (p_uart->tx_len_tot > 0 && p_uart->tx_ptr && p_uart->tx_len_cur > 0) {
uint32_t send_len = uart_enable_tx_write_fifo(uart_num, (const uint8_t *) p_uart->tx_ptr,
MIN(p_uart->tx_len_cur, tx_fifo_rem));
p_uart->tx_ptr += send_len;
p_uart->tx_len_tot -= send_len;
p_uart->tx_len_cur -= send_len;
tx_fifo_rem -= send_len;
if (p_uart->tx_len_cur == 0) {
vRingbufferReturnItemFromISR(p_uart->tx_ring_buf, p_uart->tx_head, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
p_uart->tx_head = NULL;
p_uart->tx_ptr = NULL;
if (p_uart->tx_len_tot == 0 && p_uart->tx_brk_flg == 1) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_tx_break(&(uart_context[uart_num].hal), p_uart->tx_brk_len);
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
p_uart->tx_waiting_brk = 1;
en_tx_flg = false;
} else {
en_tx_flg = true;
}
UART_ENTER_CRITICAL_ISR(&uart_selectlock);
if (p_uart->uart_select_notif_callback) {
p_uart->uart_select_notif_callback(uart_num, UART_SELECT_WRITE_NOTIF, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
}
UART_EXIT_CRITICAL_ISR(&uart_selectlock);
} else {
en_tx_flg = true;
}
}
}
if (en_tx_flg) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
}
}
} else if ((uart_intr_status & UART_INTR_RXFIFO_TOUT)
|| (uart_intr_status & UART_INTR_RXFIFO_FULL)
|| (uart_intr_status & UART_INTR_CMD_CHAR_DET)
) {
if (pat_flg == 1) {
uart_intr_status |= UART_INTR_CMD_CHAR_DET;
pat_flg = 0;
}
if (p_uart->rx_buffer_full_flg == false) {
rx_fifo_len = uart_hal_get_rxfifo_len(&(uart_context[uart_num].hal));
if ((p_uart_obj[uart_num]->rx_always_timeout_flg) && !(uart_intr_status & UART_INTR_RXFIFO_TOUT)) {
rx_fifo_len--;
}
uart_hal_read_rxfifo(&(uart_context[uart_num].hal), p_uart->rx_data_buf, &rx_fifo_len);
uint8_t pat_chr = 0;
uint8_t pat_num = 0;
int pat_idx = -1;
uart_hal_get_at_cmd_char(&(uart_context[uart_num].hal), &pat_chr, &pat_num);
if (uart_intr_status & UART_INTR_CMD_CHAR_DET) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
uart_event.type = UART_PATTERN_DET;
uart_event.size = rx_fifo_len;
pat_idx = uart_find_pattern_from_last(p_uart->rx_data_buf, rx_fifo_len - 1, pat_chr, pat_num);
} else {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_TOUT | UART_INTR_RXFIFO_FULL);
uart_event.type = UART_DATA;
uart_event.size = rx_fifo_len;
uart_event.timeout_flag = (uart_intr_status & UART_INTR_RXFIFO_TOUT) ? true : false;
}
p_uart->rx_stash_len = rx_fifo_len;
sent = xRingbufferSendFromISR(p_uart->rx_ring_buf, p_uart->rx_data_buf, p_uart->rx_stash_len, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
if (sent == pdFALSE) {
p_uart->rx_buffer_full_flg = true;
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_TOUT | UART_INTR_RXFIFO_FULL);
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
if (uart_event.type == UART_PATTERN_DET) {
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
if (rx_fifo_len < pat_num) {
uart_pattern_enqueue(uart_num, p_uart->rx_buffered_len - (pat_num - rx_fifo_len));
} else {
uart_pattern_enqueue(uart_num,
pat_idx <= -1 ?
p_uart->rx_buffered_len + p_uart->rx_stash_len :
p_uart->rx_buffered_len + pat_idx);
}
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
sent = xQueueSendFromISR(p_uart->event_queue, (void *)&uart_event, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
if ((p_uart->event_queue != NULL) && (sent == pdFALSE)) {
#ifndef CONFIG_UART_ISR_IN_IRAM
ESP_EARLY_LOGV(UART_TAG, "UART event queue full");
#endif
}
}
uart_event.type = UART_BUFFER_FULL;
} else {
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
if (uart_intr_status & UART_INTR_CMD_CHAR_DET) {
if (rx_fifo_len < pat_num) {
uart_pattern_enqueue(uart_num, p_uart->rx_buffered_len - (pat_num - rx_fifo_len));
} else if (pat_idx >= 0) {
uart_pattern_enqueue(uart_num, p_uart->rx_buffered_len + pat_idx);
}
}
p_uart->rx_buffered_len += p_uart->rx_stash_len;
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
}
if (uart_event.type == UART_DATA) {
UART_ENTER_CRITICAL_ISR(&uart_selectlock);
if (p_uart->uart_select_notif_callback) {
p_uart->uart_select_notif_callback(uart_num, UART_SELECT_READ_NOTIF, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
}
UART_EXIT_CRITICAL_ISR(&uart_selectlock);
}
} else {
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_FULL | UART_INTR_RXFIFO_TOUT);
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_FULL | UART_INTR_RXFIFO_TOUT);
if (uart_intr_status & UART_INTR_CMD_CHAR_DET) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
uart_event.type = UART_PATTERN_DET;
uart_event.size = rx_fifo_len;
pat_flg = 1;
}
}
} else if (uart_intr_status & UART_INTR_RXFIFO_OVF) {
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
UART_ENTER_CRITICAL_ISR(&uart_selectlock);
if (p_uart->uart_select_notif_callback) {
p_uart->uart_select_notif_callback(uart_num, UART_SELECT_ERROR_NOTIF, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
}
UART_EXIT_CRITICAL_ISR(&uart_selectlock);
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_OVF);
uart_event.type = UART_FIFO_OVF;
} else if (uart_intr_status & UART_INTR_BRK_DET) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_BRK_DET);
uart_event.type = UART_BREAK;
} else if (uart_intr_status & UART_INTR_FRAM_ERR) {
UART_ENTER_CRITICAL_ISR(&uart_selectlock);
if (p_uart->uart_select_notif_callback) {
p_uart->uart_select_notif_callback(uart_num, UART_SELECT_ERROR_NOTIF, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
}
UART_EXIT_CRITICAL_ISR(&uart_selectlock);
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_FRAM_ERR);
uart_event.type = UART_FRAME_ERR;
} else if (uart_intr_status & UART_INTR_PARITY_ERR) {
UART_ENTER_CRITICAL_ISR(&uart_selectlock);
if (p_uart->uart_select_notif_callback) {
p_uart->uart_select_notif_callback(uart_num, UART_SELECT_ERROR_NOTIF, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
}
UART_EXIT_CRITICAL_ISR(&uart_selectlock);
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_PARITY_ERR);
uart_event.type = UART_PARITY_ERR;
} else if (uart_intr_status & UART_INTR_TX_BRK_DONE) {
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_tx_break(&(uart_context[uart_num].hal), 0);
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
if (p_uart->tx_brk_flg == 1) {
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TXFIFO_EMPTY);
}
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
if (p_uart->tx_brk_flg == 1) {
p_uart->tx_brk_flg = 0;
p_uart->tx_waiting_brk = 0;
} else {
xSemaphoreGiveFromISR(p_uart->tx_brk_sem, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
}
} else if (uart_intr_status & UART_INTR_TX_BRK_IDLE) {
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_IDLE);
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_IDLE);
} else if (uart_intr_status & UART_INTR_CMD_CHAR_DET) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_CMD_CHAR_DET);
uart_event.type = UART_PATTERN_DET;
} else if ((uart_intr_status & UART_INTR_RS485_PARITY_ERR)
|| (uart_intr_status & UART_INTR_RS485_FRM_ERR)
|| (uart_intr_status & UART_INTR_RS485_CLASH)) {
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
p_uart_obj[uart_num]->coll_det_flg = true;
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_RS485_CLASH | UART_INTR_RS485_FRM_ERR | UART_INTR_RS485_PARITY_ERR);
uart_event.type = UART_EVENT_MAX;
} else if (uart_intr_status & UART_INTR_TX_DONE) {
if (UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX) && uart_hal_is_tx_idle(&(uart_context[uart_num].hal)) != true) {
uart_event.type = UART_EVENT_MAX;
} else {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
UART_ENTER_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
if (UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX)) {
uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
uart_hal_set_rts(&(uart_context[uart_num].hal), 1);
}
UART_EXIT_CRITICAL_ISR(&(uart_context[uart_num].spinlock));
xSemaphoreGiveFromISR(p_uart_obj[uart_num]->tx_done_sem, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
}
}
#if SOC_UART_SUPPORT_WAKEUP_INT
else if (uart_intr_status & UART_INTR_WAKEUP) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_WAKEUP);
uart_event.type = UART_WAKEUP;
}
#endif
else {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), uart_intr_status);
uart_event.type = UART_EVENT_MAX;
}
if (uart_event.type != UART_EVENT_MAX && p_uart->event_queue) {
sent = xQueueSendFromISR(p_uart->event_queue, (void *)&uart_event, &HPTaskAwoken);
need_yield |= (HPTaskAwoken == pdTRUE);
if (sent == pdFALSE) {
#ifndef CONFIG_UART_ISR_IN_IRAM
ESP_EARLY_LOGV(UART_TAG, "UART event queue full");
#endif
}
}
}
if (need_yield) {
portYIELD_FROM_ISR();
}
}
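/* Block until the TX FIFO and transmit shifter are empty, or until the timeout
 * expires. Uses the TX_DONE interrupt plus tx_done_sem rather than busy-polling,
 * and holds the pm lock (when PM is enabled) while waiting. */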
esp_err_t uart_wait_tx_done(uart_port_t uart_num, TickType_t ticks_to_wait)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), ESP_FAIL, UART_TAG, "uart driver error");
BaseType_t res;
TickType_t ticks_start = xTaskGetTickCount();
res = xSemaphoreTake(p_uart_obj[uart_num]->tx_mux, (TickType_t)ticks_to_wait);
if (res == pdFALSE) {
return ESP_ERR_TIMEOUT;
}
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
bool is_rs485_mode = UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX);
bool disabled = !(uart_hal_get_intr_ena_status(&(uart_context[uart_num].hal)) & UART_INTR_TX_DONE);
assert(!(is_rs485_mode &&
disabled &&
uart_hal_get_intraw_mask(&(uart_context[uart_num].hal)) & UART_INTR_TX_DONE));
if (disabled && !is_rs485_mode) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
}
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
xSemaphoreTake(p_uart_obj[uart_num]->tx_done_sem, 0);
esp_rom_delay_us(2);
if (uart_hal_is_tx_idle(&(uart_context[uart_num].hal))) {
xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
return ESP_OK;
}
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_DONE);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
TickType_t ticks_end = xTaskGetTickCount();
if (ticks_end - ticks_start > ticks_to_wait) {
ticks_to_wait = 0;
} else {
ticks_to_wait = ticks_to_wait - (ticks_end - ticks_start);
}
#if PROTECT_APB
esp_pm_lock_acquire(p_uart_obj[uart_num]->pm_lock);
#endif
res = xSemaphoreTake(p_uart_obj[uart_num]->tx_done_sem, (TickType_t)ticks_to_wait);
#if PROTECT_APB
esp_pm_lock_release(p_uart_obj[uart_num]->pm_lock);
#endif
xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
return (res == pdFALSE) ? ESP_ERR_TIMEOUT : ESP_OK;
}
int uart_tx_chars(uart_port_t uart_num, const char *buffer, uint32_t len)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), (-1), UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), (-1), UART_TAG, "uart driver error");
ESP_RETURN_ON_FALSE(buffer, (-1), UART_TAG, "buffer null");
if (len == 0) {
return 0;
}
int tx_len = 0;
xSemaphoreTake(p_uart_obj[uart_num]->tx_mux, (TickType_t)portMAX_DELAY);
tx_len = (int)uart_enable_tx_write_fifo(uart_num, (const uint8_t *) buffer, len);
xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
return tx_len;
}
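/* Common TX path for uart_write_bytes()/uart_write_bytes_with_break(). With a TX ring
 * buffer installed the data is queued and drained from the ISR; without one the caller
 * blocks on tx_fifo_sem and writes the FIFO directly. A break, if requested, is sent
 * after the last byte. Always returns the full requested size. */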
static int uart_tx_all(uart_port_t uart_num, const char *src, size_t size, bool brk_en, int brk_len)
{
if (size == 0) {
return 0;
}
size_t original_size = size;
xSemaphoreTake(p_uart_obj[uart_num]->tx_mux, (TickType_t)portMAX_DELAY);
#if PROTECT_APB
esp_pm_lock_acquire(p_uart_obj[uart_num]->pm_lock);
#endif
p_uart_obj[uart_num]->coll_det_flg = false;
if (p_uart_obj[uart_num]->tx_buf_size > 0) {
size_t max_size = xRingbufferGetMaxItemSize(p_uart_obj[uart_num]->tx_ring_buf);
int offset = 0;
uart_tx_data_t evt;
evt.tx_data.size = size;
evt.tx_data.brk_len = brk_len;
if (brk_en) {
evt.type = UART_DATA_BREAK;
} else {
evt.type = UART_DATA;
}
xRingbufferSend(p_uart_obj[uart_num]->tx_ring_buf, (void *) &evt, sizeof(uart_tx_data_t), portMAX_DELAY);
while (size > 0) {
size_t send_size = size > max_size / 2 ? max_size / 2 : size;
xRingbufferSend(p_uart_obj[uart_num]->tx_ring_buf, (void *)(src + offset), send_size, portMAX_DELAY);
size -= send_size;
offset += send_size;
uart_enable_tx_intr(uart_num, 1, UART_THRESHOLD_NUM(uart_num, UART_EMPTY_THRESH_DEFAULT));
}
} else {
while (size) {
if (pdTRUE == xSemaphoreTake(p_uart_obj[uart_num]->tx_fifo_sem, (TickType_t)portMAX_DELAY)) {
uint32_t sent = uart_enable_tx_write_fifo(uart_num, (const uint8_t *) src, size);
if (sent < size) {
p_uart_obj[uart_num]->tx_waiting_fifo = true;
uart_enable_tx_intr(uart_num, 1, UART_THRESHOLD_NUM(uart_num, UART_EMPTY_THRESH_DEFAULT));
}
size -= sent;
src += sent;
}
}
if (brk_en) {
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_tx_break(&(uart_context[uart_num].hal), brk_len);
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_TX_BRK_DONE);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
xSemaphoreTake(p_uart_obj[uart_num]->tx_brk_sem, (TickType_t)portMAX_DELAY);
}
xSemaphoreGive(p_uart_obj[uart_num]->tx_fifo_sem);
}
#if PROTECT_APB
esp_pm_lock_release(p_uart_obj[uart_num]->pm_lock);
#endif
xSemaphoreGive(p_uart_obj[uart_num]->tx_mux);
return original_size;
}
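/* Blocking write of `size` bytes from `src`; returns the number of bytes queued for
 * transmission or -1 on a parameter/driver-state error.
 *
 * Illustrative usage sketch (assumes the driver has already been installed on
 * UART_NUM_1 with uart_driver_install() and the port configured):
 *
 *     const char msg[] = "hello uart\r\n";
 *     int written = uart_write_bytes(UART_NUM_1, msg, sizeof(msg) - 1);
 *     if (written < 0) {
 *         // parameter or driver-state error
 *     }
 */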
int uart_write_bytes(uart_port_t uart_num, const void *src, size_t size)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), (-1), UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num] != NULL), (-1), UART_TAG, "uart driver error");
ESP_RETURN_ON_FALSE(src, (-1), UART_TAG, "buffer null");
return uart_tx_all(uart_num, src, size, 0, 0);
}
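/* Same as uart_write_bytes(), but a break signal of brk_len (1..255) is sent once the
 * data has been transmitted. */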
int uart_write_bytes_with_break(uart_port_t uart_num, const void *src, size_t size, int brk_len)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), (-1), UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), (-1), UART_TAG, "uart driver error");
ESP_RETURN_ON_FALSE((size > 0), (-1), UART_TAG, "uart size error");
ESP_RETURN_ON_FALSE((src), (-1), UART_TAG, "uart data null");
ESP_RETURN_ON_FALSE((brk_len > 0 && brk_len < 256), (-1), UART_TAG, "break_num error");
return uart_tx_all(uart_num, src, size, 1, brk_len);
}
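/* If the RX ring buffer overflowed earlier, try to push the stashed FIFO data into the
 * ring buffer now that the reader has freed space; on success clear the full flag and
 * re-enable the RX full/timeout interrupts. Returns true if the stash was flushed. */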
static bool uart_check_buf_full(uart_port_t uart_num)
{
if (p_uart_obj[uart_num]->rx_buffer_full_flg) {
BaseType_t res = xRingbufferSend(p_uart_obj[uart_num]->rx_ring_buf, p_uart_obj[uart_num]->rx_data_buf, p_uart_obj[uart_num]->rx_stash_len, 0);
if (res == pdTRUE) {
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
p_uart_obj[uart_num]->rx_buffered_len += p_uart_obj[uart_num]->rx_stash_len;
p_uart_obj[uart_num]->rx_buffer_full_flg = false;
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
/* ... */
uart_reenable_intr_mask(p_uart_obj[uart_num]->uart_num, UART_INTR_RXFIFO_TOUT | UART_INTR_RXFIFO_FULL);
return true;
}
}
return false;
}
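/* Blocking read of up to `length` bytes into `buf`, waiting at most `ticks_to_wait` for
 * data to arrive. Returns the number of bytes copied, or -1 on error or if the RX mutex
 * could not be taken in time.
 *
 * Illustrative usage sketch (assumes the driver is installed on UART_NUM_1):
 *
 *     uint8_t data[64];
 *     int n = uart_read_bytes(UART_NUM_1, data, sizeof(data), pdMS_TO_TICKS(100));
 *     if (n > 0) {
 *         // process `n` received bytes
 *     }
 */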
int uart_read_bytes(uart_port_t uart_num, void *buf, uint32_t length, TickType_t ticks_to_wait)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), (-1), UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((buf), (-1), UART_TAG, "uart data null");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), (-1), UART_TAG, "uart driver error");
uint8_t *data = NULL;
size_t size = 0;
size_t copy_len = 0;
if (xSemaphoreTake(p_uart_obj[uart_num]->rx_mux, (TickType_t)ticks_to_wait) != pdTRUE) {
return -1;
}
while (length) {
data = (uint8_t *) xRingbufferReceiveUpTo(p_uart_obj[uart_num]->rx_ring_buf, &size, (TickType_t) ticks_to_wait, length);
if (!data) {
if (uart_check_buf_full(uart_num)) {
continue;
} else {
break;
}
}
memcpy((uint8_t *)buf + copy_len, data, size);
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
p_uart_obj[uart_num]->rx_buffered_len -= size;
uart_pattern_queue_update(uart_num, size);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
copy_len += size;
length -= size;
vRingbufferReturnItem(p_uart_obj[uart_num]->rx_ring_buf, data);
uart_check_buf_full(uart_num);
}
xSemaphoreGive(p_uart_obj[uart_num]->rx_mux);
return copy_len;
}
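/* Report how many received bytes are currently buffered in the RX ring buffer. */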
esp_err_t uart_get_buffered_data_len(uart_port_t uart_num, size_t *size)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), ESP_FAIL, UART_TAG, "uart driver error");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
*size = p_uart_obj[uart_num]->rx_buffered_len;
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
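/* Report how much free space remains in the TX ring buffer (tx_buf_size minus the bytes
 * currently queued for transmission). */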
esp_err_t uart_get_tx_buffer_free_size(uart_port_t uart_num, size_t *size)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), ESP_ERR_INVALID_ARG, UART_TAG, "uart driver error");
ESP_RETURN_ON_FALSE((size != NULL), ESP_ERR_INVALID_ARG, UART_TAG, "arg pointer is NULL");
*size = p_uart_obj[uart_num]->tx_buf_size - p_uart_obj[uart_num]->tx_len_tot;
return ESP_OK;
}
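/* Discard everything held in the RX ring buffer (including any stashed overflow data),
 * reset the hardware RX FIFO and re-enable the RX full/timeout interrupts.
 * uart_flush() is an alias of uart_flush_input(). */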
esp_err_t uart_flush(uart_port_t uart_num) __attribute__((alias("uart_flush_input")));
esp_err_t uart_flush_input(uart_port_t uart_num)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), ESP_FAIL, UART_TAG, "uart driver error");
uart_obj_t *p_uart = p_uart_obj[uart_num];
uint8_t *data;
size_t size;
xSemaphoreTake(p_uart->rx_mux, (TickType_t)portMAX_DELAY);
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_FULL | UART_INTR_RXFIFO_TOUT);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
while (true) {
data = (uint8_t *) xRingbufferReceive(p_uart->rx_ring_buf, &size, (TickType_t) 0);
if (data == NULL) {
bool error = false;
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
if (p_uart_obj[uart_num]->rx_buffered_len != 0) {
p_uart_obj[uart_num]->rx_buffered_len = 0;
error = true;
}
p_uart_obj[uart_num]->rx_buffer_full_flg = false;
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
if (error) {
ESP_LOGE(UART_TAG, "rx_buffered_len error");
}
break;
}
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
p_uart_obj[uart_num]->rx_buffered_len -= size;
uart_pattern_queue_update(uart_num, size);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
vRingbufferReturnItem(p_uart->rx_ring_buf, data);
if (p_uart_obj[uart_num]->rx_buffer_full_flg) {
BaseType_t res = xRingbufferSend(p_uart_obj[uart_num]->rx_ring_buf, p_uart_obj[uart_num]->rx_data_buf, p_uart_obj[uart_num]->rx_stash_len, 0);
if (res == pdTRUE) {
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
p_uart_obj[uart_num]->rx_buffered_len += p_uart_obj[uart_num]->rx_stash_len;
p_uart_obj[uart_num]->rx_buffer_full_flg = false;
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
}
}
}
uart_hal_rxfifo_rst(&(uart_context[uart_num].hal));
/* ... */
uart_reenable_intr_mask(uart_num, UART_INTR_RXFIFO_TOUT | UART_INTR_RXFIFO_FULL);
xSemaphoreGive(p_uart->rx_mux);
return ESP_OK;
}
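/* Release every OS object owned by a driver instance (semaphores, mutexes, event queue,
 * ring buffers, RX stash buffer and, if enabled, the PM lock). Each handle is checked
 * first, so this is safe to call on a partially constructed object from the allocation
 * error path. */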
static void uart_free_driver_obj(uart_obj_t *uart_obj)
{
if (uart_obj->tx_fifo_sem) {
vSemaphoreDeleteWithCaps(uart_obj->tx_fifo_sem);
}
if (uart_obj->tx_done_sem) {
vSemaphoreDeleteWithCaps(uart_obj->tx_done_sem);
}
if (uart_obj->tx_brk_sem) {
vSemaphoreDeleteWithCaps(uart_obj->tx_brk_sem);
}
if (uart_obj->tx_mux) {
vSemaphoreDeleteWithCaps(uart_obj->tx_mux);
}
if (uart_obj->rx_mux) {
vSemaphoreDeleteWithCaps(uart_obj->rx_mux);
}
if (uart_obj->event_queue) {
vQueueDeleteWithCaps(uart_obj->event_queue);
}
if (uart_obj->rx_ring_buf) {
vRingbufferDeleteWithCaps(uart_obj->rx_ring_buf);
}
if (uart_obj->tx_ring_buf) {
vRingbufferDeleteWithCaps(uart_obj->tx_ring_buf);
}
heap_caps_free(uart_obj->rx_data_buf);
#if PROTECT_APB
if (uart_obj->pm_lock) {
esp_pm_lock_delete(uart_obj->pm_lock);
}
#endif
heap_caps_free(uart_obj);
}
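/* Allocate and populate a driver object: RX stash buffer, optional event queue and TX
 * ring buffer, RX ring buffer, the TX/RX mutexes and semaphores and, when APB protection
 * is enabled, the PM lock. Returns NULL (after freeing partial allocations) on failure. */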
static uart_obj_t *uart_alloc_driver_obj(uart_port_t uart_num, int event_queue_size, int tx_buffer_size, int rx_buffer_size)
{
uart_obj_t *uart_obj = heap_caps_calloc(1, sizeof(uart_obj_t), UART_MALLOC_CAPS);
if (!uart_obj) {
return NULL;
}
uart_obj->rx_data_buf = heap_caps_calloc(UART_HW_FIFO_LEN(uart_num), sizeof(uint32_t), UART_MALLOC_CAPS);
if (!uart_obj->rx_data_buf) {
goto err;
}
if (event_queue_size > 0) {
uart_obj->event_queue = xQueueCreateWithCaps(event_queue_size, sizeof(uart_event_t), UART_MALLOC_CAPS);
if (!uart_obj->event_queue) {
goto err;
}
}
if (tx_buffer_size > 0) {
uart_obj->tx_ring_buf = xRingbufferCreateWithCaps(tx_buffer_size, RINGBUF_TYPE_NOSPLIT, UART_MALLOC_CAPS);
if (!uart_obj->tx_ring_buf) {
goto err;
}
}
uart_obj->rx_ring_buf = xRingbufferCreateWithCaps(rx_buffer_size, RINGBUF_TYPE_BYTEBUF, UART_MALLOC_CAPS);
uart_obj->tx_mux = xSemaphoreCreateMutexWithCaps(UART_MALLOC_CAPS);
uart_obj->rx_mux = xSemaphoreCreateMutexWithCaps(UART_MALLOC_CAPS);
uart_obj->tx_brk_sem = xSemaphoreCreateBinaryWithCaps(UART_MALLOC_CAPS);
uart_obj->tx_done_sem = xSemaphoreCreateBinaryWithCaps(UART_MALLOC_CAPS);
uart_obj->tx_fifo_sem = xSemaphoreCreateBinaryWithCaps(UART_MALLOC_CAPS);
if (!uart_obj->rx_ring_buf || !uart_obj->rx_mux || !uart_obj->tx_mux || !uart_obj->tx_brk_sem ||
!uart_obj->tx_done_sem || !uart_obj->tx_fifo_sem) {
goto err;
}
#if PROTECT_APB
if (esp_pm_lock_create(ESP_PM_APB_FREQ_MAX, 0, "uart_driver",
&uart_obj->pm_lock) != ESP_OK) {
goto err;
}
#endif
return uart_obj;
err:
uart_free_driver_obj(uart_obj);
return NULL;
}
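/* Install the driver: validate buffer sizes against the hardware FIFO length, allocate
 * and initialize the driver object, enable the peripheral, hook up the ISR and apply the
 * default interrupt thresholds.
 *
 * Illustrative usage sketch (buffer sizes must exceed the hardware FIFO length checked
 * below; UART_NUM_1 and the queue depth are example values):
 *
 *     QueueHandle_t uart_queue;
 *     ESP_ERROR_CHECK(uart_driver_install(UART_NUM_1, 1024, 1024, 10, &uart_queue, 0));
 */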
esp_err_t uart_driver_install(uart_port_t uart_num, int rx_buffer_size, int tx_buffer_size, int event_queue_size, QueueHandle_t *uart_queue, int intr_alloc_flags)
{
esp_err_t ret;
#ifdef CONFIG_ESP_SYSTEM_GDBSTUB_RUNTIME
ESP_RETURN_ON_FALSE((uart_num != CONFIG_ESP_CONSOLE_UART_NUM), ESP_FAIL, UART_TAG, "UART used by GDB-stubs! Please disable GDB in menuconfig.");
#endif
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((rx_buffer_size > UART_HW_FIFO_LEN(uart_num)), ESP_FAIL, UART_TAG, "uart rx buffer length error");
ESP_RETURN_ON_FALSE((tx_buffer_size > UART_HW_FIFO_LEN(uart_num)) || (tx_buffer_size == 0), ESP_FAIL, UART_TAG, "uart tx buffer length error");
#if CONFIG_UART_ISR_IN_IRAM
if ((intr_alloc_flags & ESP_INTR_FLAG_IRAM) == 0) {
ESP_LOGI(UART_TAG, "ESP_INTR_FLAG_IRAM flag not set while CONFIG_UART_ISR_IN_IRAM is enabled, flag updated");
intr_alloc_flags |= ESP_INTR_FLAG_IRAM;
}
#else
if ((intr_alloc_flags & ESP_INTR_FLAG_IRAM) != 0) {
ESP_LOGW(UART_TAG, "ESP_INTR_FLAG_IRAM flag is set while CONFIG_UART_ISR_IN_IRAM is not enabled, flag updated");
intr_alloc_flags &= ~ESP_INTR_FLAG_IRAM;
}
#endif
if (p_uart_obj[uart_num] == NULL) {
p_uart_obj[uart_num] = uart_alloc_driver_obj(uart_num, event_queue_size, tx_buffer_size, rx_buffer_size);
if (p_uart_obj[uart_num] == NULL) {
ESP_LOGE(UART_TAG, "UART driver malloc error");
return ESP_FAIL;
}
p_uart_obj[uart_num]->uart_num = uart_num;
p_uart_obj[uart_num]->uart_mode = UART_MODE_UART;
p_uart_obj[uart_num]->coll_det_flg = false;
p_uart_obj[uart_num]->rx_always_timeout_flg = false;
p_uart_obj[uart_num]->event_queue_size = event_queue_size;
p_uart_obj[uart_num]->tx_ptr = NULL;
p_uart_obj[uart_num]->tx_head = NULL;
p_uart_obj[uart_num]->tx_len_tot = 0;
p_uart_obj[uart_num]->tx_brk_flg = 0;
p_uart_obj[uart_num]->tx_brk_len = 0;
p_uart_obj[uart_num]->tx_waiting_brk = 0;
p_uart_obj[uart_num]->rx_buffered_len = 0;
p_uart_obj[uart_num]->rx_buffer_full_flg = false;
p_uart_obj[uart_num]->tx_waiting_fifo = false;
p_uart_obj[uart_num]->rx_int_usr_mask = UART_INTR_RXFIFO_FULL | UART_INTR_RXFIFO_TOUT;
p_uart_obj[uart_num]->tx_buf_size = tx_buffer_size;
p_uart_obj[uart_num]->uart_select_notif_callback = NULL;
xSemaphoreGive(p_uart_obj[uart_num]->tx_fifo_sem);
uart_pattern_queue_reset(uart_num, UART_PATTERN_DET_QLEN_DEFAULT);
if (uart_queue) {
*uart_queue = p_uart_obj[uart_num]->event_queue;
ESP_LOGI(UART_TAG, "queue free spaces: %" PRIu32, (uint32_t)uxQueueSpacesAvailable(p_uart_obj[uart_num]->event_queue));
}
} else {
ESP_LOGE(UART_TAG, "UART driver already installed");
return ESP_FAIL;
}
uart_intr_config_t uart_intr = {
.intr_enable_mask = UART_INTR_CONFIG_FLAG,
.rxfifo_full_thresh = UART_THRESHOLD_NUM(uart_num, UART_FULL_THRESH_DEFAULT),
.rx_timeout_thresh = UART_TOUT_THRESH_DEFAULT,
.txfifo_empty_intr_thresh = UART_THRESHOLD_NUM(uart_num, UART_EMPTY_THRESH_DEFAULT),
};
uart_module_enable(uart_num);
uart_hal_disable_intr_mask(&(uart_context[uart_num].hal), UART_LL_INTR_MASK);
uart_hal_clr_intsts_mask(&(uart_context[uart_num].hal), UART_LL_INTR_MASK);
ret = esp_intr_alloc(uart_periph_signal[uart_num].irq, intr_alloc_flags,
uart_rx_intr_handler_default, p_uart_obj[uart_num],
&p_uart_obj[uart_num]->intr_handle);
ESP_GOTO_ON_ERROR(ret, err, UART_TAG, "Could not allocate an interrupt for UART");
ret = uart_intr_config(uart_num, &uart_intr);
ESP_GOTO_ON_ERROR(ret, err, UART_TAG, "Could not configure the interrupt for UART");
return ret;
err:
uart_driver_delete(uart_num);
return ret;
}
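/* Uninstall the driver: free the interrupt, disable RX/TX interrupts, release pattern
 * detection and driver resources, then disable the module (with RTC-clock and
 * sleep-retention cleanup where supported). Calling it again on a deleted port is a
 * no-op that returns ESP_OK. */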
esp_err_t uart_driver_delete(uart_port_t uart_num)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_FAIL, UART_TAG, "uart_num error");
if (p_uart_obj[uart_num] == NULL) {
ESP_LOGI(UART_TAG, "ALREADY NULL");
return ESP_OK;
}
esp_intr_free(p_uart_obj[uart_num]->intr_handle);
uart_disable_rx_intr(uart_num);
uart_disable_tx_intr(uart_num);
uart_pattern_link_free(uart_num);
uart_free_driver_obj(p_uart_obj[uart_num]);
p_uart_obj[uart_num] = NULL;
#if SOC_UART_SUPPORT_RTC_CLK
soc_module_clk_t sclk = 0;
uart_hal_get_sclk(&(uart_context[uart_num].hal), &sclk);
if (sclk == (soc_module_clk_t)UART_SCLK_RTC) {
periph_rtc_dig_clk8m_disable();
}
#endif
#if SOC_UART_SUPPORT_SLEEP_RETENTION && CONFIG_PM_POWER_DOWN_PERIPHERAL_IN_LIGHT_SLEEP
if (uart_num != CONFIG_ESP_CONSOLE_UART_NUM && uart_num < SOC_UART_HP_NUM) {
sleep_retention_module_t module = uart_reg_retention_info[uart_num].module;
_lock_acquire(&(uart_context[uart_num].mutex));
if (sleep_retention_is_module_created(module)) {
assert(sleep_retention_is_module_inited(module));
sleep_retention_module_free(module);
}
_lock_release(&(uart_context[uart_num].mutex));
}
#endif
uart_module_disable(uart_num);
return ESP_OK;
}
bool uart_is_driver_installed(uart_port_t uart_num)
{
return uart_num < UART_NUM_MAX && (p_uart_obj[uart_num] != NULL);
}
void uart_set_select_notif_callback(uart_port_t uart_num, uart_select_notif_callback_t uart_select_notif_callback)
{
if (uart_num < UART_NUM_MAX && p_uart_obj[uart_num]) {
p_uart_obj[uart_num]->uart_select_notif_callback = (uart_select_notif_callback_t) uart_select_notif_callback;
}
}
portMUX_TYPE *uart_get_selectlock(void)
{
return &uart_selectlock;
}
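/* Select the UART operating mode (e.g. normal UART or one of the RS485 modes). RS485
 * modes require hardware RTS flow control to be disabled first, and the LP UART only
 * supports normal UART mode. */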
esp_err_t uart_set_mode(uart_port_t uart_num, uart_mode_t mode)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), ESP_ERR_INVALID_STATE, UART_TAG, "uart driver error");
if ((mode == UART_MODE_RS485_COLLISION_DETECT) || (mode == UART_MODE_RS485_APP_CTRL)
|| (mode == UART_MODE_RS485_HALF_DUPLEX)) {
ESP_RETURN_ON_FALSE((!uart_hal_is_hw_rts_en(&(uart_context[uart_num].hal))), ESP_ERR_INVALID_ARG, UART_TAG,
"disable hw flowctrl before using RS485 mode");
}
if (uart_num >= SOC_UART_HP_NUM) {
ESP_RETURN_ON_FALSE((mode == UART_MODE_UART), ESP_ERR_INVALID_ARG, UART_TAG, "LP_UART can only be in normal UART mode");
}
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_mode(&(uart_context[uart_num].hal), mode);
if (mode == UART_MODE_RS485_COLLISION_DETECT) {
p_uart_obj[uart_num]->coll_det_flg = false;
uart_hal_ena_intr_mask(&(uart_context[uart_num].hal), UART_INTR_RXFIFO_TOUT
| UART_INTR_RXFIFO_FULL
| UART_INTR_RS485_CLASH
| UART_INTR_RS485_FRM_ERR
| UART_INTR_RS485_PARITY_ERR);
}
p_uart_obj[uart_num]->uart_mode = mode;
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_rx_full_threshold(uart_port_t uart_num, int threshold)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((threshold < UART_THRESHOLD_NUM(uart_num, UART_RXFIFO_FULL_THRHD_V)) && (threshold > 0), ESP_ERR_INVALID_ARG, UART_TAG,
"rx fifo full threshold value error");
if (p_uart_obj[uart_num] == NULL) {
ESP_LOGE(UART_TAG, "call uart_driver_install API first");
return ESP_ERR_INVALID_STATE;
}
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
if (uart_hal_get_intr_ena_status(&(uart_context[uart_num].hal)) & UART_INTR_RXFIFO_FULL) {
uart_hal_set_rxfifo_full_thr(&(uart_context[uart_num].hal), threshold);
}
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_tx_empty_threshold(uart_port_t uart_num, int threshold)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((threshold < UART_THRESHOLD_NUM(uart_num, UART_TXFIFO_EMPTY_THRHD_V)) && (threshold > 0), ESP_ERR_INVALID_ARG, UART_TAG,
"tx fifo empty threshold value error");
if (p_uart_obj[uart_num] == NULL) {
ESP_LOGE(UART_TAG, "call uart_driver_install API first");
return ESP_ERR_INVALID_STATE;
}
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
if (uart_hal_get_intr_ena_status(&(uart_context[uart_num].hal)) & UART_INTR_TXFIFO_EMPTY) {
uart_hal_set_txfifo_empty_thr(&(uart_context[uart_num].hal), threshold);
}
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_set_rx_timeout(uart_port_t uart_num, const uint8_t tout_thresh)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
uint16_t tout_max_thresh = uart_hal_get_max_rx_timeout_thrd(&(uart_context[uart_num].hal));
if (tout_thresh > tout_max_thresh) {
ESP_LOGE(UART_TAG, "tout_thresh = %d > maximum value = %d", tout_thresh, tout_max_thresh);
return ESP_ERR_INVALID_ARG;
}
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_rx_timeout(&(uart_context[uart_num].hal), tout_thresh);
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_get_collision_flag(uart_port_t uart_num, bool *collision_flag)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((p_uart_obj[uart_num]), ESP_FAIL, UART_TAG, "uart driver error");
ESP_RETURN_ON_FALSE((collision_flag != NULL), ESP_ERR_INVALID_ARG, UART_TAG, "wrong parameter pointer");
ESP_RETURN_ON_FALSE((UART_IS_MODE_SET(uart_num, UART_MODE_RS485_HALF_DUPLEX) || UART_IS_MODE_SET(uart_num, UART_MODE_RS485_COLLISION_DETECT)),
ESP_ERR_INVALID_ARG, UART_TAG, "wrong mode");
*collision_flag = p_uart_obj[uart_num]->coll_det_flg;
return ESP_OK;
}
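/* Configure the light-sleep wakeup threshold (number of RX edges needed to wake the
 * chip) and keep the pad clock enabled during sleep so the edges can be detected. */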
esp_err_t uart_set_wakeup_threshold(uart_port_t uart_num, int wakeup_threshold)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((wakeup_threshold <= UART_THRESHOLD_NUM(uart_num, UART_ACTIVE_THRESHOLD_V) && wakeup_threshold >= UART_MIN_WAKEUP_THRESH), ESP_ERR_INVALID_ARG, UART_TAG,
"wakeup_threshold out of bounds");
UART_ENTER_CRITICAL(&(uart_context[uart_num].spinlock));
uart_hal_set_wakeup_thrd(&(uart_context[uart_num].hal), wakeup_threshold);
HP_UART_PAD_CLK_ATOMIC() {
uart_ll_enable_pad_sleep_clock(uart_context[uart_num].hal.dev, true);
}
UART_EXIT_CRITICAL(&(uart_context[uart_num].spinlock));
return ESP_OK;
}
esp_err_t uart_get_wakeup_threshold(uart_port_t uart_num, int *out_wakeup_threshold)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
ESP_RETURN_ON_FALSE((out_wakeup_threshold != NULL), ESP_ERR_INVALID_ARG, UART_TAG, "argument is NULL");
uart_hal_get_wakeup_thrd(&(uart_context[uart_num].hal), (uint32_t *)out_wakeup_threshold);
return ESP_OK;
}
esp_err_t uart_wait_tx_idle_polling(uart_port_t uart_num)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
if (uart_ll_is_enabled(uart_num)) {
while (!uart_hal_is_tx_idle(&(uart_context[uart_num].hal)));
}
return ESP_OK;
}
esp_err_t uart_set_loop_back(uart_port_t uart_num, bool loop_back_en)
{
ESP_RETURN_ON_FALSE((uart_num < UART_NUM_MAX), ESP_ERR_INVALID_ARG, UART_TAG, "uart_num error");
uart_hal_set_loop_back(&(uart_context[uart_num].hal), loop_back_en);
return ESP_OK;
}
void uart_set_always_rx_timeout(uart_port_t uart_num, bool always_rx_timeout)
{
uint16_t rx_tout = uart_hal_get_rx_tout_thr(&(uart_context[uart_num].hal));
if (rx_tout) {
p_uart_obj[uart_num]->rx_always_timeout_flg = always_rx_timeout;
} else {
p_uart_obj[uart_num]->rx_always_timeout_flg = false;
}
}
#if CONFIG_PM_POWER_DOWN_PERIPHERAL_IN_LIGHT_SLEEP
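/* Sleep-retention callback: build the regdma link that saves and restores the UART
 * registers when the power domain is shut down in light sleep. */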
static esp_err_t uart_create_sleep_retention_link_cb(void *arg)
{
#if SOC_UART_SUPPORT_SLEEP_RETENTION
uart_context_t *group = (uart_context_t *)arg;
uart_port_t uart_num = group->port_id;
sleep_retention_module_t module = uart_reg_retention_info[uart_num].module;
esp_err_t err = sleep_retention_entries_create(uart_reg_retention_info[uart_num].regdma_entry_array,
uart_reg_retention_info[uart_num].array_size,
REGDMA_LINK_PRI_UART, module);
ESP_RETURN_ON_ERROR(err, UART_TAG, "create retention link failed");
#endif
return ESP_OK;
}
#endif