@@ -20,6 +20,13 @@ LOG_MODULE_REGISTER(nxp_sdma);
20
20
AT_NONCACHEABLE_SECTION_ALIGN (static sdma_context_data_t
21
21
sdma_contexts [FSL_FEATURE_SDMA_MODULE_CHANNEL ], 4 );
22
22
23
/*
 * Lifecycle of an SDMA channel as tracked by this driver.
 *
 * Transitions visible in this driver:
 *  - INIT: set when the channel is handed out by sdma_channel_filter().
 *  - PREPARED: set at the end of dma_nxp_sdma_config(), after
 *    SDMA_SubmitTransfer().
 *  - ACTIVE: set by dma_nxp_sdma_start() after
 *    SDMA_StartChannelSoftware(); reload is only allowed here.
 *  - READY: set by dma_nxp_sdma_stop(); channel may be started again
 *    without reconfiguration (start accepts READY or PREPARED).
 *
 * Values are spelled out explicitly to keep the on-wire/debug meaning
 * stable even if entries are later reordered.
 */
enum sdma_channel_state {
	SDMA_CHAN_STATE_INIT = 0,
	SDMA_CHAN_STATE_PREPARED = 1,
	SDMA_CHAN_STATE_ACTIVE = 2,
	SDMA_CHAN_STATE_READY = 3,
};
29
+
23
30
struct sdma_dev_cfg {
24
31
SDMAARM_Type * base ;
25
32
void (* irq_config )(void );
@@ -32,6 +39,7 @@ struct sdma_channel_data {
32
39
uint32_t direction ;
33
40
uint32_t index ;
34
41
const struct device * dev ;
42
+ enum sdma_channel_state state ;
35
43
sdma_buffer_descriptor_t * bd_pool ; /*pre-allocated list of BD used for transfer */
36
44
uint32_t bd_count ; /* number of bd */
37
45
uint32_t capacity ; /* total transfer capacity for this channel */
@@ -281,9 +289,14 @@ static int dma_nxp_sdma_config(const struct device *dev, uint32_t channel,
281
289
return - EINVAL ;
282
290
}
283
291
284
- dma_nxp_sdma_channel_init (dev , channel );
285
-
286
292
chan_data = & dev_data -> chan [channel ];
293
+
294
+ if (chan_data -> state == SDMA_CHAN_STATE_ACTIVE ) {
295
+ LOG_ERR ("sdma_config() cannot configure channel while active" );
296
+ return - EINVAL ;
297
+ }
298
+
299
+ dma_nxp_sdma_channel_init (dev , channel );
287
300
chan_data -> dev = dev ;
288
301
chan_data -> direction = config -> channel_direction ;
289
302
@@ -330,6 +343,7 @@ static int dma_nxp_sdma_config(const struct device *dev, uint32_t channel,
330
343
chan_data -> transfer_cfg .isEventIgnore = false;
331
344
chan_data -> transfer_cfg .isSoftTriggerIgnore = false;
332
345
SDMA_SubmitTransfer (& chan_data -> handle , & chan_data -> transfer_cfg );
346
+ chan_data -> state = SDMA_CHAN_STATE_PREPARED ;
333
347
334
348
return 0 ;
335
349
}
@@ -347,8 +361,16 @@ static int dma_nxp_sdma_start(const struct device *dev, uint32_t channel)
347
361
348
362
chan_data = & dev_data -> chan [channel ];
349
363
364
+ if (chan_data -> state != SDMA_CHAN_STATE_READY &&
365
+ chan_data -> state != SDMA_CHAN_STATE_PREPARED ) {
366
+ LOG_ERR ("%s: invalid state %d" , __func__ , chan_data -> state );
367
+ return - EINVAL ;
368
+ }
369
+
370
+
350
371
SDMA_SetChannelPriority (dev_cfg -> base , channel , DMA_NXP_SDMA_CHAN_DEFAULT_PRIO );
351
372
SDMA_StartChannelSoftware (dev_cfg -> base , channel );
373
+ chan_data -> state = SDMA_CHAN_STATE_ACTIVE ;
352
374
353
375
return 0 ;
354
376
}
@@ -366,6 +388,10 @@ static int dma_nxp_sdma_stop(const struct device *dev, uint32_t channel)
366
388
chan_data = & dev_data -> chan [channel ];
367
389
368
390
SDMA_StopTransfer (& chan_data -> handle );
391
+
392
+ /* channel is ready, but not active anymore */
393
+ chan_data -> state = SDMA_CHAN_STATE_READY ;
394
+
369
395
return 0 ;
370
396
}
371
397
@@ -395,6 +421,12 @@ static int dma_nxp_sdma_reload(const struct device *dev, uint32_t channel, uint3
395
421
return 0 ;
396
422
}
397
423
424
+ /* allow reload only for active channels */
425
+ if (chan_data -> state != SDMA_CHAN_STATE_ACTIVE ) {
426
+ LOG_ERR ("%s: invalid state %d" , __func__ , chan_data -> state );
427
+ return - EINVAL ;
428
+ }
429
+
398
430
if (chan_data -> direction == MEMORY_TO_PERIPHERAL ) {
399
431
dma_nxp_sdma_produce (chan_data , size );
400
432
} else {
@@ -439,6 +471,7 @@ static bool sdma_channel_filter(const struct device *dev, int chan_id, void *par
439
471
dev_data -> chan [chan_id ].event_source = * ((int * )param );
440
472
dev_data -> chan [chan_id ].index = chan_id ;
441
473
dev_data -> chan [chan_id ].capacity = 0 ;
474
+ dev_data -> chan [chan_id ].state = SDMA_CHAN_STATE_INIT ;
442
475
443
476
return true;
444
477
}
0 commit comments