@@ -20,6 +20,15 @@ LOG_MODULE_REGISTER(nxp_sdma);
AT_NONCACHEABLE_SECTION_ALIGN(static sdma_context_data_t
			      sdma_contexts[FSL_FEATURE_SDMA_MODULE_CHANNEL], 4);

+enum sdma_channel_state {
+	SDMA_CHAN_STATE_INIT = 0,
+	SDMA_CHAN_STATE_CONFIGURED,
+	SDMA_CHAN_STATE_STARTED,
+	SDMA_CHAN_STATE_STOPPED,
+	SDMA_CHAN_STATE_SUSPENDED,
+	SDMA_CHAN_STATE_RELEASING,
+};
+
struct sdma_dev_cfg {
	SDMAARM_Type *base;
	void (*irq_config)(void);
@@ -32,6 +41,7 @@ struct sdma_channel_data {
	uint32_t direction;
	uint32_t index;
	const struct device *dev;
+	enum sdma_channel_state state;
	sdma_buffer_descriptor_t *bd_pool; /* pre-allocated list of BD used for transfer */
	uint32_t bd_count; /* number of bd */
	uint32_t capacity; /* total transfer capacity for this channel */
@@ -281,9 +291,14 @@ static int dma_nxp_sdma_config(const struct device *dev, uint32_t channel,
		return -EINVAL;
	}

-	dma_nxp_sdma_channel_init(dev, channel);
-
	chan_data = &dev_data->chan[channel];
+
+	if (chan_data->state == SDMA_CHAN_STATE_STARTED) {
+		LOG_ERR("sdma_config() cannot configure channel while active");
+		return -EINVAL;
+	}
+
+	dma_nxp_sdma_channel_init(dev, channel);
	chan_data->dev = dev;
	chan_data->direction = config->channel_direction;

@@ -330,6 +345,7 @@ static int dma_nxp_sdma_config(const struct device *dev, uint32_t channel,
	chan_data->transfer_cfg.isEventIgnore = false;
	chan_data->transfer_cfg.isSoftTriggerIgnore = false;
	SDMA_SubmitTransfer(&chan_data->handle, &chan_data->transfer_cfg);
+	chan_data->state = SDMA_CHAN_STATE_CONFIGURED;

	return 0;
}
@@ -347,8 +363,15 @@ static int dma_nxp_sdma_start(const struct device *dev, uint32_t channel)

	chan_data = &dev_data->chan[channel];

+	if (chan_data->state != SDMA_CHAN_STATE_STOPPED &&
+	    chan_data->state != SDMA_CHAN_STATE_CONFIGURED) {
+		LOG_ERR("%s: invalid state %d", __func__, chan_data->state);
+		return -EINVAL;
+	}
+
	SDMA_SetChannelPriority(dev_cfg->base, channel, DMA_NXP_SDMA_CHAN_DEFAULT_PRIO);
	SDMA_StartChannelSoftware(dev_cfg->base, channel);
+	chan_data->state = SDMA_CHAN_STATE_STARTED;

	return 0;
}
@@ -366,6 +389,9 @@ static int dma_nxp_sdma_stop(const struct device *dev, uint32_t channel)
	chan_data = &dev_data->chan[channel];

	SDMA_StopTransfer(&chan_data->handle);
+
+	chan_data->state = SDMA_CHAN_STATE_STOPPED;
+
	return 0;
}

@@ -395,6 +421,12 @@ static int dma_nxp_sdma_reload(const struct device *dev, uint32_t channel, uint3
		return 0;
	}

+	/* allow reload only for active channels */
+	if (chan_data->state != SDMA_CHAN_STATE_STARTED) {
+		LOG_ERR("%s: invalid state %d", __func__, chan_data->state);
+		return -EINVAL;
+	}
+
	if (chan_data->direction == MEMORY_TO_PERIPHERAL) {
		dma_nxp_sdma_produce(chan_data, size);
	} else {
@@ -439,6 +471,7 @@ static bool sdma_channel_filter(const struct device *dev, int chan_id, void *par
	dev_data->chan[chan_id].event_source = *((int *)param);
	dev_data->chan[chan_id].index = chan_id;
	dev_data->chan[chan_id].capacity = 0;
+	dev_data->chan[chan_id].state = SDMA_CHAN_STATE_INIT;

	return true;
}
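
Taken together, the hunks above add a small per-channel state machine to the driver: a channel starts in SDMA_CHAN_STATE_INIT (set in sdma_channel_filter()), moves to SDMA_CHAN_STATE_CONFIGURED in dma_nxp_sdma_config(), to SDMA_CHAN_STATE_STARTED in dma_nxp_sdma_start(), and back to SDMA_CHAN_STATE_STOPPED in dma_nxp_sdma_stop(); dma_nxp_sdma_reload() additionally requires SDMA_CHAN_STATE_STARTED but does not change the state. The sketch below is not part of the patch; it is a hypothetical standalone helper (sdma_transition_allowed() does not exist in the driver) that condenses the guards added above into one predicate, assuming the same enum values.

```c
#include <stdbool.h>

/* Hypothetical helper (not in the patch): summarizes the state checks added
 * in dma_nxp_sdma_config(), dma_nxp_sdma_start() and dma_nxp_sdma_stop().
 */
enum sdma_channel_state {
	SDMA_CHAN_STATE_INIT = 0,
	SDMA_CHAN_STATE_CONFIGURED,
	SDMA_CHAN_STATE_STARTED,
	SDMA_CHAN_STATE_STOPPED,
	SDMA_CHAN_STATE_SUSPENDED,
	SDMA_CHAN_STATE_RELEASING,
};

static bool sdma_transition_allowed(enum sdma_channel_state from,
				    enum sdma_channel_state to)
{
	switch (to) {
	case SDMA_CHAN_STATE_CONFIGURED:
		/* dma_nxp_sdma_config() only rejects a running channel */
		return from != SDMA_CHAN_STATE_STARTED;
	case SDMA_CHAN_STATE_STARTED:
		/* dma_nxp_sdma_start() requires configured or stopped */
		return from == SDMA_CHAN_STATE_CONFIGURED ||
		       from == SDMA_CHAN_STATE_STOPPED;
	case SDMA_CHAN_STATE_STOPPED:
		/* dma_nxp_sdma_stop() accepts any state in this patch */
		return true;
	default:
		/* SUSPENDED/RELEASING are declared but not yet used here */
		return false;
	}
}
```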