@@ -90,6 +90,7 @@ trace_event(TRACE_CLASS_HOST, __e, ##__VA_ARGS__)
9090#define HDA_STATE_PRELOAD BIT(0)
9191#define HDA_STATE_BF_WAIT BIT(1)
9292#define HDA_STATE_INIT BIT(2)
93+ #define HDA_STATE_RELEASE BIT(3)
9394
9495struct hda_chan_data {
9596 struct dma * dma ;
@@ -252,7 +253,7 @@ static int hda_dma_copy_ch(struct dma *dma, struct hda_chan_data *chan,
252253 hda_dma_inc_fp (dma , chan -> index , bytes );
253254
254255 spin_lock_irq (& dma -> lock , flags );
255- if (chan -> cb ) {
256+ if (chan -> cb && !( chan -> state & HDA_STATE_RELEASE ) ) {
256257 next .src = DMA_RELOAD_LLI ;
257258 next .dest = DMA_RELOAD_LLI ;
258259 next .size = DMA_RELOAD_LLI ;
@@ -286,22 +287,29 @@ static void hda_dma_init(struct dma *dma, int channel)
286287 /* full buffer is copied at startup */
287288 p -> chan [channel ].desc_avail = p -> chan [channel ].desc_count ;
288289
289- p -> chan [channel ].state &= ~HDA_STATE_INIT ;
290-
291290 pm_runtime_put (PM_RUNTIME_HOST_DMA_L1 , 0 );
292291
293292 /* start link output transfer now */
294- if (p -> chan [channel ].direction == DMA_DIR_MEM_TO_DEV )
293+ if (p -> chan [channel ].direction == DMA_DIR_MEM_TO_DEV &&
294+ !(p -> chan [channel ].state & HDA_STATE_RELEASE ))
295295 hda_dma_inc_link_fp (dma , channel ,
296296 p -> chan [channel ].buffer_bytes );
297297
298+ p -> chan [channel ].state &= ~(HDA_STATE_INIT | HDA_STATE_RELEASE );
299+
298300 spin_unlock_irq (& dma -> lock , flags );
299301}
300302
301303static uint64_t hda_dma_work (void * data , uint64_t delay )
302304{
303305 struct hda_chan_data * chan = (struct hda_chan_data * )data ;
304306
307+ /* align pointers on release */
308+ if (chan -> state & HDA_STATE_RELEASE ) {
309+ hda_dma_inc_link_fp (chan -> dma , chan -> index ,
310+ chan -> period_bytes );
311+ }
312+
305313 if (chan -> state & HDA_STATE_INIT )
306314 hda_dma_init (chan -> dma , chan -> index );
307315 else
@@ -428,16 +436,21 @@ static int hda_dma_start(struct dma *dma, int channel)
428436
429437static int hda_dma_release (struct dma * dma , int channel )
430438{
431- /* Implementation left for future alignment
432- *of dma pointers (if needed)
433- */
439+ struct dma_pdata * p = dma_get_drvdata (dma );
440+
434441 uint32_t flags ;
435442
436443 spin_lock_irq (& dma -> lock , flags );
437444
438445 trace_host ("hda-dma-release dma-ptr: %p channel-number: %u" ,
439446 (uint32_t )dma , channel );
440447
448+ /*
449+ * Prepare to handle the release condition on the first work callback.
450+ * The flag is cleared again in hda_dma_init() once the release has been
451+ * processed.
452+ */
452+ p -> chan [channel ].state |= HDA_STATE_RELEASE ;
453+
441454 spin_unlock_irq (& dma -> lock , flags );
442455 return 0 ;
443456}
@@ -477,6 +490,7 @@ static int hda_dma_stop(struct dma *dma, int channel)
477490 /* disable the channel */
478491 hda_update_bits (dma , channel , DGCS , DGCS_GEN | DGCS_FIFORDY , 0 );
479492 p -> chan [channel ].status = COMP_STATE_PREPARE ;
493+ p -> chan [channel ].state = 0 ;
480494
481495 spin_unlock_irq (& dma -> lock , flags );
482496 return 0 ;