1 // SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
3 // This file is provided under a dual BSD/GPLv2 license. When using or
4 // redistributing this file, you may do so under either license.
6 // Copyright(c) 2018 Intel Corporation. All rights reserved.
8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9 // Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10 // Rander Wang <rander.wang@intel.com>
11 // Keyon Jie <yang.jie@linux.intel.com>
15 * Hardware interface for generic Intel audio DSP HDA IP
18 #include <linux/pm_runtime.h>
19 #include <sound/hdaudio_ext.h>
20 #include <sound/hda_register.h>
21 #include <sound/sof.h>
23 #include "../sof-audio.h"
27 * set up one of BDL entries for a stream
/*
 * hda_setup_bdle - program one Buffer Descriptor List Entry (BDLE) for a
 * stream and advance the caller's BDL cursor.
 *
 * @sdev:   SOF device
 * @dmab:   DMA buffer the entry should describe
 * @stream: HDA stream whose fragment count is checked/updated
 * @bdlp:   in/out pointer to the current BDL entry; advanced as entries are
 *          written (advance itself happens in lines elided from this view)
 * @offset: byte offset into @dmab where this segment starts
 * @size:   remaining bytes of the segment to describe
 * @ioc:    whether to raise an interrupt-on-completion for the segment
 *
 * NOTE(review): several original lines are elided from this view, including
 * the declarations of 'addr'/'chunk', the loop over the segment, and the
 * return paths — do not assume this listing is complete.
 */
29 static int hda_setup_bdle(struct snd_sof_dev *sdev,
30 struct snd_dma_buffer *dmab,
31 struct hdac_stream *stream,
32 struct sof_intel_dsp_bdl *bdlp,
33 int offset, int size, int ioc)
35 struct hdac_bus *bus = sof_to_bus(sdev);
36 struct sof_intel_dsp_bdl *bdl = *bdlp;
/* hardware caps the number of BDL entries per stream */
42 if (stream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
43 dev_err(sdev->dev, "error: stream frags exceeded\n");
/* physical address of this chunk within the (possibly scattered) buffer */
47 addr = snd_sgbuf_get_addr(dmab, offset);
48 /* program BDL addr */
49 bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
50 bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
51 /* program BDL size */
52 chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
53 /* one BDLE should not cross 4K boundary */
54 if (bus->align_bdle_4k) {
/* bytes left before the next 4K boundary; chunk is clamped to this
 * (clamping line elided from this view)
 */
55 u32 remain = 0x1000 - (offset & 0xfff);
60 bdl->size = cpu_to_le32(chunk);
61 /* only program IOC when the whole segment is processed */
/* size here is the bytes still remaining after this chunk */
63 bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
68 dev_vdbg(sdev->dev, "bdl, frags:%d, chunk size:0x%x;\n",
69 stream->frags, chunk);
77 * set up Buffer Descriptor List (BDL) for host memory transfer
78 * BDL describes the location of the individual buffers and is little endian.
/*
 * hda_dsp_stream_setup_bdl - build the Buffer Descriptor List for a host DMA
 * stream, one BDLE per period (plus a final short entry for any remainder).
 * The BDL itself lives in stream->bdl and is little endian.
 *
 * NOTE(review): lines are elided from this view (e.g. the zero-period-bytes
 * guard before line 92, the 'remain'/'ioc' declarations, the else branch of
 * the loop and the return value) — do not assume this listing is complete.
 */
80 int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
81 struct snd_dma_buffer *dmab,
82 struct hdac_stream *stream)
84 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
85 struct sof_intel_dsp_bdl *bdl;
86 int i, offset, period_bytes, periods;
89 period_bytes = stream->period_bytes;
90 dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
/* presumably reached only when period_bytes is 0 — guard line elided */
92 period_bytes = stream->bufsize;
94 periods = stream->bufsize / period_bytes;
96 dev_dbg(sdev->dev, "periods:%d\n", periods);
/* leftover bytes that don't fill a whole period get their own short BDLE */
98 remain = stream->bufsize % period_bytes;
102 /* program the initial BDL entries */
103 bdl = (struct sof_intel_dsp_bdl *)stream->bdl.area;
108 * set IOC if don't use position IPC
109 * and period_wakeup needed.
111 ioc = hda->no_ipc_position ?
112 !stream->no_period_wakeup : 0;
114 for (i = 0; i < periods; i++) {
115 if (i == (periods - 1) && remain)
116 /* set the last small entry */
117 offset = hda_setup_bdle(sdev, dmab,
118 stream, &bdl, offset,
/* else branch: full-period entry (size argument lines elided) */
121 offset = hda_setup_bdle(sdev, dmab,
122 stream, &bdl, offset,
/*
 * hda_dsp_stream_spib_config - enable/disable the Software Position In
 * Buffer (SPIB) capability for a stream and program its value.
 *
 * @enable: written into the per-stream bit of the SPBFCCTL register
 * @size:   SPIB value written to the stream's spib_addr
 *
 * Fails if the SPIB capability BAR was not mapped.  Error return line is
 * elided from this view.
 */
129 int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
130 struct hdac_ext_stream *stream,
131 int enable, u32 size)
133 struct hdac_stream *hstream = &stream->hstream;
136 if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
137 dev_err(sdev->dev, "error: address of spib capability is NULL\n");
/* per-stream bit position in the SPIB control register */
141 mask = (1 << hstream->index);
143 /* enable/disable SPIB for the stream */
144 snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
145 SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
146 enable << hstream->index);
148 /* set the SPIB value */
149 sof_io_write(sdev, stream->spib_addr, size);
154 /* get next unused stream */
154 /* get next unused stream */
/*
 * hda_dsp_stream_get - claim the first free stream of @direction from the
 * bus stream list, skipping streams whose host DMA channel is reserved.
 * Returns the claimed stream or NULL if none is free.
 *
 * NOTE(review): the lines that mark the stream opened and break out of the
 * loop are elided from this view.
 */
155 struct hdac_ext_stream *
156 hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction)
158 struct hdac_bus *bus = sof_to_bus(sdev);
159 struct sof_intel_hda_stream *hda_stream;
160 struct hdac_ext_stream *stream = NULL;
161 struct hdac_stream *s;
/* stream list and opened flags are protected by the bus reg_lock */
163 spin_lock_irq(&bus->reg_lock);
165 /* get an unused stream */
166 list_for_each_entry(s, &bus->stream_list, list) {
167 if (s->direction == direction && !s->opened) {
168 stream = stream_to_hdac_ext_stream(s);
169 hda_stream = container_of(stream,
170 struct sof_intel_hda_stream,
172 /* check if the host DMA channel is reserved */
173 if (hda_stream->host_reserved)
181 spin_unlock_irq(&bus->reg_lock);
/* reached when no stream was claimed (guard line elided) */
185 dev_err(sdev->dev, "error: no free %s streams\n",
186 direction == SNDRV_PCM_STREAM_PLAYBACK ?
187 "playback" : "capture");
190 * Disable DMI Link L1 entry when capture stream is opened.
191 * Workaround to address a known issue with host DMA that results
192 * in xruns during pause/release in capture scenarios.
194 if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
195 if (stream && direction == SNDRV_PCM_STREAM_CAPTURE)
196 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
198 HDA_VS_INTEL_EM2_L1SEN, 0);
/*
 * hda_dsp_stream_put - release the stream matching @direction/@stream_tag
 * and, if no capture streams remain open, re-enable DMI L1 entry (which
 * hda_dsp_stream_get disabled as a capture-xrun workaround).
 *
 * NOTE(review): the lines that actually clear the matched stream's state
 * (inside the if at line 221) and the return statements are elided from
 * this view.
 */
204 int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
206 struct hdac_bus *bus = sof_to_bus(sdev);
207 struct hdac_stream *s;
208 bool active_capture_stream = false;
211 spin_lock_irq(&bus->reg_lock);
214 * close stream matching the stream tag
215 * and check if there are any open capture streams.
217 list_for_each_entry(s, &bus->stream_list, list) {
/* presumably also requires s->opened — guard detail elided from view */
221 if (s->direction == direction && s->stream_tag == stream_tag) {
224 } else if (s->direction == SNDRV_PCM_STREAM_CAPTURE) {
225 active_capture_stream = true;
229 spin_unlock_irq(&bus->reg_lock);
231 /* Enable DMI L1 entry if there are no capture streams open */
232 if (!IS_ENABLED(CONFIG_SND_SOC_SOF_HDA_ALWAYS_ENABLE_DMI_L1))
233 if (!active_capture_stream)
234 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
236 HDA_VS_INTEL_EM2_L1SEN,
237 HDA_VS_INTEL_EM2_L1SEN);
/* reached when no stream matched the tag (guard line elided) */
240 dev_dbg(sdev->dev, "stream_tag %d not opened!\n", stream_tag);
/*
 * hda_dsp_stream_trigger - start or stop the host DMA for a stream in
 * response to an ALSA trigger command.
 *
 * START/RESUME/PAUSE_RELEASE: enable the stream's interrupt, set the DMA
 * RUN bit and poll until hardware reports it running.
 * STOP/SUSPEND/PAUSE_PUSH: clear the RUN bit, poll until it is observed
 * clear, then clear the stream status bits and disable the interrupt.
 *
 * NOTE(review): the switch statement head, 'run'/'ret' declarations, break
 * statements and return paths are elided from this view.
 */
247 int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
248 struct hdac_ext_stream *stream, int cmd)
250 struct hdac_stream *hstream = &stream->hstream;
251 int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
252 u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
256 /* cmd must be for audio stream */
258 case SNDRV_PCM_TRIGGER_RESUME:
259 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
260 case SNDRV_PCM_TRIGGER_START:
/* enable this stream's bit in the global interrupt control register */
261 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
263 1 << hstream->index);
/* set RUN and unmask the stream's DMA interrupts in SD_CTL */
265 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
267 SOF_HDA_SD_CTL_DMA_START |
268 SOF_HDA_CL_DMA_SD_INT_MASK,
269 SOF_HDA_SD_CTL_DMA_START |
270 SOF_HDA_CL_DMA_SD_INT_MASK);
/* wait for hardware to acknowledge the RUN bit */
272 ret = snd_sof_dsp_read_poll_timeout(sdev,
275 ((run & dma_start) == dma_start),
276 HDA_DSP_REG_POLL_INTERVAL_US,
277 HDA_DSP_STREAM_RUN_TIMEOUT);
281 "error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
286 hstream->running = true;
288 case SNDRV_PCM_TRIGGER_SUSPEND:
289 case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
290 case SNDRV_PCM_TRIGGER_STOP:
/* clear RUN and the interrupt-enable bits in SD_CTL */
291 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
293 SOF_HDA_SD_CTL_DMA_START |
294 SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);
/* wait for hardware to report the DMA stopped */
296 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
299 HDA_DSP_REG_POLL_INTERVAL_US,
300 HDA_DSP_STREAM_RUN_TIMEOUT);
304 "error: %s: cmd %d: timeout on STREAM_SD_OFFSET read\n",
/* clear any pending stream status (write-1-to-clear) */
309 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset +
310 SOF_HDA_ADSP_REG_CL_SD_STS,
311 SOF_HDA_CL_DMA_SD_INT_MASK);
313 hstream->running = false;
314 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
315 1 << hstream->index, 0x0);
/* default case: unsupported trigger command */
318 dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
326 * prepare for common hdac registers settings, for both code loader
/*
 * hda_dsp_stream_hw_params - program a stream descriptor for DMA.
 *
 * Sequence: decouple host/link DMA, stop + reset the stream, build the BDL,
 * program stream tag / cyclic buffer length / format / last-valid-index /
 * BDL address, enable the position buffer if not already enabled, unmask
 * stream interrupts and cache the playback FIFO size.
 *
 * The exact register ordering below follows the recommended HDAudio
 * programming sequence (see the comment at lines 466-473) — do not reorder.
 *
 * NOTE(review): numerous lines are elided from this view ('run'/'val'/'mask'
 * declarations, return paths, reset-poll loop bodies) — do not assume this
 * listing is complete.
 */
329 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
330 struct hdac_ext_stream *stream,
331 struct snd_dma_buffer *dmab,
332 struct snd_pcm_hw_params *params)
334 struct hdac_bus *bus = sof_to_bus(sdev);
335 struct hdac_stream *hstream = &stream->hstream;
336 int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
337 int ret, timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
338 u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
/* guard: stream pointer validity check (condition line elided) */
343 dev_err(sdev->dev, "error: no stream available\n");
347 /* decouple host and link DMA */
348 mask = 0x1 << hstream->index;
349 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
/* guard: DMA buffer validity check (condition line elided) */
353 dev_err(sdev->dev, "error: no dma buffer allocated!\n");
357 /* clear stream status */
358 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
359 SOF_HDA_CL_DMA_SD_INT_MASK |
360 SOF_HDA_SD_CTL_DMA_START, 0);
/* wait for the DMA RUN bit to be observed clear before resetting */
362 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
365 HDA_DSP_REG_POLL_INTERVAL_US,
366 HDA_DSP_STREAM_RUN_TIMEOUT);
370 "error: %s: timeout on STREAM_SD_OFFSET read1\n",
/* acknowledge any stale status bits (write-1-to-clear) */
375 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
376 sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
377 SOF_HDA_CL_DMA_SD_INT_MASK,
378 SOF_HDA_CL_DMA_SD_INT_MASK);
/* enter stream reset (SRST, bit 0) and poll until hardware reflects it */
381 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
385 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
391 dev_err(sdev->dev, "error: stream reset failed\n");
/* leave stream reset and wait for SRST to read back as 0 */
395 timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
396 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 0x1,
399 /* wait for hardware to report that stream is out of reset */
402 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
404 if ((val & 0x1) == 0)
408 dev_err(sdev->dev, "error: timeout waiting for stream reset\n");
/* clear the DMA position buffer slot for this stream */
413 *hstream->posbuf = 0;
415 /* reset BDL address */
416 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
417 sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
419 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
420 sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
423 /* clear stream status */
424 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
425 SOF_HDA_CL_DMA_SD_INT_MASK |
426 SOF_HDA_SD_CTL_DMA_START, 0);
/* second RUN-bit-clear poll after the reset cycle */
428 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
431 HDA_DSP_REG_POLL_INTERVAL_US,
432 HDA_DSP_STREAM_RUN_TIMEOUT);
436 "error: %s: timeout on STREAM_SD_OFFSET read2\n",
441 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
442 sd_offset + SOF_HDA_ADSP_REG_CL_SD_STS,
443 SOF_HDA_CL_DMA_SD_INT_MASK,
444 SOF_HDA_CL_DMA_SD_INT_MASK);
/* build the Buffer Descriptor List describing the DMA buffer */
448 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
450 dev_err(sdev->dev, "error: set up of BDL failed\n");
454 /* program stream tag to set up stream descriptor for DMA */
455 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
456 SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
457 hstream->stream_tag <<
458 SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
460 /* program cyclic buffer length */
461 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
462 sd_offset + SOF_HDA_ADSP_REG_CL_SD_CBL,
466 * Recommended hardware programming sequence for HDAudio DMA format
468 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
469 * for corresponding stream index before the time of writing
470 * format to SDxFMT register.
472 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
473 * enable decoupled mode
476 /* couple host and link DMA, disable DSP features */
477 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
480 /* program stream format */
481 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
483 SOF_HDA_ADSP_REG_CL_SD_FORMAT,
484 0xffff, hstream->format_val);
486 /* decouple host and link DMA, enable DSP features */
487 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
490 /* program last valid index */
491 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
492 sd_offset + SOF_HDA_ADSP_REG_CL_SD_LVI,
493 0xffff, (hstream->frags - 1));
495 /* program BDL address */
496 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
497 sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPL,
498 (u32)hstream->bdl.addr);
499 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
500 sd_offset + SOF_HDA_ADSP_REG_CL_SD_BDLPU,
501 upper_32_bits(hstream->bdl.addr));
503 /* enable position buffer */
/* only program DPIB base once, for the first stream configured */
504 if (!(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
505 & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
506 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
507 upper_32_bits(bus->posbuf.addr));
508 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
509 (u32)bus->posbuf.addr |
510 SOF_HDA_ADSP_DPLBASE_ENABLE);
513 /* set interrupt enable bits */
514 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
515 SOF_HDA_CL_DMA_SD_INT_MASK,
516 SOF_HDA_CL_DMA_SD_INT_MASK);
/* read back FIFO size: hardware reports size - 1, so add 1 for playback */
519 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
521 snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
523 SOF_HDA_ADSP_REG_CL_SD_FIFOSIZE);
524 hstream->fifo_size &= 0xffff;
525 hstream->fifo_size += 1;
/* else branch (capture): FIFO size not used */
527 hstream->fifo_size = 0;
/*
 * hda_dsp_stream_hw_free - undo hw_params for a stream: put the stream's
 * DMA back into coupled mode (clear its PPCTL.PROCEN bit), but only when
 * the link DMA side is not still in use.  Return statement elided from
 * this view.
 */
533 int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
534 struct snd_pcm_substream *substream)
536 struct hdac_stream *stream = substream->runtime->private_data;
537 struct hdac_ext_stream *link_dev = container_of(stream,
538 struct hdac_ext_stream,
540 struct hdac_bus *bus = sof_to_bus(sdev);
/* per-stream PROCEN bit in the PPCTL register */
541 u32 mask = 0x1 << stream->index;
543 spin_lock_irq(&bus->reg_lock);
544 /* couple host and link DMA if link DMA channel is idle */
545 if (!link_dev->link_locked)
546 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
547 SOF_HDA_REG_PP_PPCTL, mask, 0);
548 spin_unlock_irq(&bus->reg_lock);
/*
 * hda_dsp_check_stream_irq - quick check (from the IRQ path) whether any
 * stream raised an interrupt, by reading the global INTSTS register.
 * A reading of all-ones means the controller registers are inaccessible
 * (e.g. device in low-power state) and is ignored.  The actual per-bit
 * evaluation and the return value are on lines elided from this view.
 */
553 bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
555 struct hdac_bus *bus = sof_to_bus(sdev);
559 /* The function can be called at irq thread, so use spin_lock_irq */
560 spin_lock_irq(&bus->reg_lock);
562 status = snd_hdac_chip_readl(bus, INTSTS);
563 dev_vdbg(bus->dev, "stream irq, INTSTS status: 0x%x\n", status);
565 /* if Register inaccessible, ignore it.*/
566 if (status != 0xffffffff)
569 spin_unlock_irq(&bus->reg_lock);
/*
 * hda_dsp_stream_check - service per-stream interrupts flagged in @status.
 *
 * For each opened stream whose INTSTS bit is set: read and acknowledge its
 * SD_STS (write-1-to-clear), and notify ALSA of an elapsed period — but only
 * when position updates do not arrive over IPC (no_ipc_position), to avoid
 * double reporting.  Return value and some guard lines are elided from this
 * view.
 */
574 static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
576 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
577 struct hdac_stream *s;
581 list_for_each_entry(s, &bus->stream_list, list) {
582 if (status & BIT(s->index) && s->opened) {
583 sd_status = snd_hdac_stream_readb(s, SD_STS);
585 dev_vdbg(bus->dev, "stream %d status 0x%x\n",
586 s->index, sd_status);
/* acknowledge the stream status bits (write-1-to-clear) */
588 snd_hdac_stream_writeb(s, SD_STS, sd_status);
/* skip period handling unless buffer-complete was flagged
 * (full condition partially elided from this view)
 */
593 (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
596 /* Inform ALSA only in case not do that with IPC */
597 if (sof_hda->no_ipc_position)
598 snd_sof_pcm_period_elapsed(s->substream);
/*
 * hda_dsp_stream_threaded_handler - threaded IRQ handler for stream and
 * codec (RIRB) interrupts.
 *
 * Re-reads INTSTS in a bounded loop (max 10 iterations) because unsolicited
 * codec responses can raise new interrupts while earlier ones are being
 * serviced.  Stream bits are delegated to hda_dsp_stream_check(); RIRB
 * status is handled inline when HDA codec support is built in.  The loop
 * exit condition on 'status' and the return statement are on lines elided
 * from this view.
 */
605 irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
607 struct snd_sof_dev *sdev = context;
608 struct hdac_bus *bus = sof_to_bus(sdev);
/* rirb_status is only needed when codec support is compiled in */
609 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
617 * Loop 10 times to handle missed interrupts caused by
618 * unsolicited responses from the codec
620 for (i = 0, active = true; i < 10 && active; i++) {
621 spin_lock_irq(&bus->reg_lock);
623 status = snd_hdac_chip_readl(bus, INTSTS);
/* service any per-stream interrupt bits */
626 active = hda_dsp_stream_check(bus, status);
628 /* check and clear RIRB interrupt */
629 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
630 if (status & AZX_INT_CTRL_EN) {
631 rirb_status = snd_hdac_chip_readb(bus, RIRBSTS);
632 if (rirb_status & RIRB_INT_MASK) {
/* process codec responses before acknowledging the interrupt */
634 if (rirb_status & RIRB_INT_RESPONSE)
635 snd_hdac_bus_update_rirb(bus);
636 snd_hdac_chip_writeb(bus, RIRBSTS,
641 spin_unlock_irq(&bus->reg_lock);
/*
 * hda_dsp_stream_init - probe-time discovery and allocation of all host
 * streams.
 *
 * Reads the stream counts from GCAP, allocates the shared DMA position
 * buffer (and CORB/RIRB ring buffers when codec support is built in), then
 * creates one sof_intel_hda_stream per capture and playback stream: maps
 * its PPHC/PPLC (and optional SPIB) register addresses, assigns stream tag
 * and direction, allocates its BDL buffer and links it into the bus stream
 * list.
 *
 * NOTE(review): many lines are elided from this view (gcap/sd_offset
 * declarations, error returns, allocation-failure returns, posbuf/bdl
 * error paths, closing braces) — do not assume this listing is complete.
 */
647 int hda_dsp_stream_init(struct snd_sof_dev *sdev)
649 struct hdac_bus *bus = sof_to_bus(sdev);
650 struct hdac_ext_stream *stream;
651 struct hdac_stream *hstream;
652 struct pci_dev *pci = to_pci_dev(sdev->dev);
653 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
655 int i, num_playback, num_capture, num_total, ret;
658 gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
659 dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);
661 /* get stream count from GCAP */
/* GCAP layout: bits 8-11 = input (capture), bits 12-15 = output (playback) */
662 num_capture = (gcap >> 8) & 0x0f;
663 num_playback = (gcap >> 12) & 0x0f;
664 num_total = num_playback + num_capture;
666 dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
667 num_playback, num_capture);
/* sanity-check against the driver's static per-direction limits */
669 if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
670 dev_err(sdev->dev, "error: too many playback streams %d\n",
675 if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
676 dev_err(sdev->dev, "error: too many capture streams %d\n",
682 * mem alloc for the position buffer
683 * TODO: check position buffer update
685 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
686 SOF_HDA_DPIB_ENTRY_SIZE * num_total,
689 dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
693 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
694 /* mem alloc for the CORB/RIRB ringbuffers */
695 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
696 PAGE_SIZE, &bus->rb);
698 dev_err(sdev->dev, "error: RB alloc failed\n");
703 /* create capture streams */
704 for (i = 0; i < num_capture; i++) {
705 struct sof_intel_hda_stream *hda_stream;
707 hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
712 hda_stream->sdev = sdev;
714 stream = &hda_stream->hda_stream;
/* per-stream host DMA control registers in the PP BAR */
716 stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
717 SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
719 stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
720 SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
721 SOF_HDA_PPLC_INTERVAL * i;
723 /* do we support SPIB */
724 if (sdev->bar[HDA_DSP_SPIB_BAR]) {
725 stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
726 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
729 stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
730 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
731 SOF_HDA_SPIB_MAXFIFO;
734 hstream = &stream->hstream;
/* bit this stream asserts in the global interrupt status register */
736 hstream->sd_int_sta_mask = 1 << i;
738 sd_offset = SOF_STREAM_SD_OFFSET(hstream);
739 hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
/* stream tags are 1-based */
740 hstream->stream_tag = i + 1;
741 hstream->opened = false;
742 hstream->running = false;
743 hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
745 /* memory alloc for stream BDL */
746 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
747 HDA_DSP_BDL_SIZE, &hstream->bdl);
749 dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
/* each stream owns an 8-byte slot in the shared position buffer */
752 hstream->posbuf = (__le32 *)(bus->posbuf.area +
753 (hstream->index) * 8);
755 list_add_tail(&hstream->list, &bus->stream_list);
758 /* create playback streams */
/* playback streams occupy indices after all capture streams */
759 for (i = num_capture; i < num_total; i++) {
760 struct sof_intel_hda_stream *hda_stream;
762 hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
767 hda_stream->sdev = sdev;
769 stream = &hda_stream->hda_stream;
771 /* we always have DSP support */
772 stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
773 SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
775 stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
776 SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
777 SOF_HDA_PPLC_INTERVAL * i;
779 /* do we support SPIB */
780 if (sdev->bar[HDA_DSP_SPIB_BAR]) {
781 stream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
782 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
785 stream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
786 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
787 SOF_HDA_SPIB_MAXFIFO;
790 hstream = &stream->hstream;
792 hstream->sd_int_sta_mask = 1 << i;
794 sd_offset = SOF_STREAM_SD_OFFSET(hstream);
795 hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
/* playback tags restart at 1 within their own direction */
796 hstream->stream_tag = i - num_capture + 1;
797 hstream->opened = false;
798 hstream->running = false;
799 hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
801 /* mem alloc for stream BDL */
802 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
803 HDA_DSP_BDL_SIZE, &hstream->bdl);
805 dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
809 hstream->posbuf = (__le32 *)(bus->posbuf.area +
810 (hstream->index) * 8);
812 list_add_tail(&hstream->list, &bus->stream_list);
815 /* store total stream count (playback + capture) from GCAP */
816 sof_hda->stream_max = num_total;
821 void hda_dsp_stream_free(struct snd_sof_dev *sdev)
823 struct hdac_bus *bus = sof_to_bus(sdev);
824 struct hdac_stream *s, *_s;
825 struct hdac_ext_stream *stream;
826 struct sof_intel_hda_stream *hda_stream;
828 /* free position buffer */
829 if (bus->posbuf.area)
830 snd_dma_free_pages(&bus->posbuf);
832 #if IS_ENABLED(CONFIG_SND_SOC_SOF_HDA)
833 /* free position buffer */
835 snd_dma_free_pages(&bus->rb);
838 list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
841 /* free bdl buffer */
843 snd_dma_free_pages(&s->bdl);
845 stream = stream_to_hdac_ext_stream(s);
846 hda_stream = container_of(stream, struct sof_intel_hda_stream,
848 devm_kfree(sdev->dev, hda_stream);