ALSA: hda - Support up to 8 codecs for the Nvidia HDA controller
Raise the maximum number of codec slots to 8 for the Nvidia HDA
controller: bump AZX_MAX_CODECS to 8, add "#define AZX_DEFAULT_CODECS 4"
as the fallback for drivers that do not specify a slot count, and set
azx_max_codecs[] to 8 for the Nvidia controller.
Signed-off-by: Wei Ni <wni@nvidia.com>
Signed-off-by: Takashi Iwai <tiwai@suse.de>
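For context, raising AZX_MAX_CODECS from 4 to 8 also widens
STATESTS_INT_MASK from 0x0f to 0xff, while the new AZX_DEFAULT_CODECS
keeps the probe fallback at 4 slots for drivers that leave their
azx_max_codecs[] entry at 0. The standalone sketch below only
illustrates that relationship, reusing the macro names from this patch;
the per_driver_slots table is an illustrative placeholder, not the
driver's actual azx_max_codecs[] contents.

#include <stdio.h>

/* Values as introduced by this patch (illustration only). */
#define AZX_MAX_CODECS		8	/* sizes the codec arrays and the STATESTS mask */
#define AZX_DEFAULT_CODECS	4	/* fallback number of slots to probe */
#define STATESTS_INT_MASK	((1 << AZX_MAX_CODECS) - 1)

int main(void)
{
	/* 0 means "use the default slot count", like azx_max_codecs[] entries */
	unsigned int per_driver_slots[] = { 0, 8, 1 };	/* generic, Nvidia-like, Tera-like */
	unsigned int i;

	printf("STATESTS_INT_MASK = 0x%02x\n", STATESTS_INT_MASK);	/* prints 0xff */

	for (i = 0; i < sizeof(per_driver_slots) / sizeof(per_driver_slots[0]); i++) {
		unsigned int max_slots = per_driver_slots[i];

		if (!max_slots)
			max_slots = AZX_DEFAULT_CODECS;	/* same fallback as the probe loop */
		printf("driver %u probes %u codec slot(s)\n", i, max_slots);
	}
	return 0;
}

Compiled on its own, this prints the widened mask (0xff) and the slot
counts resolved through the same fallback logic used by the codec probe
loop in the hunks below.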
diff --git a/sound/pci/hda/hda_intel.c b/sound/pci/hda/hda_intel.c
index 1adac8c..b104757 100644
--- a/sound/pci/hda/hda_intel.c
+++ b/sound/pci/hda/hda_intel.c
@@ -267,7 +267,8 @@
 #define RIRB_INT_MASK		0x05
 
 /* STATESTS int mask: S3,SD2,SD1,SD0 */
-#define AZX_MAX_CODECS		4
+#define AZX_MAX_CODECS		8
+#define AZX_DEFAULT_CODECS	4
 #define STATESTS_INT_MASK	((1 << AZX_MAX_CODECS) - 1)
 
 /* SD_CTL bits */
@@ -1367,6 +1368,7 @@
 
 /* number of codec slots for each chipset: 0 = default slots (i.e. 4) */
 static unsigned int azx_max_codecs[AZX_NUM_DRIVERS] __devinitdata = {
+	[AZX_DRIVER_NVIDIA] = 8,
 	[AZX_DRIVER_TERA] = 1,
 };
 
@@ -1399,7 +1401,7 @@
 	codecs = 0;
 	max_slots = azx_max_codecs[chip->driver_type];
 	if (!max_slots)
-		max_slots = AZX_MAX_CODECS;
+		max_slots = AZX_DEFAULT_CODECS;
 
 	/* First try to probe all given codec slots */
 	for (c = 0; c < max_slots; c++) {