// SPDX-License-Identifier: GPL-2.0-only
/******************************************************************************

    AudioScience HPI driver
    Copyright (C) 1997-2014  AudioScience Inc. <support@audioscience.com>

 Common functions used by hpixxxx.c modules

 (C) Copyright AudioScience Inc. 1998-2003
*******************************************************************************/
#define SOURCEFILE_NAME "hpicmn.c"

#include "hpi_internal.h"
#include "hpimsginit.h"
22 struct hpi_adapters_list {
23 struct hpios_spinlock list_lock;
24 struct hpi_adapter_obj adapter[HPI_MAX_ADAPTERS];
28 static struct hpi_adapters_list adapters;
31 * Given an HPI Message that was sent out and a response that was received,
32 * validate that the response has the correct fields filled in,
33 * i.e ObjectType, Function etc
35 u16 hpi_validate_response(struct hpi_message *phm, struct hpi_response *phr)
37 if (phr->type != HPI_TYPE_RESPONSE) {
38 HPI_DEBUG_LOG(ERROR, "header type %d invalid\n", phr->type);
39 return HPI_ERROR_INVALID_RESPONSE;
42 if (phr->object != phm->object) {
43 HPI_DEBUG_LOG(ERROR, "header object %d invalid\n",
45 return HPI_ERROR_INVALID_RESPONSE;
48 if (phr->function != phm->function) {
49 HPI_DEBUG_LOG(ERROR, "header function %d invalid\n",
51 return HPI_ERROR_INVALID_RESPONSE;
57 u16 hpi_add_adapter(struct hpi_adapter_obj *pao)
60 /*HPI_ASSERT(pao->type); */
62 hpios_alistlock_lock(&adapters);
64 if (pao->index >= HPI_MAX_ADAPTERS) {
65 retval = HPI_ERROR_BAD_ADAPTER_NUMBER;
69 if (adapters.adapter[pao->index].type) {
71 for (a = HPI_MAX_ADAPTERS - 1; a >= 0; a--) {
72 if (!adapters.adapter[a].type) {
73 HPI_DEBUG_LOG(WARNING,
74 "ASI%X duplicate index %d moved to %d\n",
75 pao->type, pao->index, a);
81 retval = HPI_ERROR_DUPLICATE_ADAPTER_NUMBER;
85 adapters.adapter[pao->index] = *pao;
86 hpios_dsplock_init(&adapters.adapter[pao->index]);
87 adapters.gw_num_adapters++;
90 hpios_alistlock_unlock(&adapters);
94 void hpi_delete_adapter(struct hpi_adapter_obj *pao)
97 HPI_DEBUG_LOG(ERROR, "removing null adapter?\n");
101 hpios_alistlock_lock(&adapters);
102 if (adapters.adapter[pao->index].type)
103 adapters.gw_num_adapters--;
104 memset(&adapters.adapter[pao->index], 0, sizeof(adapters.adapter[0]));
105 hpios_alistlock_unlock(&adapters);
109 * FindAdapter returns a pointer to the struct hpi_adapter_obj with
110 * index wAdapterIndex in an HPI_ADAPTERS_LIST structure.
113 struct hpi_adapter_obj *hpi_find_adapter(u16 adapter_index)
115 struct hpi_adapter_obj *pao = NULL;
117 if (adapter_index >= HPI_MAX_ADAPTERS) {
118 HPI_DEBUG_LOG(VERBOSE, "find_adapter invalid index %d\n",
123 pao = &adapters.adapter[adapter_index];
124 if (pao->type != 0) {
126 HPI_DEBUG_LOG(VERBOSE, "Found adapter index %d\n",
132 HPI_DEBUG_LOG(VERBOSE, "No adapter index %d\n",
141 * wipe an HPI_ADAPTERS_LIST structure.
144 static void wipe_adapter_list(void)
146 memset(&adapters, 0, sizeof(adapters));
149 static void subsys_get_adapter(struct hpi_message *phm,
150 struct hpi_response *phr)
152 int count = phm->obj_index;
155 /* find the nCount'th nonzero adapter in array */
156 for (index = 0; index < HPI_MAX_ADAPTERS; index++) {
157 if (adapters.adapter[index].type) {
164 if (index < HPI_MAX_ADAPTERS) {
165 phr->u.s.adapter_index = adapters.adapter[index].index;
166 phr->u.s.adapter_type = adapters.adapter[index].type;
168 phr->u.s.adapter_index = 0;
169 phr->u.s.adapter_type = 0;
170 phr->error = HPI_ERROR_INVALID_OBJ_INDEX;
174 static unsigned int control_cache_alloc_check(struct hpi_control_cache *pC)
187 if (pC->control_count && pC->cache_size_in_bytes) {
188 char *p_master_cache;
189 unsigned int byte_count = 0;
191 p_master_cache = (char *)pC->p_cache;
192 HPI_DEBUG_LOG(DEBUG, "check %d controls\n",
194 for (i = 0; i < pC->control_count; i++) {
195 struct hpi_control_cache_info *info =
196 (struct hpi_control_cache_info *)
197 &p_master_cache[byte_count];
198 u16 control_index = info->control_index;
200 if (control_index >= pC->control_count) {
202 "adap %d control index %d out of range, cache not ready?\n",
203 pC->adap_idx, control_index);
207 if (!info->size_in32bit_words) {
210 "adap %d cache not ready?\n",
214 /* The cache is invalid.
215 * Minimum valid entry size is
216 * sizeof(struct hpi_control_cache_info)
219 "adap %d zero size cache entry %d\n",
224 if (info->control_type) {
225 pC->p_info[control_index] = info;
227 } else { /* dummy cache entry */
228 pC->p_info[control_index] = NULL;
231 byte_count += info->size_in32bit_words * 4;
233 HPI_DEBUG_LOG(VERBOSE,
234 "cached %d, pinfo %p index %d type %d size %d\n",
235 cached, pC->p_info[info->control_index],
236 info->control_index, info->control_type,
237 info->size_in32bit_words);
239 /* quit loop early if whole cache has been scanned.
240 * dwControlCount is the maximum possible entries
241 * but some may be absent from the cache
243 if (byte_count >= pC->cache_size_in_bytes)
245 /* have seen last control index */
246 if (info->control_index == pC->control_count - 1)
250 if (byte_count != pC->cache_size_in_bytes)
251 HPI_DEBUG_LOG(WARNING,
252 "adap %d bytecount %d != cache size %d\n",
253 pC->adap_idx, byte_count,
254 pC->cache_size_in_bytes);
257 "adap %d cache good, bytecount == cache size = %d\n",
258 pC->adap_idx, byte_count);
260 pC->init = (u16)cached;
267 static short find_control(u16 control_index,
268 struct hpi_control_cache *p_cache, struct hpi_control_cache_info **pI)
270 if (!control_cache_alloc_check(p_cache)) {
271 HPI_DEBUG_LOG(VERBOSE,
272 "control_cache_alloc_check() failed %d\n",
277 *pI = p_cache->p_info[control_index];
279 HPI_DEBUG_LOG(VERBOSE, "Uncached Control %d\n",
283 HPI_DEBUG_LOG(VERBOSE, "find_control() type %d\n",
284 (*pI)->control_type);
289 /* allow unified treatment of several string fields within struct */
290 #define HPICMN_PAD_OFS_AND_SIZE(m) {\
291 offsetof(struct hpi_control_cache_pad, m), \
292 sizeof(((struct hpi_control_cache_pad *)(NULL))->m) }
294 struct pad_ofs_size {
296 unsigned int field_size;
299 static const struct pad_ofs_size pad_desc[] = {
300 HPICMN_PAD_OFS_AND_SIZE(c_channel), /* HPI_PAD_CHANNEL_NAME */
301 HPICMN_PAD_OFS_AND_SIZE(c_artist), /* HPI_PAD_ARTIST */
302 HPICMN_PAD_OFS_AND_SIZE(c_title), /* HPI_PAD_TITLE */
303 HPICMN_PAD_OFS_AND_SIZE(c_comment), /* HPI_PAD_COMMENT */
306 /** CheckControlCache checks the cache and fills the struct hpi_response
307 * accordingly. It returns one if a cache hit occurred, zero otherwise.
309 short hpi_check_control_cache_single(struct hpi_control_cache_single *pC,
310 struct hpi_message *phm, struct hpi_response *phr)
312 size_t response_size;
315 /* set the default response size */
317 sizeof(struct hpi_response_header) +
318 sizeof(struct hpi_control_res);
320 switch (pC->u.i.control_type) {
322 case HPI_CONTROL_METER:
323 if (phm->u.c.attribute == HPI_METER_PEAK) {
324 phr->u.c.an_log_value[0] = pC->u.meter.an_log_peak[0];
325 phr->u.c.an_log_value[1] = pC->u.meter.an_log_peak[1];
326 } else if (phm->u.c.attribute == HPI_METER_RMS) {
327 if (pC->u.meter.an_logRMS[0] ==
328 HPI_CACHE_INVALID_SHORT) {
330 HPI_ERROR_INVALID_CONTROL_ATTRIBUTE;
331 phr->u.c.an_log_value[0] = HPI_METER_MINIMUM;
332 phr->u.c.an_log_value[1] = HPI_METER_MINIMUM;
334 phr->u.c.an_log_value[0] =
335 pC->u.meter.an_logRMS[0];
336 phr->u.c.an_log_value[1] =
337 pC->u.meter.an_logRMS[1];
342 case HPI_CONTROL_VOLUME:
343 if (phm->u.c.attribute == HPI_VOLUME_GAIN) {
344 phr->u.c.an_log_value[0] = pC->u.vol.an_log[0];
345 phr->u.c.an_log_value[1] = pC->u.vol.an_log[1];
346 } else if (phm->u.c.attribute == HPI_VOLUME_MUTE) {
347 if (pC->u.vol.flags & HPI_VOLUME_FLAG_HAS_MUTE) {
348 if (pC->u.vol.flags & HPI_VOLUME_FLAG_MUTED)
350 HPI_BITMASK_ALL_CHANNELS;
355 HPI_ERROR_INVALID_CONTROL_ATTRIBUTE;
362 case HPI_CONTROL_MULTIPLEXER:
363 if (phm->u.c.attribute == HPI_MULTIPLEXER_SOURCE) {
364 phr->u.c.param1 = pC->u.mux.source_node_type;
365 phr->u.c.param2 = pC->u.mux.source_node_index;
370 case HPI_CONTROL_CHANNEL_MODE:
371 if (phm->u.c.attribute == HPI_CHANNEL_MODE_MODE)
372 phr->u.c.param1 = pC->u.mode.mode;
376 case HPI_CONTROL_LEVEL:
377 if (phm->u.c.attribute == HPI_LEVEL_GAIN) {
378 phr->u.c.an_log_value[0] = pC->u.level.an_log[0];
379 phr->u.c.an_log_value[1] = pC->u.level.an_log[1];
383 case HPI_CONTROL_TUNER:
384 if (phm->u.c.attribute == HPI_TUNER_FREQ)
385 phr->u.c.param1 = pC->u.tuner.freq_ink_hz;
386 else if (phm->u.c.attribute == HPI_TUNER_BAND)
387 phr->u.c.param1 = pC->u.tuner.band;
388 else if (phm->u.c.attribute == HPI_TUNER_LEVEL_AVG)
389 if (pC->u.tuner.s_level_avg ==
390 HPI_CACHE_INVALID_SHORT) {
391 phr->u.cu.tuner.s_level = 0;
393 HPI_ERROR_INVALID_CONTROL_ATTRIBUTE;
395 phr->u.cu.tuner.s_level =
396 pC->u.tuner.s_level_avg;
400 case HPI_CONTROL_AESEBU_RECEIVER:
401 if (phm->u.c.attribute == HPI_AESEBURX_ERRORSTATUS)
402 phr->u.c.param1 = pC->u.aes3rx.error_status;
403 else if (phm->u.c.attribute == HPI_AESEBURX_FORMAT)
404 phr->u.c.param1 = pC->u.aes3rx.format;
408 case HPI_CONTROL_AESEBU_TRANSMITTER:
409 if (phm->u.c.attribute == HPI_AESEBUTX_FORMAT)
410 phr->u.c.param1 = pC->u.aes3tx.format;
414 case HPI_CONTROL_TONEDETECTOR:
415 if (phm->u.c.attribute == HPI_TONEDETECTOR_STATE)
416 phr->u.c.param1 = pC->u.tone.state;
420 case HPI_CONTROL_SILENCEDETECTOR:
421 if (phm->u.c.attribute == HPI_SILENCEDETECTOR_STATE) {
422 phr->u.c.param1 = pC->u.silence.state;
426 case HPI_CONTROL_MICROPHONE:
427 if (phm->u.c.attribute == HPI_MICROPHONE_PHANTOM_POWER)
428 phr->u.c.param1 = pC->u.microphone.phantom_state;
432 case HPI_CONTROL_SAMPLECLOCK:
433 if (phm->u.c.attribute == HPI_SAMPLECLOCK_SOURCE)
434 phr->u.c.param1 = pC->u.clk.source;
435 else if (phm->u.c.attribute == HPI_SAMPLECLOCK_SOURCE_INDEX) {
436 if (pC->u.clk.source_index ==
437 HPI_CACHE_INVALID_UINT16) {
440 HPI_ERROR_INVALID_CONTROL_ATTRIBUTE;
442 phr->u.c.param1 = pC->u.clk.source_index;
443 } else if (phm->u.c.attribute == HPI_SAMPLECLOCK_SAMPLERATE)
444 phr->u.c.param1 = pC->u.clk.sample_rate;
448 case HPI_CONTROL_PAD:{
449 struct hpi_control_cache_pad *p_pad;
450 p_pad = (struct hpi_control_cache_pad *)pC;
452 if (!(p_pad->field_valid_flags & (1 <<
453 HPI_CTL_ATTR_INDEX(phm->u.c.
456 HPI_ERROR_INVALID_CONTROL_ATTRIBUTE;
460 if (phm->u.c.attribute == HPI_PAD_PROGRAM_ID)
461 phr->u.c.param1 = p_pad->pI;
462 else if (phm->u.c.attribute == HPI_PAD_PROGRAM_TYPE)
463 phr->u.c.param1 = p_pad->pTY;
466 HPI_CTL_ATTR_INDEX(phm->u.c.
468 unsigned int offset = phm->u.c.param1;
469 unsigned int pad_string_len, field_size;
473 if (index > ARRAY_SIZE(pad_desc) - 1) {
475 HPI_ERROR_INVALID_CONTROL_ATTRIBUTE;
481 pad_desc[index].offset;
482 field_size = pad_desc[index].field_size;
483 /* Ensure null terminator */
484 pad_string[field_size - 1] = 0;
486 pad_string_len = strlen(pad_string) + 1;
488 if (offset > pad_string_len) {
490 HPI_ERROR_INVALID_CONTROL_VALUE;
494 tocopy = pad_string_len - offset;
495 if (tocopy > sizeof(phr->u.cu.chars8.sz_data))
496 tocopy = sizeof(phr->u.cu.chars8.
499 memcpy(phr->u.cu.chars8.sz_data,
500 &pad_string[offset], tocopy);
502 phr->u.cu.chars8.remaining_chars =
503 pad_string_len - offset - tocopy;
512 HPI_DEBUG_LOG(VERBOSE, "%s Adap %d, Ctl %d, Type %d, Attr %d\n",
513 found ? "Cached" : "Uncached", phm->adapter_index,
514 pC->u.i.control_index, pC->u.i.control_type,
518 phr->size = (u16)response_size;
519 phr->type = HPI_TYPE_RESPONSE;
520 phr->object = phm->object;
521 phr->function = phm->function;
527 short hpi_check_control_cache(struct hpi_control_cache *p_cache,
528 struct hpi_message *phm, struct hpi_response *phr)
530 struct hpi_control_cache_info *pI;
532 if (!find_control(phm->obj_index, p_cache, &pI)) {
533 HPI_DEBUG_LOG(VERBOSE,
534 "HPICMN find_control() failed for adap %d\n",
540 phr->specific_error = 0;
543 return hpi_check_control_cache_single((struct hpi_control_cache_single
547 /** Updates the cache with Set values.
549 Only update if no error.
550 Volume and Level return the limited values in the response, so use these
551 Multiplexer does so use sent values
553 void hpi_cmn_control_cache_sync_to_msg_single(struct hpi_control_cache_single
554 *pC, struct hpi_message *phm, struct hpi_response *phr)
556 switch (pC->u.i.control_type) {
557 case HPI_CONTROL_VOLUME:
558 if (phm->u.c.attribute == HPI_VOLUME_GAIN) {
559 pC->u.vol.an_log[0] = phr->u.c.an_log_value[0];
560 pC->u.vol.an_log[1] = phr->u.c.an_log_value[1];
561 } else if (phm->u.c.attribute == HPI_VOLUME_MUTE) {
563 pC->u.vol.flags |= HPI_VOLUME_FLAG_MUTED;
565 pC->u.vol.flags &= ~HPI_VOLUME_FLAG_MUTED;
568 case HPI_CONTROL_MULTIPLEXER:
569 /* mux does not return its setting on Set command. */
570 if (phm->u.c.attribute == HPI_MULTIPLEXER_SOURCE) {
571 pC->u.mux.source_node_type = (u16)phm->u.c.param1;
572 pC->u.mux.source_node_index = (u16)phm->u.c.param2;
575 case HPI_CONTROL_CHANNEL_MODE:
576 /* mode does not return its setting on Set command. */
577 if (phm->u.c.attribute == HPI_CHANNEL_MODE_MODE)
578 pC->u.mode.mode = (u16)phm->u.c.param1;
580 case HPI_CONTROL_LEVEL:
581 if (phm->u.c.attribute == HPI_LEVEL_GAIN) {
582 pC->u.vol.an_log[0] = phr->u.c.an_log_value[0];
583 pC->u.vol.an_log[1] = phr->u.c.an_log_value[1];
586 case HPI_CONTROL_MICROPHONE:
587 if (phm->u.c.attribute == HPI_MICROPHONE_PHANTOM_POWER)
588 pC->u.microphone.phantom_state = (u16)phm->u.c.param1;
590 case HPI_CONTROL_AESEBU_TRANSMITTER:
591 if (phm->u.c.attribute == HPI_AESEBUTX_FORMAT)
592 pC->u.aes3tx.format = phm->u.c.param1;
594 case HPI_CONTROL_AESEBU_RECEIVER:
595 if (phm->u.c.attribute == HPI_AESEBURX_FORMAT)
596 pC->u.aes3rx.format = phm->u.c.param1;
598 case HPI_CONTROL_SAMPLECLOCK:
599 if (phm->u.c.attribute == HPI_SAMPLECLOCK_SOURCE)
600 pC->u.clk.source = (u16)phm->u.c.param1;
601 else if (phm->u.c.attribute == HPI_SAMPLECLOCK_SOURCE_INDEX)
602 pC->u.clk.source_index = (u16)phm->u.c.param1;
603 else if (phm->u.c.attribute == HPI_SAMPLECLOCK_SAMPLERATE)
604 pC->u.clk.sample_rate = phm->u.c.param1;
611 void hpi_cmn_control_cache_sync_to_msg(struct hpi_control_cache *p_cache,
612 struct hpi_message *phm, struct hpi_response *phr)
614 struct hpi_control_cache_single *pC;
615 struct hpi_control_cache_info *pI;
620 if (!find_control(phm->obj_index, p_cache, &pI)) {
621 HPI_DEBUG_LOG(VERBOSE,
622 "HPICMN find_control() failed for adap %d\n",
627 /* pC is the default cached control strucure.
628 May be cast to something else in the following switch statement.
630 pC = (struct hpi_control_cache_single *)pI;
632 hpi_cmn_control_cache_sync_to_msg_single(pC, phm, phr);
635 /** Allocate control cache.
637 \return Cache pointer, or NULL if allocation fails.
639 struct hpi_control_cache *hpi_alloc_control_cache(const u32 control_count,
640 const u32 size_in_bytes, u8 *p_dsp_control_buffer)
642 struct hpi_control_cache *p_cache =
643 kmalloc(sizeof(*p_cache), GFP_KERNEL);
648 kcalloc(control_count, sizeof(*p_cache->p_info), GFP_KERNEL);
649 if (!p_cache->p_info) {
654 p_cache->cache_size_in_bytes = size_in_bytes;
655 p_cache->control_count = control_count;
656 p_cache->p_cache = p_dsp_control_buffer;
661 void hpi_free_control_cache(struct hpi_control_cache *p_cache)
664 kfree(p_cache->p_info);
669 static void subsys_message(struct hpi_message *phm, struct hpi_response *phr)
671 hpi_init_response(phr, HPI_OBJ_SUBSYSTEM, phm->function, 0);
673 switch (phm->function) {
674 case HPI_SUBSYS_OPEN:
675 case HPI_SUBSYS_CLOSE:
676 case HPI_SUBSYS_DRIVER_UNLOAD:
678 case HPI_SUBSYS_DRIVER_LOAD:
680 hpios_alistlock_init(&adapters);
682 case HPI_SUBSYS_GET_ADAPTER:
683 subsys_get_adapter(phm, phr);
685 case HPI_SUBSYS_GET_NUM_ADAPTERS:
686 phr->u.s.num_adapters = adapters.gw_num_adapters;
688 case HPI_SUBSYS_CREATE_ADAPTER:
691 phr->error = HPI_ERROR_INVALID_FUNC;
696 void HPI_COMMON(struct hpi_message *phm, struct hpi_response *phr)
699 case HPI_TYPE_REQUEST:
700 switch (phm->object) {
701 case HPI_OBJ_SUBSYSTEM:
702 subsys_message(phm, phr);
708 phr->error = HPI_ERROR_INVALID_TYPE;