A2DP Playback Flow: Source Code Analysis

In a previous article we analyzed the initialization flow of the A2DP profile; this article looks at how the audio stream is handled inside bluedroid.

The upper-layer audio framework sends commands and data by calling into the interfaces of the A2DP HAL.

The initialization and setup of the control channel are not covered here; we focus on how the data flows and where it is processed, starting from the last command on the control channel: start.

Let's go straight to the implementation of out_write in the A2DP HAL:

static ssize_t out_write(struct audio_stream_out *stream, const void* buffer,
                         size_t bytes)
{
    struct a2dp_stream_out *out = (struct a2dp_stream_out *)stream;
    int sent;
...
    if (out->common.state == AUDIO_A2DP_STATE_SUSPENDED)
    {
        DEBUG("stream suspended");
        pthread_mutex_unlock(&out->common.lock);
        return -1;
    }

    /* only allow autostarting if we are in stopped or standby */
    if ((out->common.state == AUDIO_A2DP_STATE_STOPPED) ||
        (out->common.state == AUDIO_A2DP_STATE_STANDBY))
    {
        if (start_audio_datapath(&out->common) < 0) // set up the audio data path
        {
            /* emulate time this write represents to avoid very fast write
               failures during transition periods or remote suspend */

            int us_delay = calc_audiotime(out->common.cfg, bytes);

            DEBUG("emulate a2dp write delay (%d us)", us_delay);

            usleep(us_delay);
            pthread_mutex_unlock(&out->common.lock);
            return -1;
        }
    }
    else if (out->common.state != AUDIO_A2DP_STATE_STARTED)
    {
        ERROR("stream not in stopped or standby");
        pthread_mutex_unlock(&out->common.lock);
        return -1;
    }

    pthread_mutex_unlock(&out->common.lock);
    sent = skt_write(out->common.audio_fd, buffer,  bytes); // send the data over the audio data path

    if (sent == -1)
    {
        /* error handling */
    }
    return sent;
}
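As an aside, calc_audiotime in the failure branch converts the byte count into the playback time those bytes represent, so that a failed write still consumes roughly the time the audio would have taken to play. A minimal sketch of that conversion, assuming 16-bit PCM; the real function reads rate and channel count from out->common.cfg, so the parameters here are illustrative stand-ins:

#include <stdint.h>
#include <stddef.h>

/* Sketch: microseconds of playback represented by `bytes` of PCM data,
 * assuming 16-bit samples (illustrative stand-in for calc_audiotime). */
static int sketch_calc_audiotime(uint32_t rate, uint32_t channels, size_t bytes)
{
    const uint32_t bytes_per_sample = 2; /* 16-bit PCM */
    uint64_t bytes_per_second = (uint64_t)rate * channels * bytes_per_sample;
    return (int)(((uint64_t)bytes * 1000000) / bytes_per_second);
}

For example, 3528 bytes at 44.1 kHz stereo works out to about 20000 us, i.e. 20 ms of audio.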

When A2DP has just connected, out->common.state is still AUDIO_A2DP_STATE_STANDBY, so the A2DP data path has to be set up first:

static int start_audio_datapath(struct a2dp_stream_common *common)
{
...

    int oldstate = common->state;
    common->state = AUDIO_A2DP_STATE_STARTING; // set the new state

    int a2dp_status = a2dp_command(common, A2DP_CTRL_CMD_START); // write the command to the control socket; btif_recv_ctrl_data in bluedroid's btif_media_task.c handles it
...

    /* connect socket if not yet connected */
    if (common->audio_fd == AUDIO_SKT_DISCONNECTED)
    {
        common->audio_fd = skt_connect(A2DP_DATA_PATH, common->buffer_sz); // the server side of this socket was created earlier in UIPC_Open, so we can connect now
        ...
        common->state = AUDIO_A2DP_STATE_STARTED;
    }

    return 0;
}

 

Two things happen here:

  1. a2dp_command(common, A2DP_CTRL_CMD_START)
  2. skt_connect(A2DP_DATA_PATH, common->buffer_sz);

Let's look at each in turn. The first one issues the A2DP_CTRL_CMD_START command, which corresponds to what you see in the HCI log.

The second establishes the socket connection used for the subsequent data transfer.

Let's start with the first flow:

static int a2dp_command(struct a2dp_stream_common *common, char cmd)
{
    char ack;

    DEBUG("A2DP COMMAND %s", dump_a2dp_ctrl_event(cmd));

    /* send command */
    if (send(common->ctrl_fd, &cmd, 1, MSG_NOSIGNAL) == -1) // send the command over the control socket; the btif_media_task thread will process it
    {
...
    }

    /* wait for ack byte */
    if (a2dp_ctrl_receive(common, &ack, 1) < 0) // wait for the ack coming back
        return -1;
...
    return 0;
}

 

So a2dp_command simply writes the command into the control channel that was established earlier. Who handles this command? The answer is btif_a2dp_ctrl_cb; let's look at what btif_media_thread_init does:

static void btif_media_thread_init(UNUSED_ATTR void *context) {
  memset(&btif_media_cb, 0, sizeof(btif_media_cb));
  UIPC_Init(NULL);

#if (BTA_AV_INCLUDED == TRUE)
  UIPC_Open(UIPC_CH_ID_AV_CTRL , btif_a2dp_ctrl_cb); // register btif_a2dp_ctrl_cb as the handler for the control channel
#endif

  raise_priority_a2dp(TASK_HIGH_MEDIA);
  media_task_running = MEDIA_TASK_STATE_ON;
}

 

Next, let's see how btif_a2dp_ctrl_cb handles A2DP_CTRL_CMD_START.

In the UIPC mechanism, incoming data is signaled with a UIPC_RX_DATA_READY_EVT event:

static void btif_a2dp_ctrl_cb(tUIPC_CH_ID ch_id, tUIPC_EVENT event)
{
    UNUSED(ch_id);

    switch(event)
    {
        case UIPC_OPEN_EVT:
...
            break;

        case UIPC_CLOSE_EVT:
...
            break;

        case UIPC_RX_DATA_READY_EVT:
            btif_recv_ctrl_data();
            break;

        default :
            APPL_TRACE_ERROR("### A2DP-CTRL-CHANNEL EVENT %d NOT HANDLED ###", event);
            break;
    }
}

 

Once the event is identified as control data, it is routed to btif_recv_ctrl_data:

static void btif_recv_ctrl_data(void)
{
    UINT8 cmd = 0;
    int n;
    n = UIPC_Read(UIPC_CH_ID_AV_CTRL, NULL, &cmd, 1); // read the command byte first
...
    btif_media_cb.a2dp_cmd_pending = cmd;

    switch(cmd)
    {
        case A2DP_CTRL_CMD_CHECK_READY:
...
            break;

        case A2DP_CTRL_CMD_START:
            /* Don't send a START request to the stack while we are in a call.
               Some headsets, like the Sony MW600, don't allow AVDTP START
               in call and respond BAD_STATE. */
            if (!btif_hf_is_call_idle()) // if a call is in progress, do not start the stream
            {
                a2dp_cmd_acknowledge(A2DP_CTRL_ACK_INCALL_FAILURE);
                break;
            }

            if (btif_av_stream_ready() == TRUE) // if the stream is ready, set up the data socket and kick off the audio path
            {
                /* setup audio data channel listener */
                UIPC_Open(UIPC_CH_ID_AV_AUDIO, btif_a2dp_data_cb);

                /* post start event and wait for audio path to open */
                btif_dispatch_sm_event(BTIF_AV_START_STREAM_REQ_EVT, NULL, 0);

#if (BTA_AV_SINK_INCLUDED == TRUE)
                if (btif_media_cb.peer_sep == AVDT_TSEP_SRC)
                    a2dp_cmd_acknowledge(A2DP_CTRL_ACK_SUCCESS);
#endif
            }
            else if (btif_av_stream_started_ready())
            {
                /* already started, setup audio data channel listener
                   and ack back immediately */
                UIPC_Open(UIPC_CH_ID_AV_AUDIO, btif_a2dp_data_cb);

                a2dp_cmd_acknowledge(A2DP_CTRL_ACK_SUCCESS);
            }
            else
            {
                a2dp_cmd_acknowledge(A2DP_CTRL_ACK_FAILURE);
                break;
            }
            break;

        case A2DP_CTRL_CMD_STOP:
...
            break;

        case A2DP_CTRL_CMD_SUSPEND:
            /* local suspend */
            if (btif_av_stream_started_ready())
            {
                btif_dispatch_sm_event(BTIF_AV_SUSPEND_STREAM_REQ_EVT, NULL, 0);
            }
            else
            {
                /* if we are not in started state, just ack back ok and let
                   audioflinger close the channel. This can happen if we are
                   remotely suspended, clear REMOTE SUSPEND Flag */
                btif_av_clear_remote_suspend_flag();
                a2dp_cmd_acknowledge(A2DP_CTRL_ACK_SUCCESS);
            }
            break;

        case A2DP_CTRL_GET_AUDIO_CONFIG:
        {
...
            break;
        }

        default:
            APPL_TRACE_ERROR("UNSUPPORTED CMD (%d)", cmd);
            a2dp_cmd_acknowledge(A2DP_CTRL_ACK_FAILURE);
            break;
    }
    APPL_TRACE_DEBUG("a2dp-ctrl-cmd : %s DONE", dump_a2dp_ctrl_event(cmd));
}

The command here is A2DP_CTRL_CMD_START.

If a call is in progress, the stream is not started. Otherwise two things happen:

  1. Set up the server side of the data socket and wait for a connection.
  2. Dispatch the BTIF_AV_START_STREAM_REQ_EVT event to the AV state machine and wait for the audio path to open.

Let's look at the first one:

  UIPC_Open(UIPC_CH_ID_AV_AUDIO, btif_a2dp_data_cb);

BOOLEAN UIPC_Open(tUIPC_CH_ID ch_id, tUIPC_RCV_CBACK *p_cback)
{
...
    switch(ch_id)
    {
       case UIPC_CH_ID_AV_AUDIO:
            uipc_setup_server_locked(ch_id, A2DP_DATA_PATH, p_cback); // set up the server side of the socket first
            break;
...

 

This mainly creates the server side of the socket and stores the server socket fd in uipc_main.ch[UIPC_CH_ID_AV_AUDIO].srvfd:

    fd = create_server_socket(name);

    uipc_main.ch[ch_id].srvfd = fd;
    uipc_main.ch[ch_id].cback = cback;
    uipc_main.ch[ch_id].read_poll_tmo_ms = DEFAULT_READ_POLL_TMO_MS;
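For reference, a minimal sketch of what setting up such a local server socket involves; the real create_server_socket in uipc.c uses Android's local-socket helpers, so treat this as an approximation:

#include <string.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>

/* Sketch: UNIX-domain server socket bound to a filesystem path such as
 * /data/misc/bluedroid/.a2dp_data (approximation of create_server_socket). */
static int sketch_create_server_socket(const char *name)
{
    int fd = socket(AF_UNIX, SOCK_STREAM, 0);
    if (fd < 0)
        return -1;

    struct sockaddr_un addr;
    memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;
    strncpy(addr.sun_path, name, sizeof(addr.sun_path) - 1);

    unlink(name); /* remove a stale socket file, if any */

    if (bind(fd, (struct sockaddr *)&addr, sizeof(addr)) < 0 || listen(fd, 1) < 0) {
        close(fd);
        return -1;
    }
    return fd; /* stored as srvfd; the UIPC select loop later accept()s on it */
}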

 

 

Now let's look at the second part:

btif_dispatch_sm_event(BTIF_AV_START_STREAM_REQ_EVT, NULL, 0);

/* used to pass events to AV statemachine from other tasks */
void btif_dispatch_sm_event(btif_av_sm_event_t event, void *p_data, int len)
{
    /* Switch to BTIF context */
    btif_transfer_context(btif_av_handle_event, event,
                          (char*)p_data, len, NULL);
}

 

This passes the event over to the AV state machine:

static void btif_av_handle_event(UINT16 event, char* p_param)
{
    btif_sm_dispatch(btif_av_cb.sm_handle, event, (void*)p_param);
    btif_av_event_free_data(event, p_param);
}

 

Here is the handler table the state machine rotates through:

static const btif_sm_handler_t btif_av_state_handlers[] =
{
    btif_av_state_idle_handler,
    btif_av_state_opening_handler,
    btif_av_state_opened_handler,
    btif_av_state_started_handler,
    btif_av_state_closing_handler
};

 

The current state is opened, so the handler is btif_av_state_opened_handler.

Here is how it handles this event:

 case BTIF_AV_START_STREAM_REQ_EVT:
            if (btif_av_cb.peer_sep != AVDT_TSEP_SRC)
                btif_a2dp_setup_codec();
            BTIF_TRACE_EVENT("BTIF_AV_START_STREAM_REQ_EVT begin BTA_AvStart libs_liu");
            BTA_AvStart();
            BTIF_TRACE_EVENT("BTIF_AV_START_STREAM_REQ_EVT end BTA_AvStart libs_liu");
            btif_av_cb.flags |= BTIF_AV_FLAG_PENDING_START;
            break;

 

It first saves and applies the codec parameters, then calls BTA_AvStart (which sends the BTA_AV_API_START_EVT message), and finally sets btif_av_cb.flags |= BTIF_AV_FLAG_PENDING_START to mark the stream as pending start.

Let's walk through the BTA_AvStart flow:

void BTA_AvStart(void)
{
    BT_HDR  *p_buf;

    if ((p_buf = (BT_HDR *) GKI_getbuf(sizeof(BT_HDR))) != NULL)
    {
        p_buf->event = BTA_AV_API_START_EVT;
        bta_sys_sendmsg(p_buf);
    }
}

 

This sends BTA_AV_API_START_EVT (0x1238), which is handled through bta_av_nsm_act:

    bta_av_api_to_ssm,      /* BTA_AV_API_START_EVT */

 

We can see that execution moves into the stream state machine:

/*******************************************************************************
**
** Function         bta_av_api_to_ssm
**
** Description      forward the API request to stream state machine
**
**
** Returns          void
**
*******************************************************************************/
static void bta_av_api_to_ssm(tBTA_AV_DATA *p_data)
{
    int xx;
    UINT16 event = p_data->hdr.event - BTA_AV_FIRST_A2S_API_EVT + BTA_AV_FIRST_A2S_SSM_EVT;

    for(xx=0; xx<BTA_AV_NUM_STRS; xx++)/* maximum number of streams created: 1 for audio, 1 for video */
    {
        bta_av_ssm_execute(bta_av_cb.p_scb[xx], event, p_data); // one of the two streams (video) is not registered, so only the audio SCB actually handles it
    }
}

 

Here we enter the stream state machine:

AV Sevent(0x41)=0x120b(AP_START) state=3(OPEN)

 

The stream state machine is currently in the OPEN state.

/* AP_START_EVT */          {BTA_AV_DO_START,       BTA_AV_SIGNORE,        BTA_AV_OPEN_SST },
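Rows like this one drive the BTA stream state machine: each (event, state) entry selects an action function and the next state. A toy sketch of that table-driven dispatch, with illustrative names rather than the real BTA tables (which also allow two actions per entry):

#include <stdio.h>

/* Toy table-driven state machine in the spirit of bta_av_ssm_execute. */
typedef enum { ST_OPEN, ST_STREAMING, ST_NUM } state_t;
typedef enum { EV_AP_START, EV_SRC_DATA_READY, EV_NUM } event_t;
typedef void (*action_fn)(void);

static void do_start(void)  { printf("do_start: send AVDTP START\n"); }
static void data_path(void) { printf("data_path: push one media packet\n"); }
static void ignore(void)    { }

static const struct { action_fn action; state_t next; } tbl[ST_NUM][EV_NUM] = {
    [ST_OPEN][EV_AP_START]            = { do_start,  ST_OPEN },
    [ST_OPEN][EV_SRC_DATA_READY]      = { data_path, ST_OPEN },
    [ST_STREAMING][EV_AP_START]       = { ignore,    ST_STREAMING },
    [ST_STREAMING][EV_SRC_DATA_READY] = { data_path, ST_STREAMING },
};

static state_t sm_execute(state_t state, event_t event)
{
    tbl[state][event].action();    /* run the action for this (state, event) */
    return tbl[state][event].next; /* move to the next state from the table */
}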

 

The action executed is BTA_AV_DO_START, and the next state is still OPEN. Here is the implementation of that function:

/*******************************************************************************
**
** Function         bta_av_do_start
**
** Description      Start stream.
**
** Returns          void
**
*******************************************************************************/
void bta_av_do_start (tBTA_AV_SCB *p_scb, tBTA_AV_DATA *p_data)
{
    UINT8 policy = HCI_ENABLE_SNIFF_MODE;
    UINT8       cur_role;
...

    if ((p_scb->started == FALSE) && ((p_scb->role & BTA_AV_ROLE_START_INT) == 0))
    {
        p_scb->role |= BTA_AV_ROLE_START_INT;
        bta_sys_busy(BTA_ID_AV, bta_av_cb.audio_open_cnt, p_scb->peer_addr);

        AVDT_StartReq(&p_scb->avdt_handle, 1); // issue the AVDTP start request
    }
    else if (p_scb->started)
    {
    ...
    }
}

 

The AVDTP start procedure above corresponds to what you see in the HCI log.

At this point the first part of start_audio_datapath, a2dp_command(common, A2DP_CTRL_CMD_START), has basically been covered. Next we look at skt_connect(A2DP_DATA_PATH, common->buffer_sz), which is much simpler: it just makes a socket connection, and the path it connects to is /data/misc/bluedroid/.a2dp_data:

common->audio_fd = skt_connect(A2DP_DATA_PATH, common->buffer_sz);

After that, any audio data coming down from the framework only needs to be written to common->audio_fd.
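As a rough sketch of what skt_connect amounts to, under the assumption that it is a blocking connect() on that UNIX-domain path plus tuning the socket buffers to buffer_sz (the real HAL helper differs in details):

#include <string.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>

/* Sketch: client side of the A2DP data socket (approximation of skt_connect). */
static int sketch_skt_connect(const char *path, size_t buffer_sz)
{
    int fd = socket(AF_UNIX, SOCK_STREAM, 0);
    if (fd < 0)
        return -1;

    struct sockaddr_un addr;
    memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;
    strncpy(addr.sun_path, path, sizeof(addr.sun_path) - 1);

    if (connect(fd, (struct sockaddr *)&addr, sizeof(addr)) < 0) {
        close(fd);
        return -1;
    }

    int len = (int)buffer_sz;
    setsockopt(fd, SOL_SOCKET, SO_SNDBUF, &len, sizeof(len));
    setsockopt(fd, SOL_SOCKET, SO_RCVBUF, &len, sizeof(len));
    return fd; /* stored in common->audio_fd; out_write() then writes PCM to it */
}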

That concludes the analysis of how the A2DP data channel gets opened.

Next, let's look at the audio data stream.

Once the socket above is connected, the first thing that happens is that the UIPC_OPEN_EVT and UIPC_RX_DATA_READY_EVT events are delivered:

static int uipc_check_fd_locked(tUIPC_CH_ID ch_id)
{
    if (SAFE_FD_ISSET(uipc_main.ch[ch_id].srvfd, &uipc_main.read_set))
    {
        BTIF_TRACE_EVENT("INCOMING CONNECTION ON CH %d", ch_id);
        uipc_main.ch[ch_id].fd = accept_server_socket(uipc_main.ch[ch_id].srvfd);
...
        if (uipc_main.ch[ch_id].cback)
            uipc_main.ch[ch_id].cback(ch_id, UIPC_OPEN_EVT); // notify that the channel is open
    }

    if (SAFE_FD_ISSET(uipc_main.ch[ch_id].fd, &uipc_main.read_set))
    {
        BTIF_TRACE_EVENT("INCOMING DATA ON CH %d", ch_id);

        if (uipc_main.ch[ch_id].cback)
            uipc_main.ch[ch_id].cback(ch_id, UIPC_RX_DATA_READY_EVT); // data has arrived
    }
    return 0;
}

 

Let's look at the handling of UIPC_OPEN_EVT first.

The callback here is btif_a2dp_data_cb:

static void btif_a2dp_data_cb(tUIPC_CH_ID ch_id, tUIPC_EVENT event)
{
    switch(event)
    {
        case UIPC_OPEN_EVT:

            /*  read directly from media task from here on (keep the callback
                for connection events) */
            UIPC_Ioctl(UIPC_CH_ID_AV_AUDIO, UIPC_REG_REMOVE_ACTIVE_READSET, NULL); // remove uipc_main.ch[ch_id].fd from uipc_main.active_set
            UIPC_Ioctl(UIPC_CH_ID_AV_AUDIO, UIPC_SET_READ_POLL_TMO,
                       (void *)A2DP_DATA_READ_POLL_MS); // set uipc_main.ch[ch_id].read_poll_tmo_ms = 10 ms, the UIPC poll timeout

            if (btif_media_cb.peer_sep == AVDT_TSEP_SNK) {

                /* Start the media task to encode SBC */
                btif_media_task_start_aa_req(); // send BTIF_MEDIA_START_AA_TX to the media task

                /* make sure we update any changed sbc encoder params */
                btif_a2dp_encoder_update(); // update the SBC encoder parameters
            }
            btif_media_cb.data_channel_open = TRUE;

            /* ack back when media task is fully started */
            break;

        case UIPC_CLOSE_EVT:
            a2dp_cmd_acknowledge(A2DP_CTRL_ACK_SUCCESS);
            btif_audiopath_detached();
            btif_media_cb.data_channel_open = FALSE;
            break;

        default :
            APPL_TRACE_ERROR("### A2DP-DATA EVENT %d NOT HANDLED ###", event);
            break;
    }
}

 

You may notice that the function above has no handling for UIPC_RX_DATA_READY_EVT. Why? Because the UIPC_OPEN_EVT handling already removed uipc_main.ch[ch_id].fd from uipc_main.active_set, so the data fd is no longer driven by the UIPC callback.
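From this point on, the media task pulls the audio data itself, with read_poll_tmo_ms bounding each wait. A minimal sketch of such a polled read, illustrative only and not the exact UIPC_Read:

#include <poll.h>
#include <sys/types.h>
#include <unistd.h>

/* Sketch: read up to `len` bytes from `fd`, waiting at most `tmo_ms`
 * (in the spirit of UIPC_Read with read_poll_tmo_ms = 10 ms). */
static ssize_t sketch_polled_read(int fd, void *buf, size_t len, int tmo_ms)
{
    struct pollfd pfd = { .fd = fd, .events = POLLIN };

    if (poll(&pfd, 1, tmo_ms) <= 0)
        return 0; /* timeout or error: no data this round */

    return read(fd, buf, len);
}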

The UIPC_OPEN_EVT handling also ends up setting a timer, which we will analyze when it is used. Now let's look at how BTIF_MEDIA_START_AA_TX is sent to the media task:

BOOLEAN btif_media_task_start_aa_req(void)
{
    BT_HDR *p_buf;
    if (NULL == (p_buf = GKI_getbuf(sizeof(BT_HDR))))
    {
        APPL_TRACE_EVENT("GKI failed");
        return FALSE;
    }

    p_buf->event = BTIF_MEDIA_START_AA_TX;

    fixed_queue_enqueue(btif_media_cmd_msg_queue, p_buf); // put it on the queue; the media task will pick it up
    return TRUE;
}

 

In btif_a2dp_start_media_task we already bound the media task thread, the message queue and the message handler together:

    fixed_queue_register_dequeue(btif_media_cmd_msg_queue,
        thread_get_reactor(worker_thread),
        btif_media_thread_handle_cmd,
        NULL);
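The pattern here is a plain producer/consumer queue: commands are enqueued from any thread, and the worker (media task) thread dequeues and handles them. A toy sketch of that pattern with pthreads, not the osi fixed_queue/reactor API:

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* Toy command queue: enqueue() from any thread, worker() plays the role of
 * the media task thread and handles each message in order. */
typedef struct node { struct node *next; int event; } node_t;

static node_t *head, *tail;
static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t  cond = PTHREAD_COND_INITIALIZER;

static void enqueue(int event)
{
    node_t *n = malloc(sizeof(*n));
    if (n == NULL)
        return;
    n->event = event;
    n->next = NULL;
    pthread_mutex_lock(&lock);
    if (tail) tail->next = n; else head = n;
    tail = n;
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&lock);
}

static void *worker(void *arg) /* start with pthread_create(&tid, NULL, worker, NULL) */
{
    (void)arg;
    for (;;) {
        pthread_mutex_lock(&lock);
        while (head == NULL)
            pthread_cond_wait(&cond, &lock);
        node_t *n = head;
        head = n->next;
        if (head == NULL) tail = NULL;
        pthread_mutex_unlock(&lock);

        printf("handle_cmd: event %d\n", n->event); /* cf. btif_media_thread_handle_cmd */
        free(n);
    }
    return NULL;
}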

 

Here is how the handler processes the message:

static void btif_media_thread_handle_cmd(fixed_queue_t *queue, UNUSED_ATTR void *context)
{
    BT_HDR *p_msg = (BT_HDR *)fixed_queue_dequeue(queue);
    LOG_VERBOSE("btif_media_thread_handle_cmd : %d %s", p_msg->event,
             dump_media_event(p_msg->event));

    switch (p_msg->event)
    {
#if (BTA_AV_INCLUDED == TRUE)
    case BTIF_MEDIA_START_AA_TX:
        btif_media_task_aa_start_tx();
        break;

 

Continuing:

/*******************************************************************************
 **
 ** Function         btif_media_task_aa_start_tx
 **
 ** Description      Start media task encoding
 **
 ** Returns          void
 **
 *******************************************************************************/
static void btif_media_task_aa_start_tx(void)
{

    /* Use a timer to poll the UIPC, get rid of the UIPC call back */

    btif_media_cb.is_tx_timer = TRUE;
    last_frame_us = 0;

    /* Reset the media feeding state */
    btif_media_task_feeding_state_reset();

    btif_media_cb.media_alarm = alarm_new();

    alarm_set_periodic(btif_media_cb.media_alarm, BTIF_MEDIA_TIME_TICK, btif_media_task_alarm_cb, NULL);

}

 

This sets up a periodic timer, where #define BTIF_MEDIA_TIME_TICK (20 * BTIF_MEDIA_NUM_TICK).

Every 20 ms it goes and reads the audio data, instead of relying on the UIPC callback.
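To put numbers on this: with the 44.1 kHz, 16-bit stereo feeding configured in btif_a2dp_setup_codec (shown later), one 20 ms tick corresponds to 44100 * 0.02 = 882 PCM frames, i.e. 882 * 2 * 2 = 3528 bytes, which is roughly 6 to 7 SBC frames at 16 blocks * 8 subbands. A sketch of that per-tick accounting; the real btif_get_num_aa_frame keeps the fractional remainder in media_feeding_state so nothing is lost between ticks:

#include <stdint.h>

/* Sketch of per-tick frame accounting (illustrative names only). */
typedef struct {
    uint64_t pcm_bytes_pending; /* PCM bytes owed to the encoder so far */
} feed_state_t;

static uint8_t sketch_frames_this_tick(feed_state_t *st,
                                       uint32_t sample_rate,     /* e.g. 44100 */
                                       uint32_t num_channels,    /* e.g. 2 */
                                       uint32_t bits_per_sample, /* e.g. 16 */
                                       uint32_t tick_ms,         /* e.g. 20 */
                                       uint32_t pcm_frames_per_sbc_frame) /* blocks * subbands */
{
    uint32_t bytes_per_pcm_frame = num_channels * bits_per_sample / 8;
    uint32_t bytes_per_sbc_frame = pcm_frames_per_sbc_frame * bytes_per_pcm_frame;

    /* PCM produced by the source during one tick */
    st->pcm_bytes_pending += (uint64_t)sample_rate * tick_ms / 1000 * bytes_per_pcm_frame;

    uint8_t nb_frames = (uint8_t)(st->pcm_bytes_pending / bytes_per_sbc_frame);
    st->pcm_bytes_pending -= (uint64_t)nb_frames * bytes_per_sbc_frame; /* keep the residue */
    return nb_frames;
}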

Let's look at the implementation of the timer callback:

static void btif_media_task_alarm_cb(UNUSED_ATTR void *context) {
  thread_post(worker_thread, btif_media_task_aa_handle_timer, NULL); // run on the media task thread
}

 

 

static void btif_media_task_aa_handle_timer(UNUSED_ATTR void *context)
{
    log_tstamps_us("media task tx timer");

#if (BTA_AV_INCLUDED == TRUE)
    if(btif_media_cb.is_tx_timer == TRUE) // this flag was set earlier
    {
        btif_media_send_aa_frame(); // despite the name "send", this also reads the audio data first
    }
    else
    {
        APPL_TRACE_ERROR("ERROR Media task Scheduled after Suspend");
    }
#endif
}

 

Now let's analyze btif_media_send_aa_frame:

/*******************************************************************************
 **
 ** Function         btif_media_send_aa_frame
 **
 ** Description
 **
 ** Returns          void
 **
 *******************************************************************************/
static void btif_media_send_aa_frame(void)
{
    UINT8 nb_frame_2_send;

    /* get the number of frame to send */
    nb_frame_2_send = btif_get_num_aa_frame(); // compute how many frames to fetch, based on the elapsed time

    if (nb_frame_2_send != 0)
    {
        /* format and Q buffer to send */
        btif_media_aa_prep_2_send(nb_frame_2_send); // read the data and queue it up
    }

    /* send it */
    LOG_VERBOSE("btif_media_send_aa_frame : send %d frames", nb_frame_2_send);
    bta_av_ci_src_data_ready(BTA_AV_CHNL_AUDIO); // send the data
}

The two functions worth a closer look here are btif_media_aa_prep_2_send and bta_av_ci_src_data_ready.

btif_media_aa_prep_2_send

/*******************************************************************************
 **
 ** Function         btif_media_aa_prep_2_send
 **
 ** Description
 **
 ** Returns          void
 **
 *******************************************************************************/

static void btif_media_aa_prep_2_send(UINT8 nb_frame)
{
    // Check for TX queue overflow: if btif_media_cb.TxAaQ holds too much data, drop some of it
    while (GKI_queue_length(&btif_media_cb.TxAaQ) > (MAX_OUTPUT_A2DP_FRAME_QUEUE_SZ - nb_frame))
        GKI_freebuf(GKI_dequeue(&(btif_media_cb.TxAaQ)));

    // Transcode frame

    switch (btif_media_cb.TxTranscoding)
    {
    case BTIF_MEDIA_TRSCD_PCM_2_SBC:
        btif_media_aa_prep_sbc_2_send(nb_frame);
        break;

    default:
...
    }
}

Let's continue with the implementation of btif_media_aa_prep_sbc_2_send:

/*******************************************************************************
 **
 ** Function         btif_media_aa_prep_sbc_2_send
 **
 ** Description
 **
 ** Returns          void
 **
 *******************************************************************************/
static void btif_media_aa_prep_sbc_2_send(UINT8 nb_frame)
{
    BT_HDR * p_buf;
    UINT16 blocm_x_subband = btif_media_cb.encoder.s16NumOfSubBands *
                             btif_media_cb.encoder.s16NumOfBlocks;

    while (nb_frame)
    {
        if (NULL == (p_buf = GKI_getpoolbuf(BTIF_MEDIA_AA_POOL_ID)))
        {
...
            return;
        }

        /* Init buffer */
        p_buf->offset = BTIF_MEDIA_AA_SBC_OFFSET;
        p_buf->len = 0;
        p_buf->layer_specific = 0;

        do
        {
            /* Write @ of allocated buffer in encoder.pu8Packet */
            btif_media_cb.encoder.pu8Packet = (UINT8 *) (p_buf + 1) + p_buf->offset + p_buf->len;
            /* Fill allocated buffer with 0 */
            memset(btif_media_cb.encoder.as16PcmBuffer, 0, blocm_x_subband
                    * btif_media_cb.encoder.s16NumOfChannels);

            /* Read PCM data and upsample them if needed */
            if (btif_media_aa_read_feeding(UIPC_CH_ID_AV_AUDIO)) // read the audio data
            {
                /* SBC encode and descramble frame */
                SBC_Encoder(&(btif_media_cb.encoder)); // SBC encode the frame
                A2D_SbcChkFrInit(btif_media_cb.encoder.pu8Packet);
                A2D_SbcDescramble(btif_media_cb.encoder.pu8Packet, btif_media_cb.encoder.u16PacketLength);
                /* Update SBC frame length */
                p_buf->len += btif_media_cb.encoder.u16PacketLength;
                nb_frame--; // one more frame encoded
                p_buf->layer_specific++;
            }
            else // no data was read
            {
                APPL_TRACE_WARNING("btif_media_aa_prep_sbc_2_send underflow %d, %d",
                    nb_frame, btif_media_cb.media_feeding_state.pcm.aa_feed_residue);
               /* add back the amount of data we should have sent to the counter */
                btif_media_cb.media_feeding_state.pcm.counter += nb_frame *
                     btif_media_cb.encoder.s16NumOfSubBands *
                     btif_media_cb.encoder.s16NumOfBlocks *
                     btif_media_cb.media_feeding.cfg.pcm.num_channel *
                     btif_media_cb.media_feeding.cfg.pcm.bit_per_sample / 8;
                /* no more pcm to read */
                nb_frame = 0;
...
            }

        } while (((p_buf->len + btif_media_cb.encoder.u16PacketLength) < btif_media_cb.TxAaMtuSize)
                && (p_buf->layer_specific < 0x0F) && nb_frame);

        if(p_buf->len)
        {
            /* timestamp of the media packet header represent the TS of the first SBC frame
               i.e the timestamp before including this frame */
            *((UINT32 *) (p_buf + 1)) = btif_media_cb.timestamp;

            btif_media_cb.timestamp += p_buf->layer_specific * blocm_x_subband;
...
            /* Enqueue the encoded SBC frame in AA Tx Queue */
            GKI_enqueue(&(btif_media_cb.TxAaQ), p_buf); // enqueue the packet
        }
        else
        {
            GKI_freebuf(p_buf);
        }
    }
}
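Note the loop bounds above: frames keep being packed into one media packet while the next frame still fits under TxAaMtuSize and while fewer than 15 frames have been added (the frame count travels in a 4-bit field of the media payload header). A tiny helper expressing that bound, illustrative only:

#include <stdint.h>

/* Frames that fit in one media packet given the SBC frame length and the
 * usable MTU, capped at 15 by the 4-bit frame-count field. */
static uint8_t sketch_frames_per_packet(uint16_t mtu, uint16_t frame_len)
{
    uint16_t n = (frame_len == 0) ? 0 : (uint16_t)(mtu / frame_len);
    return (uint8_t)(n > 15 ? 15 : n);
}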

Now let's analyze the flow of btif_media_aa_read_feeding(UIPC_CH_ID_AV_AUDIO):

/*******************************************************************************
 **
 ** Function         btif_media_aa_read_feeding
 **
 ** Description
 **
 ** Returns          void
 **
 *******************************************************************************/

BOOLEAN btif_media_aa_read_feeding(tUIPC_CH_ID channel_id)
{
    UINT16 event;
    UINT16 blocm_x_subband = btif_media_cb.encoder.s16NumOfSubBands * \
                             btif_media_cb.encoder.s16NumOfBlocks;
    UINT32 read_size;
    UINT16 sbc_sampling = 48000;
    UINT32 src_samples;
    UINT16 bytes_needed = blocm_x_subband * btif_media_cb.encoder.s16NumOfChannels * \
                          btif_media_cb.media_feeding.cfg.pcm.bit_per_sample / 8;
    static UINT16 up_sampled_buffer[SBC_MAX_NUM_FRAME * SBC_MAX_NUM_OF_BLOCKS
            * SBC_MAX_NUM_OF_CHANNELS * SBC_MAX_NUM_OF_SUBBANDS * 2];
    static UINT16 read_buffer[SBC_MAX_NUM_FRAME * SBC_MAX_NUM_OF_BLOCKS
            * SBC_MAX_NUM_OF_CHANNELS * SBC_MAX_NUM_OF_SUBBANDS];
    UINT32 src_size_used;
    UINT32 dst_size_used;
    BOOLEAN fract_needed;
    INT32   fract_max;
    INT32   fract_threshold;
    UINT32  nb_byte_read;

    /* Get the SBC sampling rate */
    switch (btif_media_cb.encoder.s16SamplingFreq)
    {
    case SBC_sf48000:
        sbc_sampling = 48000;
        break;
    case SBC_sf44100:
        sbc_sampling = 44100;
        break;
    case SBC_sf32000:
        sbc_sampling = 32000;
        break;
    case SBC_sf16000:
        sbc_sampling = 16000;
        break;
    }

    if (sbc_sampling == btif_media_cb.media_feeding.cfg.pcm.sampling_freq) { // btif_a2dp_setup_codec sets media_feeding.cfg.pcm.sampling_freq to 44100
        read_size = bytes_needed - btif_media_cb.media_feeding_state.pcm.aa_feed_residue;
        nb_byte_read = UIPC_Read(channel_id, &event,
                  ((UINT8 *)btif_media_cb.encoder.as16PcmBuffer) +
                  btif_media_cb.media_feeding_state.pcm.aa_feed_residue,
                  read_size);
        if (nb_byte_read == read_size) {
            btif_media_cb.media_feeding_state.pcm.aa_feed_residue = 0;
            return TRUE;
        } else { // did not read the expected amount of data; log an underflow
            APPL_TRACE_WARNING("### UNDERFLOW :: ONLY READ %d BYTES OUT OF %d ###",
                nb_byte_read, read_size);
            btif_media_cb.media_feeding_state.pcm.aa_feed_residue += nb_byte_read;
            return FALSE;
        }
    }

  ...

 

Note that sbc_sampling is selected from btif_media_cb.encoder.s16SamplingFreq. So where is btif_media_cb.encoder.s16SamplingFreq itself set?

Let's first look at how btif_media_cb.media_feeding.cfg.pcm.sampling_freq is set:

 

void btif_a2dp_setup_codec(void)
{
    tBTIF_AV_MEDIA_FEEDINGS media_feeding;
    tBTIF_STATUS status;

    APPL_TRACE_EVENT("## A2DP SETUP CODEC ##");

    GKI_disable();

    /* for now hardcode 44.1 khz 16 bit stereo PCM format */
    media_feeding.cfg.pcm.sampling_freq = 44100; // hardcoded
    media_feeding.cfg.pcm.bit_per_sample = 16;

Now let's see where btif_media_cb.encoder.s16SamplingFreq is set:

/*******************************************************************************
 **
 ** Function         btif_media_task_pcm2sbc_init
 **
 ** Description      Init encoding task for PCM to SBC according to feeding
 **
 ** Returns          void
 **
 *******************************************************************************/
static void btif_media_task_pcm2sbc_init(tBTIF_MEDIA_INIT_AUDIO_FEEDING * p_feeding)
{
    BOOLEAN reconfig_needed = FALSE;

    APPL_TRACE_DEBUG("PCM feeding:");
    APPL_TRACE_DEBUG("sampling_freq:%d", p_feeding->feeding.cfg.pcm.sampling_freq);
    APPL_TRACE_DEBUG("num_channel:%d", p_feeding->feeding.cfg.pcm.num_channel);
    APPL_TRACE_DEBUG("bit_per_sample:%d", p_feeding->feeding.cfg.pcm.bit_per_sample);

    /* Check the PCM feeding sampling_freq */
    switch (p_feeding->feeding.cfg.pcm.sampling_freq) // btif_media_cb.encoder.s16SamplingFreq is chosen from feeding.cfg.pcm.sampling_freq
    {
        case  8000:
        case 12000:
        case 16000:
        case 24000:
        case 32000:
        case 48000:
            /* For these sampling_freq the AV connection must be 48000 */
            if (btif_media_cb.encoder.s16SamplingFreq != SBC_sf48000)
            {
                /* Reconfiguration needed at 48000 */
                APPL_TRACE_DEBUG("SBC Reconfiguration needed at 48000");
                btif_media_cb.encoder.s16SamplingFreq = SBC_sf48000;
                reconfig_needed = TRUE;
            }
            break;

        case 11025:
        case 22050:
        case 44100:
            /* For these sampling_freq the AV connection must be 44100 */
            if (btif_media_cb.encoder.s16SamplingFreq != SBC_sf44100)
            {
                /* Reconfiguration needed at 44100 */
                APPL_TRACE_DEBUG("SBC Reconfiguration needed at 44100");
                btif_media_cb.encoder.s16SamplingFreq = SBC_sf44100;
                reconfig_needed = TRUE;
            }
            break;
        default:
            APPL_TRACE_DEBUG("Feeding PCM sampling_freq unsupported");
            break;
    }

 

On the sampling-rate relationship: btif_media_cb.encoder.s16SamplingFreq, which is the SBC encoder parameter, is derived from feeding.cfg.pcm.sampling_freq. In other words, inside the stack feeding.cfg.pcm.sampling_freq is the decisive value, and it is set in btif_a2dp_setup_codec.

That wraps up btif_media_aa_prep_2_send; now let's move on to the bta_av_ci_src_data_ready flow:

bta_av_ci_src_data_ready

 

/*******************************************************************************
**
** Function         bta_av_ci_src_data_ready
**
** Description      This function sends an event to the AV indicating that
**                  the phone has audio stream data ready to send and AV
**                  should call bta_av_co_audio_src_data_path() or
**                  bta_av_co_video_src_data_path().
**
** Returns          void
**
*******************************************************************************/
void bta_av_ci_src_data_ready(tBTA_AV_CHNL chnl)
{
    BT_HDR  *p_buf;

    if ((p_buf = (BT_HDR *) GKI_getbuf(sizeof(BT_HDR))) != NULL)
    {
        p_buf->layer_specific   = chnl;
        p_buf->event = BTA_AV_CI_SRC_DATA_READY_EVT;
        bta_sys_sendmsg(p_buf);
    }
}

The handler that executes is:

    bta_av_ci_data,         /* BTA_AV_CI_SRC_DATA_READY_EVT */

 

 

/*******************************************************************************
**
** Function         bta_av_ci_data
**
** Description      forward the BTA_AV_CI_SRC_DATA_READY_EVT to stream state machine
**
**
** Returns          void
**
*******************************************************************************/
static void bta_av_ci_data(tBTA_AV_DATA *p_data)
{
    tBTA_AV_SCB *p_scb;
    int     i;
    UINT8   chnl = (UINT8)p_data->hdr.layer_specific;

    for( i=0; i < BTA_AV_NUM_STRS; i++ )
    {
        p_scb = bta_av_cb.p_scb[i];

        if(p_scb && p_scb->chnl == chnl)
        {
            bta_av_ssm_execute(p_scb, BTA_AV_SRC_DATA_READY_EVT, p_data);
        }
    }
}

 

Here bta_av_ssm_execute(p_scb, BTA_AV_SRC_DATA_READY_EVT, p_data) is called; let's follow its flow:

AV Sevent(0x41)=0x1211(SRC_DATA_READY) state=3(OPEN)
/* SRC_DATA_READY_EVT */    {BTA_AV_DATA_PATH,      BTA_AV_SIGNORE,        BTA_AV_OPEN_SST },

 

The action executed is BTA_AV_DATA_PATH; the function is shown below:

/*******************************************************************************
**
** Function         bta_av_data_path
**
** Description      Handle stream data path.
**
** Returns          void
**
*******************************************************************************/
void bta_av_data_path (tBTA_AV_SCB *p_scb, tBTA_AV_DATA *p_data)
{
    BT_HDR  *p_buf = NULL;
    UINT32  data_len;
    UINT32  timestamp;
    BOOLEAN new_buf = FALSE;
    UINT8   m_pt = 0x60 | p_scb->codec_type;
    tAVDT_DATA_OPT_MASK     opt;
    UNUSED(p_data);

    //Always get the current number of bufs que'd up
    p_scb->l2c_bufs = (UINT8)L2CA_FlushChannel (p_scb->l2c_cid, L2CAP_FLUSH_CHANS_GET);

    if (!list_is_empty(p_scb->a2d_list)) {
        p_buf = (BT_HDR *)list_front(p_scb->a2d_list);
        list_remove(p_scb->a2d_list, p_buf);
         /* use q_info.a2d data, read the timestamp */
        timestamp = *(UINT32 *)(p_buf + 1);
    }
    else
    {
        new_buf = TRUE;
        /* a2d_list empty, call co_data, dup data to other channels */
        p_buf = (BT_HDR *)p_scb->p_cos->data(p_scb->codec_type, &data_len,
                                         &timestamp); // fetch the assembled media data to send

        if (p_buf)
        {
            /* use the offset area for the time stamp */
            *(UINT32 *)(p_buf + 1) = timestamp;
...
        }
    }

    if(p_buf)
    {
        if(p_scb->l2c_bufs < (BTA_AV_QUEUE_DATA_CHK_NUM))
        {
            /* there's a buffer, just queue it to L2CAP */
            /*  There's no need to increment it here, it is always read from L2CAP see above */
            /* p_scb->l2c_bufs++; */
            /*
            APPL_TRACE_ERROR("qw: %d", p_scb->l2c_bufs);
            */

            /* opt is a bit mask, it could have several options set */
            opt = AVDT_DATA_OPT_NONE;
            if (p_scb->no_rtp_hdr)
            {
                opt |= AVDT_DATA_OPT_NO_RTP;
            }

            AVDT_WriteReqOpt(p_scb->avdt_handle, p_buf, timestamp, m_pt, opt); // the actual send
            p_scb->cong = TRUE;
        }
        else
        {
            /* there's a buffer, but L2CAP does not seem to be moving data */
            if(new_buf)
            {
                /* just got this buffer from co_data,
                 * put it in queue */
                list_append(p_scb->a2d_list, p_buf);
            }
            else
            {
                /* just dequeue it from the a2d_list */
                if (list_length(p_scb->a2d_list) < 3) {
                    /* put it back to the queue */
                    list_prepend(p_scb->a2d_list, p_buf);
                }
                else
                {
                    /* too many buffers in a2d_list, drop it. */
                    bta_av_co_audio_drop(p_scb->hndl);
                    GKI_freebuf(p_buf);
                }
            }
        }
    }
}

 

The main thing to analyze here is the p_scb->p_cos->data(p_scb->codec_type, &data_len, &timestamp) call.

/* the call out functions for audio stream */
const tBTA_AV_CO_FUNCTS bta_av_a2d_cos =
{
    bta_av_co_audio_init,
    bta_av_co_audio_disc_res,
    bta_av_co_audio_getconfig,
    bta_av_co_audio_setconfig,
    bta_av_co_audio_open,
    bta_av_co_audio_close,
    bta_av_co_audio_start,
    bta_av_co_audio_stop,
    bta_av_co_audio_src_data_path,
    bta_av_co_audio_delay
};

 

The function executed is:

/*******************************************************************************
 **
 ** Function         bta_av_co_audio_src_data_path
 **
 ** Description      This function is called to manage data transfer from
 **                  the audio codec to AVDTP.
 **
 ** Returns          Pointer to the GKI buffer to send, NULL if no buffer to send
 **
 *******************************************************************************/
void * bta_av_co_audio_src_data_path(tBTA_AV_CODEC codec_type, UINT32 *p_len,
                                     UINT32 *p_timestamp)
{
    BT_HDR *p_buf;
    UNUSED(p_len);

    FUNC_TRACE();

    p_buf = btif_media_aa_readbuf(); // returns GKI_dequeue(&(btif_media_cb.TxAaQ))
    if (p_buf != NULL)
    {
        switch (codec_type)
        {
        case BTA_AV_CODEC_SBC:
            /* In media packet SBC, the following information is available:
             * p_buf->layer_specific : number of SBC frames in the packet
             * p_buf->word[0] : timestamp
             */
            /* Retrieve the timestamp information from the media packet */
            *p_timestamp = *((UINT32 *) (p_buf + 1));

            /* Set up packet header */
            bta_av_sbc_bld_hdr(p_buf, p_buf->layer_specific);
            break;


        default:
            APPL_TRACE_ERROR("bta_av_co_audio_src_data_path Unsupported codec type (%d)", codec_type);
            break;
        }
    }
    return p_buf;
}

 

The key takeaway is that the data is ultimately dequeued from btif_media_cb.TxAaQ and then sent to the remote device via AVDT_WriteReqOpt.
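For completeness: the header that bta_av_sbc_bld_hdr prepends is essentially the one-byte SBC media payload header defined by the A2DP specification, carrying the fragmentation flags and the number of SBC frames in the packet. A sketch of building it for an unfragmented packet, assuming the standard A2DP SBC payload format:

#include <stdint.h>

/* Sketch: A2DP SBC media payload header. Bit 7 F (fragmented), bit 6 S
 * (start), bit 5 L (last), bits 3..0 = number of SBC frames. */
static uint8_t sketch_sbc_media_payload_hdr(uint8_t num_frames)
{
    return (uint8_t)(num_frames & 0x0F); /* unfragmented packet: F = S = L = 0 */
}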

To sum up, the main points are:

  1. Send the A2DP_CTRL_CMD_START command over the audio control channel.
  2. Open the data channel: UIPC_Open(UIPC_CH_ID_AV_AUDIO, btif_a2dp_data_cb).
  3. Connect to the data channel over a socket: common->audio_fd = skt_connect(A2DP_DATA_PATH, common->buffer_sz).
  4. The platform audio layer writes data into the socket common->audio_fd.
  5. A periodic timer, alarm_set_periodic(btif_media_cb.media_alarm, BTIF_MEDIA_TIME_TICK, btif_media_task_alarm_cb, NULL), reads the audio data every 20 ms, encodes it and sends it to the device.

And with that, the analysis of the A2DP data sending flow is complete.
