一、代码分析

在公司项目中,音频解码及播放是把数据传到Java层进行解码播放的,其实这个步骤没有必要,完全可以在底层进行处理。

通过阅读代码发现其实已经实现了底层解码,那么为什么不直接使用底层解码播放呢?我们可以先看看原先的代码是怎么做的:







代码中通过定义的宏DECODE_AUDIO_IN_JAVA来控制mAudioCodec对象是否创建,然后再通过mAudioCodec对象是否为null来控制音频数据是否传给Java层处理。代码原来已经支持了在底层解码、再把PCM数据传回上层使用AudioTrack进行播放,那我修改宏DECODE_AUDIO_IN_JAVA来让其在底层进行解码,运行后会发现播放的声音非常卡顿。

二、解决办法

最终发现,原来是在底层处理时传给播放端的音频数据大小不对,才导致播放的声音非常卡顿。

解决办法就是把传给播放端的音频数据大小参数(即下图红框处的写法)修改成 info.size 就可以了。

三、底层播放音频

但是这样音频的播放仍然是交给 Java 层完成的。

我们可以通过使用OpenSLES来处理底层音频的播放

3.1 OpenSLRender类的实现

#ifndef _OPENSLRENDER_HEAD_
#define _OPENSLRENDER_HEAD_

#include <pthread.h>

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <utils/List.h>
#include <utils/threads.h>

#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>

namespace android {

/**
 * Renders decoded PCM audio through OpenSL ES.
 *
 * Incoming ABuffers are held in a pts-ordered jitter queue. A dedicated
 * thread waits until enough data has accumulated, primes the first buffer,
 * and from then on the OpenSL buffer-queue callback pulls one buffer per
 * completed playback buffer. Underruns are padded with silence; persistent
 * overruns are resolved by dropping frames.
 */
class OpenSLRender : public Thread {
public:
    // buffertime:   pre-buffering target in microseconds before playback starts.
    // bufferframes: queue-length bound (in frames) used for start/overflow checks.
    OpenSLRender(int64_t buffertime, int32_t bufferframes = 5);
    ~OpenSLRender();

    // Creates engine -> output mix -> buffer-queue player and starts the
    // buffering thread. Returns true on success (failures abort via CHECK).
    bool init(int32_t chanNum, int rate);
    // Stops playback and destroys the OpenSL objects (idempotent).
    void stop();
    // Adjusts the pre-buffering target at runtime.
    void setBufferTimes(int64_t buffertime);
    // Feeds one decoded PCM buffer; it must carry an int64 "timePts" meta entry.
    void queueInputBuffer(sp<ABuffer> data);
    // Pops the next buffer (data or silence) and enqueues it to OpenSL.
    void playerCallback();

private:
    // OpenSL ES handles, created in init() and released in destroy().
    SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
    SLObjectItf bqPlayerObject;
    SLPlayItf bqPlayerPlay;
    SLObjectItf outputMixObject;
    SLObjectItf engineObject;

    List<sp<ABuffer>> mList;           // pts-ordered jitter queue
    int64_t mBufferTimeUs;             // pre-buffering target (us)
    int32_t mBufferFrames;             // queue bound in frames
    int64_t mLasPts;                   // NOTE(review): appears unused here — confirm
    bool bFist;                        // true until playback has been primed
    pthread_mutex_t startMutex;        // guards the start handshake...
    pthread_cond_t startCond;          // ...signalled once buffering suffices
    Mutex dataMutex;                   // serializes queue access across threads
    bool bRun;                         // cleared by stop(); guards SL access
    sp<ABuffer> mMuteData;             // silence frame used on underrun
    int64_t mlastAudtime;              // start of the current 1-second stats window
    int mPlayAudFrames;                // frames played in the window
    int mDropFrames;                   // frames dropped in the window
    int32_t muteCounts;                // silence frames inserted in the window
    sp<ABuffer> mRenderData;           // buffer currently enqueued to OpenSL
    int32_t mOverFlowContinuetimes;    // consecutive callbacks with a full queue

private:
    // Thread hook: waits for the start signal, primes the first buffer, then
    // exits (subsequent playback is driven by bqPlayerCallback).
    virtual bool threadLoop();
    sp<ABuffer> intervalOut(int gap);      // keep every gap-th buffer, drop the rest
    sp<ABuffer> dropToMaxBuffer(int gap);  // drop up to 'gap' buffers, counting them
    sp<ABuffer> dropAutoNums();            // drop one buffer per inserted mute frame
    sp<ABuffer> getNextBuffer();           // pop FIFO head, NULL when empty
    void destroy();                        // tear down OpenSL objects
    // Static trampoline registered with the buffer queue; 'context' is 'this'.
    static void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context);

    DISALLOW_EVIL_CONSTRUCTORS(OpenSLRender);
};

}  // namespace android
#endif  // _OPENSLRENDER_HEAD_
//#define LOG_NDEBUG 0
#define LOG_TAG "OpenSLRender" #include"OpenSLRender.h" #define UNUSED(x) ((void)x)
#define AUD_DROP_THRESHOLD 5
namespace android{
OpenSLRender::OpenSLRender(int64_t bufferTime,int32_t bufferFrames):
mBufferTimeUs(bufferTime),
mBufferFrames(bufferFrames),
bFist(true),
startMutex(PTHREAD_MUTEX_INITIALIZER),
startCond(PTHREAD_COND_INITIALIZER),
bRun(true),
mRenderData(NULL),
mMuteData(new ABuffer(2048)),
muteCounts(0),
mlastAudtime(0),
mPlayAudFrames(0),
mDropFrames(0),
mOverFlowContinuetimes(0){
memset(mMuteData->data(),0,mMuteData->size());
}
OpenSLRender::~OpenSLRender(){
    stop();
    // Wake threadLoop() in case it is still parked in pthread_cond_wait();
    // otherwise requestExitAndWait() below would block forever when playback
    // never started. Running playerCallback() after stop() is harmless: it
    // returns immediately once bRun is false.
    pthread_mutex_lock(&startMutex);
    pthread_cond_signal(&startCond);
    pthread_mutex_unlock(&startMutex);
    requestExit();
    requestExitAndWait();
}

// Builds the OpenSL ES chain (engine -> output mix -> buffer-queue player),
// registers bqPlayerCallback as the refill callback, switches the player to
// PLAYING and starts the buffering thread.
// chanNum: channel count from the decoder (0 is treated as stereo).
// rate:    sample rate in Hz; 44100 selects 44.1 kHz, everything else 48 kHz.
bool OpenSLRender::init(int32_t chanNum, int rate){
    SLEngineItf engineEngine;
    SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
    const SLEnvironmentalReverbSettings reverbSettings =
            SL_I3DL2_ENVIRONMENT_PRESET_DEFAULT;
    SLresult result;

    // Engine object + engine interface (needed to create everything else).
    result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    CHECK(SL_RESULT_SUCCESS == result);
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    CHECK(SL_RESULT_SUCCESS == result);
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
    CHECK(SL_RESULT_SUCCESS == result);

    // Output mix, with environmental reverb as a non-required interface.
    // (Renamed from ids/req: the player below needs its own interface arrays
    // and the original redeclared the same names in one scope.)
    const SLInterfaceID mixIds[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean mixReq[1] = {SL_BOOLEAN_FALSE};
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, mixIds, mixReq);
    CHECK(SL_RESULT_SUCCESS == result);
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    CHECK(SL_RESULT_SUCCESS == result);
    // Reverb is optional: it may be unavailable (feature absent, CPU load, or
    // missing MODIFY_AUDIO_SETTINGS permission), so failure is ignored.
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                              &outputMixEnvironmentalReverb);
    if (SL_RESULT_SUCCESS == result) {
        result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings);
        (void)result;
    }

    // PCM source format. Matching the device's native rate lets the fast
    // audio path trigger.
    SLuint32 samplesPerSec = SL_SAMPLINGRATE_48;
    if (44100 == rate) {
        samplesPerSec = SL_SAMPLINGRATE_44_1;
    }
    SLuint32 audChan = chanNum;
    SLDataLocator_AndroidSimpleBufferQueue loc_bufq =
            {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    // NOTE(review): the speaker mask is always FRONT_LEFT|FRONT_RIGHT even
    // when chanNum == 1 — confirm mono input is never fed in.
    SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM,
                                   (audChan == 0) ? 2 : audChan,
                                   samplesPerSec,
                                   SL_PCMSAMPLEFORMAT_FIXED_16,
                                   SL_PCMSAMPLEFORMAT_FIXED_16,
                                   SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
                                   SL_BYTEORDER_LITTLEENDIAN};
    SLDataSource audioSrc = {&loc_bufq, &format_pcm};

    // Sink: the output mix created above.
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};

    // Audio player. SL_IID_EFFECTSEND / SL_IID_MUTESOLO stay disabled because
    // requiring them defeats the fast audio path. Arrays are sized 2 to match
    // the interface count actually passed (the original declared 3 slots but
    // initialized and used only 2).
    const SLInterfaceID playerIds[2] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME};
    const SLboolean playerReq[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc,
                                                &audioSnk, 2, playerIds, playerReq);
    CHECK(SL_RESULT_SUCCESS == result);
    result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
    CHECK(SL_RESULT_SUCCESS == result);
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
    CHECK(SL_RESULT_SUCCESS == result);
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
                                             &bqPlayerBufferQueue);
    CHECK(SL_RESULT_SUCCESS == result);

    // Refill callback: fired each time the player finishes one buffer.
    result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, this);
    CHECK(SL_RESULT_SUCCESS == result);
    result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
    CHECK(SL_RESULT_SUCCESS == result);

    // Buffering thread: blocks until enough input is queued, then primes the
    // first buffer (see threadLoop()).
    status_t err = run("opensl buffering", ANDROID_PRIORITY_AUDIO);
    CHECK(err == OK);
    return true;
}

// Tears the OpenSL chain down in reverse creation order. Handles are reset to
// NULL after Destroy so repeated calls are safe.
void OpenSLRender::destroy(){
    ALOGE("opeslRender destroy ![%s][%d]",__FUNCTION__,__LINE__);
    // Guarded: the original dereferenced bqPlayerPlay unconditionally, which
    // is undefined when init() never completed.
    if (bqPlayerPlay != NULL) {
        (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_STOPPED);
    }
    if (bqPlayerObject != NULL) {
        (*bqPlayerObject)->Destroy(bqPlayerObject);
        bqPlayerObject = NULL;
        bqPlayerPlay = NULL;
        bqPlayerBufferQueue = NULL;
    }
    if (outputMixObject != NULL) {
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
    }
    if (engineObject != NULL) {
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
    }
}
// Idempotent shutdown: the first call clears bRun (which makes subsequent
// playerCallback invocations bail out) and releases the OpenSL objects;
// later calls are no-ops.
// NOTE(review): dataMutex is deliberately not taken here (a commented-out
// lock existed) — confirm destroy() cannot race the SL callback.
void OpenSLRender::stop(){
    ALOGE("OpenSLRender_stop:[%s%d]",__FUNCTION__,__LINE__);
    if(!bRun){
        return;
    }
    bRun = false;
    destroy();
}
// Lets callers retune the pre-buffering target while playback is running.
void OpenSLRender::setBufferTimes(int64_t buffertime){
    AutoMutex autoLock(dataMutex);  // mBufferTimeUs is read on the callback path
    mBufferTimeUs = buffertime;
}
// Appends one decoded PCM buffer to the jitter queue.
// The buffer must carry an int64 "timePts" meta entry; packets older than
// the newest queued pts are discarded to keep the queue monotonic.
// While still pre-buffering (bFist), wakes threadLoop() once half of the
// buffering target (by time or by frame count) has been reached.
void OpenSLRender::queueInputBuffer(sp<ABuffer> data){
    AutoMutex _l(dataMutex);
    // Reject out-of-order input.
    if(!mList.empty()){
        sp<ABuffer> max = *(--mList.end());
        int64_t dataPts = 0, maxPts = 0;
        CHECK(data->meta()->findInt64("timePts", &dataPts));
        CHECK(max->meta()->findInt64("timePts", &maxPts));
        if(dataPts < maxPts){
            // Fixed: int64_t requires %lld (%ld is UB on 32-bit builds), and
            // the original printed maxPts under the "data:" label and
            // dataPts under "list:" — the arguments were swapped.
            ALOGD("[%s%d] pts erro data:%lld list:%lld\n",__FUNCTION__,__LINE__,
                  (long long)dataPts,(long long)maxPts);
            return;
        }
    }
#if ENABLE_STATISTICS
    ALOGD(COMMON_DEBUG," Audio in, %lld remain __%d__ [%s%d]\n",
          (long long)ALooper::GetNowUs(),(int)mList.size(),__FUNCTION__,__LINE__);
#endif
    mList.push_back(data);
    if(bFist){
        sp<ABuffer> min = *mList.begin();
        sp<ABuffer> max = *(--mList.end());
        int64_t minPts = 0, maxPts = 0;
        CHECK(min->meta()->findInt64("timePts", &minPts));
        CHECK(max->meta()->findInt64("timePts", &maxPts));
        // Start once HALF the configured depth is reached (time or frames),
        // trading a little underrun safety for lower startup latency.
        if((maxPts - minPts > mBufferTimeUs/2) || mList.size() >= (size_t)(mBufferFrames/2)){
            // Enough buffered: release threadLoop().
            pthread_mutex_lock(&startMutex);
            pthread_cond_signal(&startCond);
            pthread_mutex_unlock(&startMutex);
        }
    }
}
void OpenSLRender::playerCallback(){
AutoMutex _l(dataMutex);
if(!bRun){
return;
}
int64_t nowUs = ALooper::GetNowUs();
if(!mList.empty()){
sp<ABuffer> min = *mList.begin();
sp<ABuffer> max = *(--mList.end());
int64_t minPts=0,maxPts=0;
CHECK(min->meta()->findInt64("timePts", &minPts));
CHECK(max->meta()->findInt64("timePts", &maxPts));
//if(maxPts - minPts > mBufferTimeUs -timeDuration){
if(mList.size()>=mBufferFrames) {
mOverFlowContinuetimes++;
}else{
mOverFlowContinuetimes = 0;
}
if(mOverFlowContinuetimes > AUD_DROP_THRESHOLD)
{
//"Break out"
//Take one output to render for every two buffers
//data = intervalOut(2);
//data = dropAutoNums();
int flowFrames = mList.size() - mBufferFrames;
if( flowFrames >= mBufferFrames){
//ALOGD(COMMON_DEBUG,"video jetterbuff dopallflows %d [%s%d] mList.size():%ld \n",flowFrames,__FUNCTION__,__LINE__,mList.size());
sp<ABuffer> data = dropToMaxBuffer(flowFrames);
mRenderData = getNextBuffer();
}else{
//"Break out"
//Take one output to render for every two buffers
sp<ABuffer> data = dropToMaxBuffer(2);
mRenderData = getNextBuffer();
}
mOverFlowContinuetimes = 0;
}else{
//one by one
mRenderData = getNextBuffer();
mPlayAudFrames++;
}
}else{
mRenderData = mMuteData;
muteCounts++;
mOverFlowContinuetimes = 0;
}
if(mRenderData ==NULL){
//just give the mutex data
mRenderData = mMuteData;
muteCounts++;
}
SLresult result;
//enqueue another buffer
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, mRenderData->data(), mRenderData->size());
// the most likely other result is SL_RESULT_BUFFER_INSUFFICIENT,
// which for this code example would indicate a programming error
if (SL_RESULT_SUCCESS != result) {
} if(!mlastAudtime)
{
mlastAudtime = nowUs;
}
if(nowUs - mlastAudtime >= 1000*1000)
{
ALOGE("playback(%d) droped(%d) muteCounts(%d) frames in one second,QSize:%d",mPlayAudFrames,mDropFrames,muteCounts,(int32_t)mList.size());
mDropFrames = 0;
mPlayAudFrames = 0;
mlastAudtime = nowUs;
muteCounts = 0;
}
} void OpenSLRender::bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context){
UNUSED(bq);
OpenSLRender * pRender =static_cast<OpenSLRender*>(context);
if(pRender){
pRender->playerCallback();
}
} sp<ABuffer> OpenSLRender::intervalOut(int gap){
int count =0;
sp<ABuffer> data = NULL;
while( (data = getNextBuffer())!=NULL && ++count < gap){
//ALOGD(COMMON_DEBUG," audio drop one [%s%d] remain mList.size():%ld \n",__FUNCTION__,__LINE__,mList.size());
}
return data;
} sp<ABuffer> OpenSLRender::dropToMaxBuffer(int gap){
sp<ABuffer> data = NULL;
int count = 0;
while( (data = getNextBuffer())!=NULL && count++ < gap){
mDropFrames++;
//ALOGD(COMMON_DEBUG," audio drop one [%s%d] remain mList.size():%ld \n",__FUNCTION__,__LINE__,mList.size());
}
return data;
}
// Drops one queued buffer per previously-inserted mute frame so the stream
// catches back up after an underrun; returns the buffer that terminated the
// loop (or NULL when the queue empties first). Caller must hold dataMutex.
// NOTE(review): the terminating buffer is returned rather than dropped —
// confirm callers intend to play (or discard) it.
sp<ABuffer> OpenSLRender::dropAutoNums(){
sp<ABuffer> data = NULL;
while( (data = getNextBuffer())!=NULL && muteCounts>0){
muteCounts--;
//ALOGD(COMMON_DEBUG," audio drop one [%s%d] remain mList.size():%ld \n",__FUNCTION__,__LINE__,mList.size());
}
return data;
// Pops and returns the oldest queued buffer (FIFO order); NULL when empty.
// Caller must hold dataMutex.
} sp<ABuffer> OpenSLRender::getNextBuffer(){
if(!mList.empty()){
sp<ABuffer> data = *mList.begin();
mList.erase(mList.begin());
return data;
}
return NULL;
// Thread entry point: parks until queueInputBuffer() signals that enough
// data is buffered, then primes the first buffer via playerCallback().
// Returning false makes the Thread loop exit immediately — from then on
// playback is driven entirely by the OpenSL buffer-queue callback.
// NOTE(review): pthread_cond_wait is not wrapped in a predicate loop, so a
// spurious wakeup would start playback early with a shallower buffer —
// confirm this is acceptable.
} bool OpenSLRender::threadLoop(){
if(bFist){
pthread_mutex_lock(&startMutex);
pthread_cond_wait(&startCond,&startMutex);
pthread_mutex_unlock(&startMutex);
ALOGE("[%s%d]start out\n",__FUNCTION__,__LINE__);
bFist = false;
}
//to start play
playerCallback();
return false;
}
}

3.2 OpenSLRender类的使用

3.2.1 创建OpenSLRender对象并初始化

3.2.2 OpenSLRender的数据处理播放

3.2.3 OpenSLRender的停止

投屏Sink端音频底层解码并用OpenSLES进行播放的更多相关文章

  1. iphone 与 PC端电脑投屏设置

    1. iphone端安装: 屏幕投影助手 下载地址 https://itunes.apple.com/cn/app/ping-mu-tou-ying-zhu-shou/id1152332174?mt= ...

  2. FFMPEG视音频编解码零基础学习方法-b

    感谢大神分享,虽然现在还看不懂,留着大家一起看啦 PS:有不少人不清楚“FFmpeg”应该怎么读.它读作“ef ef em peg” 0. 背景知识 本章主要介绍一下FFMPEG都用在了哪里(在这里仅 ...

  3. [总结]FFMPEG视音频编解码零基础学习方法

    在CSDN上的这一段日子,接触到了很多同行业的人,尤其是使用FFMPEG进行视音频编解码的人,有的已经是有多年经验的“大神”,有的是刚开始学习的初学者.在和大家探讨的过程中,我忽然发现了一个问题:在“ ...

  4. [转载] FFMPEG视音频编解码零基础学习方法

    在CSDN上的这一段日子,接触到了很多同行业的人,尤其是使用FFMPEG进行视音频编解码的人,有的已经是有多年经验的“大神”,有的是刚开始学习的初学者.在和大家探讨的过程中,我忽然发现了一个问题:在“ ...

  5. [总结]FFMPEG视音频编解码零基础学习方法【转】

    本文转载自:http://blog.csdn.net/leixiaohua1020/article/details/15811977 在CSDN上的这一段日子,接触到了很多同行业的人,尤其是使用FFM ...

  6. ios屏幕怎么投屏到电脑显示器

    iphone在国内一直都很受欢迎,为什么这么受欢迎呢?其实苹果手机操作系统非常的新颖,让人对手机有了重新的认识.但是ios屏幕怎么投屏到电脑显示器.感兴趣的一起阅读下面的内容吧! 使用工具: 苹果手机 ...

  7. iphone屏幕镜像怎么用 手机投屏电脑

    手机看视频有的时候总会感觉到累,屏幕太小看的不够爽又或者用手一直拿着手机看累得慌.我就就喜欢看电视因为电视屏幕大看的爽,而且现在很多手机视频都可以往电视上投影视频,那么iphone屏幕镜像怎么用? 使 ...

  8. iphone怎么投屏到电脑屏幕上

    随着苹果手机的更显换代,苹果手机的功能越来越强大,其中iphone手机更新了airplay镜像功能,所以想要手机投屏电脑的小伙伴就更加方便了,但是iphone怎么投屏到电脑呢?大家不用着急,下面即将为 ...

  9. 教你如何开发一个完败Miracast的投屏新功能

      手机与电视,是陪伴在我们生活最常见,陪伴最长久的智能设备.迅猛发展的移动终端与通信技术,赋予了手机更广阔多元的应用生态,大屏电视则以大视野和震撼影音,弥补了手里方寸带来的视觉局限.而今,手机的延伸 ...

  10. CS5265 新出TYPEC转HDMI 4K60 高清投屏转接方案|可替代RTD2172

    CS5265是一种高度集成的单芯片,主要用于设计typec转HDMI转接线或者typeC转HDMI转换器,应用在各种手机或者电脑显示端设备当中.用CS5265设计的TYPEC转HDMI 4K高清投屏线 ...

随机推荐

  1. MyISAM存储引擎的表级锁

    MyISAM存储引擎的表级锁 如果了解过文件锁的用法,那理解数据库锁就简单了.锁其实就协调多个进程或线程并发时,处理访问同一个资源的机制.在项目开发中,表锁是MySQL中作用范围较大的一种锁,它锁定的 ...

  2. U390630 分考场题解

    题目链接:U390630 分考场 本题来自于2019年蓝桥杯国赛的题.在洛谷上也被标为了假题.原因是首先官方在需要输出浮点数的情况下,并没有开启spj,并且官方所给的数据当中,总有一两个数据以不知道到 ...

  3. 小知识:RHEL7上设置Keepalived日志重定向

    1.配置 /etc/sysconfig/keepalived 文件 2.添加keepalived日志保存位置的配置 3.修改 /lib/systemd/system/keepalived.servic ...

  4. Mac 上 redis 的安装方法

    1.由于需要用到编译,所以先安装xcode,注意利用appstore安装xcode后,记得打开xcode 点install,也可以建立一个macos项目,运行下 试下. 2. 去官网下载:https: ...

  5. FDMemTable用法

    procedure TForm1.FormCreate(Sender: TObject); Var i:integer; begin // i:=15; self.FDMemTable1.FieldD ...

  6. Hive-beeline连接报错:root is not allowed to impersonate root (state=08S01,code=0)

    问题描述 使用hive/bin目录下的hive启动客户端,使用!connect jdbc:hive2://hadoop01:10000连接Hive数据仓库时提示输入用户名和密码,输入数据库的用户名和密 ...

  7. crontab 里如何创建带日期的日志文件

    需求在crontab 执行定时任务时,将执行的任务输出到带日期的文件中 crontab中,执行脚本需要传入系统时间date +"%Y-%m-%d" 问题今天遇到一个crontab问 ...

  8. sshd命令-测试sshd_config配置是否正确

    sshd命令来自于英文词组"SSH daemon"的缩写,其功能是用于openssh服务器守护进程.openssh套件能够为两台主机之间建立加密的.可信任的数据通信服务,是rlog ...

  9. JS leetcode 买卖股票的最佳时机 题解分析,我离职了。

    壹 ❀ 引 昨天下班后,还是找经理提出了辞职,没有犹豫的裸辞,今天与人事的对话不小心被后台的同事听到,一下在公司传开了,下午我与同事们多人对线,被他们的消息轰炸....没错,我真的要走了. 因为什么原 ...

  10. NC15172 情人节的电灯泡

    题目链接 题目 题目描述 情人节到了,小芳和小明手牵手,打算过一个完美的情人节,但是小刚偏偏也来了,当了一个明晃晃的电灯泡,小明很尴尬,就和小刚说,我交给你个任务,你完成了我俩就带你玩,否则你就回家吧 ...