In the previous chapter, Android本地视频播放器开发--SDL编译, we built the SDL support library. We used SDL 2.0 at the time, but some of its APIs have since changed, so in what follows we use the SDL 1.3 library instead; I will upload both its source and the compiled library. In this chapter we use FFmpeg to decode the video frames of a video file and SDL to display them.
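For context on the API change: the display path below relies on SDL 1.x's video-overlay API (SDL_SetVideoMode, SDL_CreateYUVOverlay, SDL_DisplayYUVOverlay), which was removed in SDL 2.0. The rough SDL 2.0 equivalent is sketched here only to illustrate why the code in this chapter needs the 1.3 library; the names w, h and the plane pointers are illustrative and the sketch is not used anywhere in this chapter:

[cpp]
// SDL 2.0 replacement for the SDL 1.x overlay path (sketch only)
SDL_Window   *win = SDL_CreateWindow("player", SDL_WINDOWPOS_UNDEFINED,
                                     SDL_WINDOWPOS_UNDEFINED, w, h, 0);
SDL_Renderer *ren = SDL_CreateRenderer(win, -1, 0);
SDL_Texture  *tex = SDL_CreateTexture(ren, SDL_PIXELFORMAT_YV12,
                                      SDL_TEXTUREACCESS_STREAMING, w, h);

// per decoded frame:
SDL_UpdateYUVTexture(tex, NULL, y_plane, y_pitch, u_plane, u_pitch, v_plane, v_pitch);
SDL_RenderClear(ren);
SDL_RenderCopy(ren, tex, NULL, NULL);
SDL_RenderPresent(ren);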

1、Decodec_Video.c is my video-decoding source file; its contents are as follows:

[cpp]
#include <stdio.h>  
#include <android/log.h>  
 
#ifdef __MINGW32__  
#undef main /* Prevents SDL from overriding main() */  
#endif  
 
#include "../SDL/include/SDL.h"  
#include "../SDL/include/SDL_thread.h"  
 
#include "VideoPlayerDecode.h"  
#include "../ffmpeg/libavutil/avutil.h"  
#include "../ffmpeg/libavcodec/avcodec.h"  
#include "../ffmpeg/libavformat/avformat.h"  
#include "../ffmpeg/libswscale/swscale.h"  
 
AVFormatContext *pFormatCtx; 
int             i, videoStream; 
AVCodecContext  *pCodecCtx; 
AVCodec         *pCodec; 
AVFrame         *pFrame; 
AVPacket        packet; 
int             frameFinished; 
float           aspect_ratio; 
 
static struct SwsContext *img_convert_ctx; 
SDL_Surface     *screen; 
SDL_Overlay *bmp; 
SDL_Rect        rect; 
SDL_Event       event; 
 
 
JNIEXPORT jint JNICALL Java_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode_VideoPlayer 
(JNIEnv *env, jclass clz, jstring fileName)
{
    const char* local_title = (*env)->GetStringUTFChars(env, fileName, NULL); 
    av_register_all(); // register all supported container formats and codecs
    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { 
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); 
        exit(1); 
    } 
    if(avformat_open_input(&pFormatCtx, local_title, NULL, NULL) != 0) 
                return -1; 
    if(avformat_find_stream_info(pFormatCtx, NULL) < 0) 
                return -1; 
    av_dump_format(pFormatCtx, -1, local_title, 0); 
    videoStream=-1; 
    for(i=0; i<pFormatCtx->nb_streams; i++) 
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { 
            videoStream=i; 
            break; 
        } 
    if(videoStream==-1) 
        return -1; // Didn't find a video stream  
    // Get a pointer to the codec context for the video stream  
    pCodecCtx=pFormatCtx->streams[videoStream]->codec; 
 
    // Find the decoder for the video stream  
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id); 
    if(pCodec==NULL) { 
        fprintf(stderr, "Unsupported codec!\n"); 
        return -1; // Codec not found  
    } 
    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0)return -1; 
    pFrame = avcodec_alloc_frame(); 
    if(pFrame == NULL)return -1; 
    // Make a screen to put our video  
#ifndef __DARWIN__  
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0); 
#else  
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0); 
#endif  
    if(!screen) { 
        fprintf(stderr, "SDL: could not set video mode - exiting\n"); 
        exit(1); 
    } 
    // Allocate a place to put our YUV image on that screen  
    bmp = SDL_CreateYUVOverlay(pCodecCtx->width, 
            pCodecCtx->height, 
            SDL_YV12_OVERLAY, 
            screen); 
    // The overlay above is YV12, so the conversion target must be planar
    // YUV420P rather than a packed RGB format.
    img_convert_ctx = sws_getContext(pCodecCtx->width,
                          pCodecCtx->height, pCodecCtx->pix_fmt,
                          pCodecCtx->width, pCodecCtx->height,
                          PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
 
    // Read packets and display each decoded video frame
    i=0; 
    while(av_read_frame(pFormatCtx, &packet)>=0) { 
        // Is this a packet from the video stream?  
        if(packet.stream_index==videoStream) { 
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); 
            // Did we get a video frame?  
            if(frameFinished) { 
                SDL_LockYUVOverlay(bmp); 
                 
                // Map the overlay's planes into an AVPicture so sws_scale can
                // write directly into it. SDL's YV12 overlay keeps its planes
                // in Y, V, U order while YUV420P is Y, U, V, hence the swap
                // of indices 1 and 2 below.
                AVPicture pict;
                pict.data[0] = bmp->pixels[0];
                pict.data[1] = bmp->pixels[2];
                pict.data[2] = bmp->pixels[1];

                pict.linesize[0] = bmp->pitches[0];
                pict.linesize[1] = bmp->pitches[2];
                pict.linesize[2] = bmp->pitches[1];

                sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0,
                          pCodecCtx->height, pict.data, pict.linesize);
                SDL_UnlockYUVOverlay(bmp); 
 
                rect.x = 0; 
                rect.y = 0; 
                rect.w = pCodecCtx->width; 
                rect.h = pCodecCtx->height; 
                SDL_DisplayYUVOverlay(bmp, &rect); 
 
            } 
        } 
        // Free the packet that was allocated by av_read_frame  
        av_free_packet(&packet); 
        SDL_PollEvent(&event); 
        switch(event.type) { 
            case SDL_QUIT: 
                SDL_Quit(); 
                exit(0); 
                break; 
            default: 
                break; 
        } 
 
    } 
    // Free the YUV frame  
    av_free(pFrame); 
 
    // Close the codec  
    avcodec_close(pCodecCtx); 
 
    // Close the video file  
    av_close_input_file(pFormatCtx);

    (*env)->ReleaseStringUTFChars(env, fileName, local_title);
    return 0;
}
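The only subtle step in the loop above is the plane mapping: SDL's YV12 overlay stores its planes in Y, V, U order, while FFmpeg's planar YUV420P frames use Y, U, V, which is why indices 1 and 2 are swapped before calling sws_scale. A minimal sketch of that step in isolation (the function name and parameters are illustrative, not part of the original code, and it assumes the same headers as Decodec_Video.c):

[cpp]
// Scale/convert one decoded frame into an SDL YV12 overlay (sketch).
static void blit_frame_to_overlay(struct SwsContext *sws, AVFrame *frame,
                                  SDL_Overlay *ovl, int height)
{
    AVPicture pict;

    SDL_LockYUVOverlay(ovl);

    pict.data[0] = ovl->pixels[0];   // Y plane
    pict.data[1] = ovl->pixels[2];   // U plane sits in slot 2 of a YV12 overlay
    pict.data[2] = ovl->pixels[1];   // V plane sits in slot 1

    pict.linesize[0] = ovl->pitches[0];
    pict.linesize[1] = ovl->pitches[2];
    pict.linesize[2] = ovl->pitches[1];

    sws_scale(sws, (const uint8_t * const *)frame->data, frame->linesize,
              0, height, pict.data, pict.linesize);

    SDL_UnlockYUVOverlay(ovl);
}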

2、The build output is as follows:

[plain]
root@zhangjie:/Graduation/jni# ndk-build 
Install        : libSDL.so => libs/armeabi/libSDL.so 
Install        : libffmpeg-neon.so => libs/armeabi/libffmpeg-neon.so 
Compile arm    : ffmpeg-test-neon <= Decodec_Video.c 
/Graduation/jni/jniffmpeg/Decodec_Video.c: In function 'Java_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode_VideoPlayer': 
/Graduation/jni/jniffmpeg/Decodec_Video.c:106:1: warning: passing argument 2 of 'sws_scale' from incompatible pointer type [enabled by default] 
/Graduation/jni/jniffmpeg/../ffmpeg/libswscale/swscale.h:237:5: note: expected 'uint8_t const * const*' but argument is of type 'uint8_t **' 
/Graduation/jni/jniffmpeg/Decodec_Video.c:137:2: warning: 'av_close_input_file' is deprecated (declared at /Graduation/jni/jniffmpeg/../ffmpeg/libavformat/avformat.h:1533) [-Wdeprecated-declarations] 
SharedLibrary  : libffmpeg-test-neon.so 
Install        : libffmpeg-test-neon.so => libs/armeabi/libffmpeg-test-neon.so
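Both warnings are non-fatal with this FFmpeg snapshot, but they are easy to clean up if desired: cast the source planes passed to sws_scale to the const-qualified type it expects, and use avformat_close_input() instead of the deprecated av_close_input_file(). A minimal sketch of the two changes:

[cpp]
// Silence the sws_scale pointer-type warning with an explicit const cast:
sws_scale(img_convert_ctx, (const uint8_t * const *)pFrame->data,
          pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize);

// Replace the deprecated call with its successor, which also frees
// pFormatCtx and resets the pointer to NULL:
avformat_close_input(&pFormatCtx);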

3、SDL 1.3 source code (the source archive and the prebuilt library mentioned in the introduction).

4、In the earlier chapter Android本地视频播放器开发--NDK编译FFmpeg, the swscale component was not enabled, so FFmpeg has to be rebuilt with it; the build script is as follows:

[plain]
NDK=/opt/android-ndk-r8d 
PLATFORM=$NDK/platforms/android-8/arch-arm/ 
PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.4.3/prebuilt/linux-x86 
LOCAL_ARM_NEON=true 
CPU=armv7-a 
OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=neon -marm -mcpu=cortex-a8" 
PREFIX=./android/$CPU 
./configure --target-os=linux \ 
    --prefix=$PREFIX \ 
    --enable-cross-compile \ 
    --arch=arm \ 
    --enable-nonfree \ 
    --enable-asm \ 
    --cpu=cortex-a8 \ 
    --enable-neon \ 
    --cc=$PREBUILT/bin/arm-linux-androideabi-gcc \ 
    --cross-prefix=$PREBUILT/bin/arm-linux-androideabi- \ 
    --nm=$PREBUILT/bin/arm-linux-androideabi-nm \ 
    --sysroot=$PLATFORM \ 
    --extra-cflags=" -O3 -fpic -DANDROID -DHAVE_SYS_UIO_H=1 $OPTIMIZE_CFLAGS " \ 
    --disable-shared \ 
    --enable-static \ 
    --extra-ldflags="-Wl,-rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib  -nostdlib -lc -lm -ldl -llog" \ 
    --disable-ffmpeg \ 
    --disable-ffplay \ 
    --disable-ffprobe \ 
    --disable-ffserver \ 
    --disable-encoders \ 
    --enable-avformat \ 
    --disable-optimizations \ 
    --disable-doc \ 
    --enable-pthreads \ 
    --disable-yasm \ 
    --enable-zlib \ 
    --enable-pic \ 
    --enable-small 
 
#make clean 
make  -j4 install 
 
$PREBUILT/bin/arm-linux-androideabi-ar d libavcodec/libavcodec.a inverse.o 
 
$PREBUILT/bin/arm-linux-androideabi-ld -rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib  -soname libffmpeg-neon.so -shared -nostdlib  -z noexecstack -Bsymbolic --whole-archive --no-undefined -o $PREFIX/libffmpeg-neon.so libavcodec/libavcodec.a libavformat/libavformat.a libavutil/libavutil.a  libavfilter/libavfilter.a libswresample/libswresample.a libswscale/libswscale.a libavdevice/libavdevice.a -lc -lm -lz -ldl -llog  --warn-once  --dynamic-linker=/system/bin/linker $PREBUILT/lib/gcc/arm-linux-androideabi/4.4.3/libgcc.a
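After rebuilding, it is worth confirming that swscale actually ended up inside libffmpeg-neon.so before wiring it into the player. A minimal sketch of a native helper that logs the linked component versions (the function name and log tag are illustrative; it assumes the same include layout as Decodec_Video.c):

[cpp]
#include <android/log.h>
#include "../ffmpeg/libavcodec/avcodec.h"
#include "../ffmpeg/libavformat/avformat.h"
#include "../ffmpeg/libswscale/swscale.h"

// Log the versions of the FFmpeg components linked into libffmpeg-neon.so;
// a sane swscale_version() value confirms the rebuild picked up libswscale.
static void log_ffmpeg_versions(void)
{
    __android_log_print(ANDROID_LOG_INFO, "ffmpeg-neon",
                        "avcodec %u, avformat %u, swscale %u",
                        avcodec_version(), avformat_version(), swscale_version());
}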
