
iOS: Building IJKPlayer with RTSP Playback

Author: copy_farmer | Published 2020-09-07 18:19

1. Introduction to IJKPlayer

ijkplayer is an open-source player framework from Bilibili for video and live-stream playback. It is based on FFmpeg and supports both Android and iOS. Repository: https://github.com/Bilibili/ijkplayer.git

2. Preparation

2.1 Clone ijkplayer.git into a local directory (the build is done on a Mac)
2.2 Modify module-lite.sh (ijkplayer does not enable RTP/RTSP support by default) by adding the following flags:

export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-protocol=rtp"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-protocol=tcp"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=rtsp"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=sdp"
export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=rtp"

2.3 Modify packet_queue_get_or_buffering in ff_ffplay.c (to handle frame loss over RTSP). The change comments out the ffp_toggle_buffering call so that a momentarily empty packet queue no longer pushes the player into a buffering pause:

while (1) {
        int new_packet = packet_queue_get(q, pkt, 0, serial);
        if (new_packet < 0)
            return -1;
        else if (new_packet == 0) {
//            if (q->is_buffer_indicator && !*finished)
//                ffp_toggle_buffering(ffp, 1);
            new_packet = packet_queue_get(q, pkt, 1, serial);
            if (new_packet < 0)
                return -1;
        }

        if (*finished == *serial) {
            av_packet_unref(pkt);
            continue;
        }
        else
            break;
    }

    return 1;

2.4 Modify vp_duration in ff_ffplay.c (to reduce latency). Instead of deriving the duration from consecutive pts values, always return the frame's own duration:

static double vp_duration(VideoState *is, Frame *vp, Frame *nextvp) {
//    if (vp->serial == nextvp->serial) {
//        double duration = nextvp->pts - vp->pts;
//        if (isnan(duration) || duration <= 0 || duration > is->max_frame_duration)
//            return vp->duration;
//        else
//            return duration;
//    } else {
//        return 0.0;
//    }
    return vp->duration;
}

2.5 Modify ff_ffplay.c to add snapshot, recording and file-saving functions. After adding them, the new functions also need to be declared in the header file.
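The functions below rely on several new fields on FFPlayer and must be visible to the rest of the player, so they have to be declared in the headers as well. A minimal sketch of what to add, assuming the fields go into the FFPlayer struct (ff_ffplay_def.h) and the prototypes next to the other ffp_* functions (ff_ffplay.h); the exact file placement is an assumption here, check where FFPlayer is defined in your checkout:

/* New FFPlayer fields used by the recording/snapshot code (sketch) */
AVFormatContext *m_ofmt_ctx;     // muxer context for the recording file
AVOutputFormat  *m_ofmt;         // output format of the recording file
pthread_mutex_t  record_mutex;   // protects m_ofmt_ctx while packets are written
int              is_record;      // 1 while recording is active
int              record_error;   // set to 1 when recording fails
int              is_first;       // has the first recorded packet been written?
int64_t          start_pts;      // pts of the first packet after recording started
int64_t          start_dts;      // dts of the first packet after recording started

/* Prototypes to declare alongside the other ffp_* functions */
int  ffp_start_record(FFPlayer *ffp, const char *file_name);
int  ffp_stop_record(FFPlayer *ffp);
int  ffp_record_file(FFPlayer *ffp, AVPacket *packet);
void ffp_get_current_frame_l(FFPlayer *ffp, uint8_t *frame_buf);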

// Start recording: file_name is the path of the output file
int ffp_start_record(FFPlayer *ffp, const char *file_name)
{
    assert(ffp);
    ALOGD("start filename is %s",file_name);
    VideoState *is = ffp->is;
    
    if (ffp->is_record) { // already recording; check this before the state below is reset, otherwise the guard can never trigger
        av_log(ffp, AV_LOG_ERROR, "recording has started");
        ALOGD("recording has started");
        goto end;
    }
    
    ffp->m_ofmt_ctx = NULL;
    ffp->m_ofmt = NULL;
    ffp->is_record = 0;
    ffp->record_error = 0;
    
    if (!file_name || !strlen(file_name)) { // no output path given
        av_log(ffp, AV_LOG_ERROR, "filename is invalid");
        ALOGD("filename is invalid");
        goto end;
    }
    
    if (!is || !is->ic || is->paused || is->abort_request) { // no context, or playback is paused/stopped
        av_log(ffp, AV_LOG_ERROR, "is,is->ic,is->paused is invalid");
        ALOGD("is,is->ic,is->paused is invalid");
        goto end;
    }
    
    // Allocate an AVFormatContext for the output file
    avformat_alloc_output_context2(&ffp->m_ofmt_ctx, NULL, "mp4", file_name);
    if (!ffp->m_ofmt_ctx) {
        av_log(ffp, AV_LOG_ERROR, "Could not create output context filename is %s\n", file_name);
         ALOGD("Could not create output context filename is %s\n", file_name);
        goto end;
    }
    ffp->m_ofmt = ffp->m_ofmt_ctx->oformat;
    
    for (int i = 0; i < is->ic->nb_streams; i++) {
        // Create an output stream mirroring each input stream
        AVStream *in_stream = is->ic->streams[i];
        AVStream *out_stream = avformat_new_stream(ffp->m_ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            av_log(ffp, AV_LOG_ERROR, "Failed allocating output stream\n");
            ALOGD("Failed allocating output stream");
            goto end;
        }
        
        // Copy the input stream's codec parameters into the output stream's AVCodecContext
        av_log(ffp, AV_LOG_DEBUG, "in_stream->codec: %p\n", (void *)in_stream->codec);
        if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
            av_log(ffp, AV_LOG_ERROR, "Failed to copy context from input to output stream codec context\n");
            ALOGD("Failed to copy context from input to output stream codec context");
            goto end;
        }
        
        out_stream->codec->codec_tag = 0;
        if (ffp->m_ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
        }
    }
    
    av_dump_format(ffp->m_ofmt_ctx, 0, file_name, 1);
    
    // Open the output file
    if (!(ffp->m_ofmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&ffp->m_ofmt_ctx->pb, file_name, AVIO_FLAG_WRITE) < 0) {
            av_log(ffp, AV_LOG_ERROR, "Could not open output file '%s'", file_name);
            ALOGD("Could not open output file '%s'", file_name);
            goto end;
        }
    }
    
    // Write the container header
    if (avformat_write_header(ffp->m_ofmt_ctx, NULL) < 0) {
        av_log(ffp, AV_LOG_ERROR, "Error occurred when opening output file\n");
        ALOGD("Error occurred when opening output file\n");
        goto end;
    }
    
    ffp->is_record = 1;
    ffp->record_error = 0;
    pthread_mutex_init(&ffp->record_mutex, NULL);
    
    return 0;
end:
    ffp->record_error = 1;
    return -1;
}
// Stop recording

int ffp_stop_record(FFPlayer *ffp)
{
    ALOGD("stopRecord");
    assert(ffp);
    if (ffp->is_record) {
        ffp->is_record = 0;
        pthread_mutex_lock(&ffp->record_mutex);
        if (ffp->m_ofmt_ctx != NULL) {
            av_write_trailer(ffp->m_ofmt_ctx);
            if (ffp->m_ofmt_ctx && !(ffp->m_ofmt->flags & AVFMT_NOFILE)) {
                avio_close(ffp->m_ofmt_ctx->pb);
            }
            avformat_free_context(ffp->m_ofmt_ctx);
            ffp->m_ofmt_ctx = NULL;
            ffp->is_first = 0;
        }
        pthread_mutex_unlock(&ffp->record_mutex);
        pthread_mutex_destroy(&ffp->record_mutex);
        av_log(ffp, AV_LOG_DEBUG, "stopRecord ok\n");
        ALOGD("stopRecord ok\n");
    } else {
        av_log(ffp, AV_LOG_ERROR, "don't need stopRecord\n");
        ALOGD("don't need stopRecord\n");
    }
    return 0;
}
// Snapshot: copy the currently displayed frame into frame_buf
void ffp_get_current_frame_l(FFPlayer *ffp, uint8_t *frame_buf)
{
  ALOGD("=============>start snapshot\n");

  VideoState *is = ffp->is;
  Frame *vp;
  int i = 0, linesize = 0, pixels = 0;
  uint8_t *src;

  vp = &is->pictq.queue[is->pictq.rindex];
  int height = vp->bmp->h;
  int width = vp->bmp->w;

  ALOGD("=============>%d X %d === %d\n", width, height, vp->bmp->pitches[0]);

  // copy the decoded picture out row by row (assumes a 4-bytes-per-pixel RGBA overlay)
  linesize = vp->bmp->pitches[0];
  src = vp->bmp->pixels[0];
  pixels = width * 4;
  for (i = 0; i < height; i++) {
      memcpy(frame_buf + i * pixels, src + i * linesize, pixels);
  }
  
  ALOGD("=============>end snapshot\n");
}
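The _l suffix follows ijkplayer's convention that the caller already holds the appropriate lock while the picture queue is read. Below is a minimal sketch of a public, locking wrapper, assuming the exposed name ffp_get_current_frame and that the frame queue's own SDL mutex is the right guard; both are assumptions, not part of the original post:

// Sketch only: hypothetical locking wrapper around ffp_get_current_frame_l.
// The caller must supply a buffer of at least width * height * 4 bytes (RGBA).
void ffp_get_current_frame(FFPlayer *ffp, uint8_t *frame_buf)
{
    VideoState *is = ffp->is;
    SDL_LockMutex(is->pictq.mutex);   // keep the displayed frame stable while it is copied
    ffp_get_current_frame_l(ffp, frame_buf);
    SDL_UnlockMutex(is->pictq.mutex);
}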
// Write a single AVPacket into the recording file
int ffp_record_file(FFPlayer *ffp, AVPacket *packet)
{
    assert(ffp);
    VideoState *is = ffp->is;
    int ret = 0;
    AVStream *in_stream;
    AVStream *out_stream;
    
    if (ffp->is_record) {
        if (packet == NULL) {
            ffp->record_error = 1;
            av_log(ffp, AV_LOG_ERROR, "packet == NULL");
            return -1;
        }
        
        AVPacket *pkt = (AVPacket *)av_malloc(sizeof(AVPacket)); // use a packet separate from the one feeding live playback, otherwise the picture freezes
        av_new_packet(pkt, 0);
        if (0 == av_packet_ref(pkt, packet)) {
            pthread_mutex_lock(&ffp->record_mutex);
            
            if (!ffp->is_first) { // first recorded packet: start its timestamps at 0
                ffp->is_first = 1;
                pkt->pts = 0;
                pkt->dts = 0;
            } else { // every later packet subtracts the timestamps captured when recording started, so the file's timeline starts at 0
//                pkt->pts = llabs(pkt->pts - ffp->start_pts);
//                pkt->dts = llabs(pkt->dts - ffp->start_dts);
                // Check the stream's real media type (the original compared stream_index against the
                // AVMediaType enum, which only works when video is stream 0 and audio is stream 1)
                enum AVMediaType type = is->ic->streams[pkt->stream_index]->codec->codec_type;
                if (type == AVMEDIA_TYPE_AUDIO) {
                   ALOGD("=============> AVMEDIA_TYPE_AUDIO\n");
                }
                else if (type == AVMEDIA_TYPE_VIDEO) {
                    pkt->pts = llabs(pkt->pts - ffp->start_pts);
                    pkt->dts = llabs(pkt->dts - ffp->start_dts);
                }
            }
            }
        

            in_stream  = is->ic->streams[pkt->stream_index];
            out_stream = ffp->m_ofmt_ctx->streams[pkt->stream_index];
            
            // 转换PTS/DTS
            pkt->pts = av_rescale_q_rnd(pkt->pts, in_stream->time_base, out_stream->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
            pkt->dts = av_rescale_q_rnd(pkt->dts, in_stream->time_base, out_stream->time_base, (AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
            pkt->duration = av_rescale_q(pkt->duration, in_stream->time_base, out_stream->time_base);
            pkt->pos = -1;
                  
            // 写入一个AVPacket到输出文件
            if ((ret = av_interleaved_write_frame(ffp->m_ofmt_ctx, pkt)) < 0) {
                av_log(ffp, AV_LOG_ERROR, "Error muxing packet\n");
            }
            
            av_packet_unref(pkt);
            av_free(pkt); // the AVPacket struct itself was av_malloc'ed above, so release it as well
            pthread_mutex_unlock(&ffp->record_mutex);
        } else {
            av_log(ffp, AV_LOG_ERROR, "av_packet_ref failed");
            av_free(pkt);
        }
    }
    return ret;
}
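The three recording functions above are only half of the wiring: the post never shows where ffp_record_file is called from, and ffp->start_pts / ffp->start_dts are read but never assigned. Below is a minimal sketch of the missing hookup, assuming it is added to read_thread() in ff_ffplay.c right after av_read_frame() returns a packet; both the placement and the start_pts/start_dts capture are assumptions, not shown in the original post:

// Inside read_thread(), after av_read_frame(ic, pkt) succeeds (sketch only):
if (ffp->is_record) {
    if (!ffp->is_first) {
        // remember the timestamps of the first packet seen after recording started,
        // so ffp_record_file() can rebase later packets to start from 0
        ffp->start_pts = pkt->pts;
        ffp->start_dts = pkt->dts;
    }
    if (ffp_record_file(ffp, pkt) < 0)
        ffp->record_error = 1;
}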

3. Building

3.1 Switch the build to module-lite.sh: open a terminal and go into the config directory

cd config
rm module.sh
ln -s module-lite.sh module.sh

3.2 Download FFmpeg

./init-ios.sh

3.3 Build FFmpeg

./compile-ffmpeg.sh clean
./compile-ffmpeg.sh all

P.S. Recent versions of Xcode have dropped support for 32-bit builds. Workaround:
remove armv7 from compile-ffmpeg.sh, for example:
FF_ALL_ARCHS_IOS8_SDK="arm64 i386 x86_64"
then rerun the command that failed: ./compile-ffmpeg.sh all

4. Generating IJKMediaFramework

4.1 Add the following method declarations to IJKMediaPlayback.h

- (void)stopRecord;
- (void)startRecordWithFileName:(NSString *)fileName;
- (BOOL)isRecording;

4.2 Add the following methods to IJKFFMoviePlayerController.m

- (void)stopRecord {
    ijkmp_stop_record(_mediaPlayer);
}

- (void)startRecordWithFileName:(NSString *)fileName {
    // Path where the recording will be saved
    const char *path = [fileName cStringUsingEncoding:NSUTF8StringEncoding];
    ijkmp_start_record(_mediaPlayer, path);
}

- (BOOL)isRecording {
    return ijkmp_isRecording(_mediaPlayer);
}
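ijkmp_start_record, ijkmp_stop_record and ijkmp_isRecording are not part of upstream ijkplayer either, so they also need to be added to ijkplayer.c (and declared in ijkplayer.h) as thin wrappers around the ffp_* functions from section 2.5. A minimal sketch, assuming the usual IjkMediaPlayer layout with an ffplayer field and a player mutex:

// ijkplayer.c (sketch): forward the recording calls to the FFPlayer layer.
int ijkmp_start_record(IjkMediaPlayer *mp, const char *file_name)
{
    assert(mp);
    pthread_mutex_lock(&mp->mutex);
    int ret = ffp_start_record(mp->ffplayer, file_name);
    pthread_mutex_unlock(&mp->mutex);
    return ret;
}

int ijkmp_stop_record(IjkMediaPlayer *mp)
{
    assert(mp);
    pthread_mutex_lock(&mp->mutex);
    int ret = ffp_stop_record(mp->ffplayer);
    pthread_mutex_unlock(&mp->mutex);
    return ret;
}

int ijkmp_isRecording(IjkMediaPlayer *mp)
{
    assert(mp);
    return mp->ffplayer ? mp->ffplayer->is_record : 0;
}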

4.3 Build the IJKMediaFramework target to produce the framework, then copy everything under the build folder into your own project.

5. Using IJKPlayer

    [IJKFFMoviePlayerController setLogReport:YES];
    [IJKFFMoviePlayerController setLogLevel:k_IJK_LOG_DEBUG];

    // Tune the options
    IJKFFOptions *options = [IJKFFOptions optionsByDefault];
    // For RTSP, prefer TCP transport (the default is UDP)
    [options setFormatOptionValue:@"tcp" forKey:@"rtsp_transport"];
    // Probe size before playback starts; a smaller value shows the first picture sooner
    [options setFormatOptionIntValue:1024 * 16 forKey:@"probesize"];
    // Probe duration before playback starts (microseconds)
    [options setFormatOptionIntValue:50000 forKey:@"analyzeduration"];
    // Hardware decoding: 1 = hardware (VideoToolbox), 0 = software
    [options setPlayerOptionIntValue:0 forKey:@"videotoolbox"];
    // Number of reconnect attempts
    [options setPlayerOptionIntValue:5 forKey:@"reconnect"];
    
    // Maximum fps
    [options setPlayerOptionIntValue:30 forKey:@"max-fps"];
    // Frame-drop switch: if the CPU cannot keep up with decoding, raise this (e.g. to 5),
    // otherwise audio and video drift apart; it can also be used to drop frames for faster-than-realtime playback
    [options setPlayerOptionIntValue:1 forKey:@"framedrop"];
    // Decoder options: do not skip loop filtering or frames, for a cleaner picture
    [options setCodecOptionIntValue:IJK_AVDISCARD_DEFAULT forKey:@"skip_loop_filter"];
    [options setCodecOptionIntValue:IJK_AVDISCARD_DEFAULT forKey:@"skip_frame"];
    
    // Maximum cached duration is 3 seconds; adjust as needed
    [options setPlayerOptionIntValue:3000 forKey:@"max_cached_duration"];
    // Unlimited read-ahead buffer
    [options setPlayerOptionIntValue:1 forKey:@"infbuf"];
    // Disable the player's packet buffering
    [options setPlayerOptionIntValue:0 forKey:@"packet-buffering"];
    
    // Mute: disable the audio stream
    [options setPlayerOptionValue:@"1" forKey:@"an"];
    // Minimum number of frames to buffer before playback starts
    [options setPlayerOptionIntValue:5 forKey:@"min-frames"];
    // Frame rate (fps); non-standard rates can cause audio/video drift, so stick to values such as 15 or 29.97
    [options setPlayerOptionIntValue:15 forKey:@"r"];
    [IJKFFMoviePlayerController checkIfFFmpegVersionMatch:YES];

    self.ijkPlayer = [[IJKFFMoviePlayerController alloc] initWithContentURLString:@"rtsp://192.168.99.1:554/11/" withOptions:options];
    self.ijkPlayer.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
    self.ijkPlayer.view.frame = self.videoBgView.bounds;
    self.ijkPlayer.view.backgroundColor = [UIColor clearColor];
    self.ijkPlayer.shouldAutoplay = YES;

    UIView *playerView = [self.ijkPlayer view];
    playerView.backgroundColor = [UIColor clearColor];
    [self.videoBgView insertSubview:playerView atIndex:1];
    [self.ijkPlayer setScalingMode:IJKMPMovieScalingModeAspectFill];
