再次感谢 loken 大佬在《FFmpeg封装h264 to mpeg-ts》的回复 。
我结合《FFmpeg写入输出文件》与雷神大佬的相关博客内容想实现如下功能:
主要在 java 层用 MediaCodec 编码 Camera 数据,然后将编码后的数据存在一个队列里,再通过 jni 调用 ffmpeg 来取 java 层的 h264 数据,最后生成 mpeg-ts 文件。结果是生成的 ts 文件不能播放。
我是才开始学习FFmpeg,现在写的代码也只是依葫芦画瓢 ,希望各位能够帮忙看看有哪些错误?非常感谢!!!
以下是代码:
```cpp
int read_packet(void *opaque, uint8_t *buf, int buf_size) {
// 获取 java MediaCodec 编码后的数据
return buf_size;
}
void *handleThread(void *pVoid) {
LOGD("THREAD START");
int ret;
size_t bufferSize = 102400;
unsigned char *inBuffer = NULL;
inBuffer = (unsigned char *) av_malloc(bufferSize);
const AVInputFormat *inFmt = av_find_input_format("h264");
AVFormatContext *inFmtCtx = NULL;
inFmtCtx = avformat_alloc_context();
AVIOContext *avIoIn = avio_alloc_context(inBuffer, bufferSize, 0, NULL, read_packet, NULL,
NULL);
if (avIoIn == NULL) {
LOGD("avio_alloc_context fail");
return nullptr;
}
inFmtCtx->pb = avIoIn;
inFmtCtx->flags = AVFMT_FLAG_CUSTOM_IO;
if ((ret = avformat_open_input(&inFmtCtx, "", inFmt, NULL)) < 0) {
LOGD("avformat_open_input fail %d", ret);
return nullptr;
}
int videoIndex = -1;
for (int i = 0; i < inFmtCtx->nb_streams; i++) {
if (inFmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoIndex = i;
LOGD("find video index");
break;
}
}
char *url = "test.ts";
AVFormatContext *outFmtCtx = NULL;
avformat_alloc_output_context2(&outFmtCtx, NULL, "mpegts", url);
if (!outFmtCtx) {
LOGD("avformat_alloc_output_context2 fail");
return nullptr;
}
const AVOutputFormat *outFmt = NULL;
outFmt = outFmtCtx->oformat;
AVStream *outStream = avformat_new_stream(outFmtCtx, NULL);
const AVCodec *encode = avcodec_find_encoder(AV_CODEC_ID_H264);
AVCodecContext *codecCtx = avcodec_alloc_context3(encode);
ret = avcodec_parameters_to_context(codecCtx, inFmtCtx->streams[videoIndex]->codecpar);
if (ret < 0) {
LOGD("avcodec_parameters_to_context error ,%d", ret);
return nullptr;
}
codecCtx->codec_tag = 0;
if (outFmtCtx->oformat->flags & AVFMT_GLOBALHEADER) {
codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
ret = avcodec_parameters_from_context(outStream->codecpar, codecCtx);
if (ret < 0) {
LOGD("avcodec_parameters_from_context fail %d", ret);
return nullptr;
}
ret = avio_open(&outFmtCtx->pb, url, AVIO_FLAG_WRITE);
if (ret < 0) {
LOGD("avio_open fail %d", ret);
return nullptr;
}
ret = avformat_write_header(outFmtCtx, NULL);
if (ret < 0) {
LOGD("avformat_write_header fail %d", ret);
return nullptr;
}
int64_t startTime = 0;
startTime = av_gettime();
AVPacket pkt;
int frameIndex = 0;
while (true) {
AVStream *in_stream, *out_stream;
ret = av_read_frame(inFmtCtx, &pkt);
if (ret < 0) {
LOGD("av_read_frame ret <0 , %d", ret);
break;
}
if (pkt.pts == AV_NOPTS_VALUE) {
//Write PTS
AVRational time_base1 = inFmtCtx->streams[videoIndex]->time_base;
//Duration between 2 frames (us)
int64_t calc_duration =
(double) AV_TIME_BASE / av_q2d(inFmtCtx->streams[videoIndex]->r_frame_rate);
//Parameters
pkt.pts = (double) (frameIndex * calc_duration) /
(double) (av_q2d(time_base1) * AV_TIME_BASE);
pkt.dts = pkt.pts;
pkt.duration = (double) calc_duration / (double) (av_q2d(time_base1) * AV_TIME_BASE);
}
//Important:Delay
if (pkt.stream_index == videoIndex) {
AVRational time_base = inFmtCtx->streams[videoIndex]->time_base;
AVRational time_base_q = {1, AV_TIME_BASE};
int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
int64_t now_time = av_gettime() - startTime;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
}
in_stream = inFmtCtx->streams[pkt.stream_index];
out_stream = outFmtCtx->streams[pkt.stream_index];
/* copy packet */
//转换PTS/DTS(Convert PTS/DTS)
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
(AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,
(AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
frameIndex++;
ret = av_interleaved_write_frame(outFmtCtx, &pkt);
if (ret < 0) {
LOGD("Error muxing packet\n");
break;
}
av_packet_unref(&pkt);
}
av_write_trailer(outFmtCtx);
avcodec_close(codecCtx);
avformat_close_input(&inFmtCtx);
if (outFmtCtx && !(outFmt->flags & AVFMT_NOFILE)) {
avio_close(outFmtCtx->pb);
}
avformat_free_context(outFmtCtx);
LOGD("THREAD STOP");
return nullptr;
}
/**
 * JNI entry point: spawn the muxing worker thread and return immediately.
 *
 * The thread is detached because nothing ever joins it — the original
 * code dropped the pthread_t on the floor, leaking the thread's
 * bookkeeping resources. The pthread_create return value is now checked
 * so a failed spawn is not silently treated as success.
 */
void start(JNIEnv *env, jobject thiz) {
    (void) env;  /* unused: no JNI calls are made here */
    (void) thiz;
    pthread_t worker;
    if (pthread_create(&worker, nullptr, handleThread, nullptr) == 0) {
        pthread_detach(worker); /* fire-and-forget: reclaim resources on exit */
    }
}
```