#endif

AVFormatContext *ofmt_ctx;
AVStream* video_st;
AVCodecContext* pCodecCtx;
AVCodec* pCodec;
AVPacket enc_pkt;
AVFrame *pFrameYUV;

int framecnt = 0;
int yuv_width;
int yuv_height;
int y_length;
int uv_length;
int64_t start_time;

//Output FFmpeg's av_log()
void custom_log(void *ptr, int level, const char* fmt, va_list vl){
    FILE *fp = fopen("/storage/emulated/0/av_", "a+");
    if (fp){
        vfprintf(fp, fmt, vl);
        fflush(fp);
        fclose(fp);
    }
}
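custom_log() above appends every av_log() message to the file regardless of severity; a custom callback receives all messages, so filtering is normally done against the level argument inside the callback. A minimal, hedged filter that could sit at the top of custom_log() (not part of the original listing):

    //Hedged sketch: ignore anything more verbose than warnings.
    if (level > AV_LOG_WARNING)
        return;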

JNIEXPORT jint JNICALL Java_com_zhanghui_test_MainActivity_initial
  (JNIEnv *env, jobject obj, jint width, jint height)
{
    const char* out_path = "/sdcard/zhanghui/";
    yuv_width = width;
    yuv_height = height;
    y_length = width * height;
    uv_length = width * height / 4;

    //FFmpeg av_log() callback
    av_log_set_callback(custom_log);

    av_register_all();

    //output initialize
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
    //output encoder initialize
    pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!pCodec){
        LOGE("Can not find encoder!\n");
        return -1;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    pCodecCtx->pix_fmt = PIX_FMT_YUV420P;
    pCodecCtx->width = width;
    pCodecCtx->height = height;
    pCodecCtx->time_base.num = 1;
    pCodecCtx->time_base.den = 30;
    pCodecCtx->bit_rate = 800000;
    pCodecCtx->gop_size = 300;
    /* Some formats want stream headers to be separate. */
    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
        pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;

    //H264 codec param
    //pCodecCtx->me_range = 16;
    //pCodecCtx->max_qdiff = 4;
    //pCodecCtx->qcompress = 0.6;
    pCodecCtx->qmin = 10;
    pCodecCtx->qmax = 51;
    //Optional Param
    pCodecCtx->max_b_frames = 3;
    // Set H264 preset and tune
    AVDictionary *param = 0;
    av_dict_set(&param, "preset", "ultrafast", 0);
    av_dict_set(&param, "tune", "zerolatency", 0);

    if (avcodec_open2(pCodecCtx, pCodec, &param) < 0){
        LOGE("Failed to open encoder!\n");
        return -1;
    }
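    // Note (added): avcodec_open2() consumes the option entries it recognizes and
    // leaves any unknown ones in 'param'; the listing never frees the dictionary.
    // A minimal optional addition here would be:
    //     av_dict_free(&param);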

    //Add a new stream to output, should be called by the user before avformat_write_header() for muxing
    video_st = avformat_new_stream(ofmt_ctx, pCodec);
    if (video_st == NULL){
        return -1;
    }
    video_st->time_base.num = 1;
    video_st->time_base.den = 30;
    video_st->codec = pCodecCtx;

    //Open output URL, set before avformat_write_header() for muxing
    if (avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE) < 0){
        LOGE("Failed to open output file!\n");
        return -1;
    }

    //Write File Header
    avformat_write_header(ofmt_ctx, NULL);

    start_time = av_gettime();
    return 0;
}
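The header written by avformat_write_header() has a natural counterpart when recording stops. Below is a hedged sketch of the teardown that would typically pair with initial(); the function name release_output is illustrative and this sequence is not part of the original listing:

    //Hedged sketch of a matching release step: finalize the FLV file,
    //close the encoder and the output, and free the muxer context.
    void release_output(void){
        av_write_trailer(ofmt_ctx);
        if (video_st)
            avcodec_close(video_st->codec);
        avio_close(ofmt_ctx->pb);
        avformat_free_context(ofmt_ctx);
    }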

JNIEXPORT jint JNICALL Java_com_zhanghui_test_MainActivity_encode
  (JNIEnv *env, jobject obj, jbyteArray yuv)
{
    int ret;
    int enc_got_frame = 0;
    int i = 0;

    pFrameYUV = avcodec_alloc_frame();
    uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

    //The Android camera delivers NV21 data; convert it to YUV420P here
    jbyte* in = (jbyte*)(*env)->GetByteArrayElements(env, yuv, 0);
    memcpy(pFrameYUV->data[0], in, y_length);
    for (i = 0; i < uv_length; i++)
    {
        *(pFrameYUV->data[2] + i) = *(in + y_length + i * 2);
        *(pFrameYUV->data[1] + i) = *(in + y_length + i * 2 + 1);
    }
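    // Layout note (added for clarity): the NV21 input holds y_length bytes of Y,
    // followed by uv_length interleaved V,U byte pairs. YUV420P wants separate
    // planes, so the loop above writes the even chroma bytes to data[2] (V) and
    // the odd ones to data[1] (U).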

    pFrameYUV->format = AV_PIX_FMT_YUV420P;
    pFrameYUV->width = yuv_width;
    pFrameYUV->height = yuv_height;

    enc_pkt.data = NULL;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);
    ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
    av_frame_free(&pFrameYUV);

    if (enc_got_frame == 1){
        LOGI("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
        framecnt++;
        enc_pkt.stream_index = video_st->index;

        //Write PTS
        AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
        AVRational r_framerate1 = { 60, 2 };//{ 50, 2 };
        AVRational time_base_q = { 1, AV_TIME_BASE };
        //Duration between 2 frames (us)
        int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //internal timestamp
        //Parameters
        //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
        enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
        enc_pkt.dts = enc_pkt.pts;
        enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
        enc_pkt.pos = -1;

        //Delay
        int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
        int64_t now_time = av_gettime() - start_time;
        if (pts_time > now_time)
            av_usleep(pts_time - now_time);

        ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
        av_free_packet(&enc_pkt);
    }
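    // Worked example of the rescaling above (added note, not in the original):
    // r_framerate1 = {60, 2} is 30 fps, so calc_duration = 1000000/30 ≈ 33333 us.
    // With an FLV stream time_base of {1, 1000}, frame 30 gets
    // pts = av_rescale_q(30 * 33333, {1, 1000000}, {1, 1000}) ≈ 1000, i.e. 1 s,
    // and enc_pkt.duration rescales to ≈ 33 (ms per frame).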

    return 0;
}

JNIEXPORT jint JNICALL Java_com_zhanghui_test_MainActivity_flush
  (JNIEnv *env, jobject obj)
{
    int ret;
    int got_frame;
    AVPacket enc_pkt;
    if (!(ofmt_ctx->streams[0]->codec->codec->capabilities &
        CODEC_CAP_DELAY))
        return 0;
    while (1) {
        enc_pkt.data = NULL;
        enc_pkt.size = 0;
        av_init_packet(&enc_pkt);
        ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
            NULL, &got_frame);
        if (ret < 0)
            break;
        if (!got_frame){
            ret = 0;
