NVIDIA Jetson TX1 Development Tutorial Series, Part 7: FFmpeg Installation and Testing


  • Please credit the author and source when reposting: http://blog.csdn.net/u011475210
  • Embedded platform: NVIDIA Jetson TX1
  • Embedded OS: Ubuntu 16.04
  • Virtual machine OS: Ubuntu 14.04
  • Author: WordZzzz

In memory of 雷神 (Lei Xiaohua)
雷神 had a profound influence on China's audio and video developer community. On this memorial day, we remember him.

----------------------------------------

Preface:
I needed to implement photo capture and preview through a USB camera on the NVIDIA TX1, but the USB camera I bought supports only a single resolution in YUYV format, 640*480, while it supports plenty of resolutions in MJPEG format. The multimedia samples on the TX1 have by now grown to a twelfth one, 12_camera_v4l2_cuda, but for the life of me I still could not find a format in nvcc that corresponds to MJPEG in V4L2. I also spent some time with GStreamer, but I could only get preview or photo capture working separately; I did not know how to go any deeper, since there is so little material. After struggling for quite a while I gave up and switched to FFmpeg. GStreamer and FFmpeg are actually quite similar in that you first test on the command line and then write the code, but GStreamer also involves the rather obscure GObject style of programming, which I at least found heavy going; I will give it another try when I have the energy.
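Incidentally, if you want to check for yourself which pixel formats and resolutions your own camera exposes, v4l2-ctl from the v4l-utils package can list them. This is just a side check I added, assuming the camera enumerates as /dev/video0:

sudo apt-get -y install v4l-utils
v4l2-ctl -d /dev/video0 --list-formats-ext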

I. Installing FFmpeg:
1. Install the dependencies:

sudo apt-get update
sudo apt-get -y install autoconf automake build-essential libass-dev libfreetype6-dev \
libsdl2-dev libtheora-dev libtool libva-dev libvdpau-dev libvorbis-dev libxcb1-dev libxcb-shm0-dev \
libxcb-xfixes0-dev pkg-config texinfo wget zlib1g-dev
sudo apt-get -y install yasm nasm libx264-dev libx265-dev libmp3lame-dev libopus-dev libvpx-dev
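As a small sanity check before building anything (my addition, not part of the original steps), you can confirm that the assemblers and pkg-config installed correctly:

yasm --version
nasm --version
pkg-config --version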

2. Install libfdk-aac, which cannot be installed automatically and has to be built from source:

mkdir ~/ffmpeg_sources
cd ~/ffmpeg_sources
wget -O fdk-aac.tar.gz https://github.com/mstorsjo/fdk-aac/tarball/master
tar xzvf fdk-aac.tar.gz
cd mstorsjo-fdk-aac*
autoreconf -fiv
./configure --prefix="$HOME/ffmpeg_build" --disable-shared
make
make install
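Before configuring FFmpeg, you can confirm that libfdk-aac actually landed under the build prefix. This is just a sanity check I added, assuming the default paths above were not changed:

ls "$HOME/ffmpeg_build/lib" | grep fdk
PKG_CONFIG_PATH="$HOME/ffmpeg_build/lib/pkgconfig" pkg-config --modversion fdk-aac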

3. Configure, build, and install FFmpeg:

cd ~/ffmpeg_sources
wget http://ffmpeg.org/releases/ffmpeg-snapshot.tar.bz2
tar xjvf ffmpeg-snapshot.tar.bz2
cd ffmpeg
PATH="$HOME/bin:$PATH" PKG_CONFIG_PATH="$HOME/ffmpeg_build/lib/pkgconfig" ./configure \
--prefix="$HOME/ffmpeg_build" \
--pkg-config-flags="--static" \
--extra-cflags="-I$HOME/ffmpeg_build/include" \
--extra-ldflags="-L$HOME/ffmpeg_build/lib" \
--bindir="$HOME/bin" \
--enable-gpl \
--enable-libass \
--enable-libfdk-aac \
--enable-libfreetype \
--enable-libmp3lame \
--enable-libopus \
--enable-libtheora \
--enable-libvorbis \
--enable-libvpx \
--enable-libx264 \
--enable-libx265 \
--enable-nonfree \
--enable-shared
PATH="$HOME/bin:$PATH" make
make install
hash -r
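Because FFmpeg was configured with --enable-shared and a prefix under $HOME, the ffmpeg binary installed to ~/bin may not find the shared libav* libraries unless you point the loader at them. A quick verification plus a one-frame smoke test of the USB camera; the LD_LIBRARY_PATH export and the 640x480 YUYV parameters are my assumptions, adjust them to your setup:

export LD_LIBRARY_PATH="$HOME/ffmpeg_build/lib:$LD_LIBRARY_PATH"
~/bin/ffmpeg -version
~/bin/ffmpeg -encoders 2>/dev/null | grep -E "libx264|libfdk_aac"
~/bin/ffmpeg -f v4l2 -input_format yuyv422 -video_size 640x480 -i /dev/video0 -frames:v 1 test.jpg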

II. Installing SDL:
First, a brief introduction: SDL stands for Simple DirectMedia Layer. It is a cross-platform multimedia library that provides an interface for direct access to the underlying multimedia hardware, and it is mainly used for game development.

Ubuntu's package manager (Synaptic) already includes the SDL libraries, so a few simple commands are enough to install it, even more foolproof than on Windows!

sudo apt-get install libsdl1.2-dev

Additional packages:

sudo apt-get install libsdl-image1.2-dev

sudo apt-get install libsdl-mixer1.2-dev

sudo apt-get install libsdl-ttf2.0-dev

sudo apt-get install libsdl-gfx1.2-dev
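To double-check which SDL 1.2 version got installed and what compile and link flags it needs (sdl-config ships with libsdl1.2-dev), you can run:

sdl-config --version
sdl-config --cflags --libs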

III. Testing:
Here we test with 雷神's code: FFmpeg + SDL camera capture and display.
First, create a new project with Nsight on the host machine as described in the earlier posts in this series, and then configure the project; the configuration screenshots are omitted here.
The code is below; the original source of the code is: http://blog.csdn.net/leixiaohua1020/article/details/39702113
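If you would rather skip Nsight and build straight from the command line, something along these lines should work, assuming you saved the code as simplest_ffmpeg_read_camera.cpp (the file name is just an example) and that FFmpeg was installed to $HOME/ffmpeg_build as above:

export PKG_CONFIG_PATH="$HOME/ffmpeg_build/lib/pkgconfig"
g++ simplest_ffmpeg_read_camera.cpp -o read_camera \
    $(pkg-config --cflags --libs libavformat libavcodec libavdevice libswscale libavutil) \
    $(sdl-config --cflags --libs)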

#include <stdio.h>

#define __STDC_CONSTANT_MACROS

#ifdef _WIN32
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "SDL/SDL.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <SDL/SDL.h>
#ifdef __cplusplus
};
#endif
#endif

//Output YUV420P
#define OUTPUT_YUV420P 0
//'1' Use Dshow
//'0' Use VFW
#define USE_DSHOW 0


//Refresh Event
#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)

#define SFM_BREAK_EVENT (SDL_USEREVENT + 2)

int thread_exit=0;

int sfp_refresh_thread(void *opaque)
{
	thread_exit=0;
	while (!thread_exit) {
		SDL_Event event;
		event.type = SFM_REFRESH_EVENT;
		SDL_PushEvent(&event);
		SDL_Delay(40);
	}
	thread_exit=0;
	//Break
	SDL_Event event;
	event.type = SFM_BREAK_EVENT;
	SDL_PushEvent(&event);

	return 0;
}


//Show Dshow Device
void show_dshow_device(){
	AVFormatContext *pFormatCtx = avformat_alloc_context();
	AVDictionary* options = NULL;
	av_dict_set(&options,"list_devices","true",0);
	AVInputFormat *iformat = av_find_input_format("dshow");
	printf("========Device Info=============\n");
	avformat_open_input(&pFormatCtx,"video=dummy",iformat,&options);
	printf("================================\n");
}

//Show Dshow Device Option
void show_dshow_device_option(){
	AVFormatContext *pFormatCtx = avformat_alloc_context();
	AVDictionary* options = NULL;
	av_dict_set(&options,"list_options","true",0);
	AVInputFormat *iformat = av_find_input_format("dshow");
	printf("========Device Option Info======\n");
	avformat_open_input(&pFormatCtx,"video=Integrated Camera",iformat,&options);
	printf("================================\n");
}

//Show VFW Device
void show_vfw_device(){
	AVFormatContext *pFormatCtx = avformat_alloc_context();
	AVInputFormat *iformat = av_find_input_format("vfwcap");
	printf("========VFW Device Info======\n");
	avformat_open_input(&pFormatCtx,"list",iformat,NULL);
	printf("=============================\n");
}

//Show AVFoundation Device
void show_avfoundation_device(){
	AVFormatContext *pFormatCtx = avformat_alloc_context();
	AVDictionary* options = NULL;
	av_dict_set(&options,"list_devices","true",0);
	AVInputFormat *iformat = av_find_input_format("avfoundation");
	printf("==AVFoundation Device Info===\n");
	avformat_open_input(&pFormatCtx,"",iformat,&options);
	printf("=============================\n");
}


int main(int argc, char* argv[])
{

	AVFormatContext *pFormatCtx;
	int i, videoindex;
	AVCodecContext *pCodecCtx;
	AVCodec *pCodec;

	av_register_all();
	avformat_network_init();
	pFormatCtx = avformat_alloc_context();

	//Open File
	//char filepath[]="src01_480x272_22.h265";
	//avformat_open_input(&pFormatCtx,filepath,NULL,NULL)

	//Register Device
	avdevice_register_all();

//Windows
#ifdef _WIN32

	//Show Dshow Device
	show_dshow_device();
	//Show Device Options
	show_dshow_device_option();
	//Show VFW Options
	show_vfw_device();

#if USE_DSHOW
	AVInputFormat *ifmt=av_find_input_format("dshow");
	//Set own video device's name
	if(avformat_open_input(&pFormatCtx,"video=Integrated Camera",ifmt,NULL)!=0){
		printf("Couldn't open input stream.\n");
		return -1;
	}
#else
	AVInputFormat *ifmt=av_find_input_format("vfwcap");
	if(avformat_open_input(&pFormatCtx,"0",ifmt,NULL)!=0){
		printf("Couldn't open input stream.\n");
		return -1;
	}
#endif
#elif defined linux
	//Linux
	AVInputFormat *ifmt=av_find_input_format("video4linux2");
	if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
		printf("Couldn't open input stream.\n");
		return -1;
	}
#else
	show_avfoundation_device();
	//Mac
	AVInputFormat *ifmt=av_find_input_format("avfoundation");
	//Avfoundation
	//[video]:[audio]
	if(avformat_open_input(&pFormatCtx,"0",ifmt,NULL)!=0){
		printf("Couldn't open input stream.\n");
		return -1;
	}
#endif


	if(avformat_find_stream_info(pFormatCtx,NULL)<0)
	{
		printf("Couldn't find stream information.\n");
		return -1;
	}
	videoindex=-1;
	for(i=0; i<pFormatCtx->nb_streams; i++)
		if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
		{
			videoindex=i;
			break;
		}
	if(videoindex==-1)
	{
		printf("Couldn't find a video stream.\n");
		return -1;
	}
	pCodecCtx=pFormatCtx->streams[videoindex]->codec;
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec==NULL)
	{
		printf("Codec not found.\n");
		return -1;
	}
	if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
	{
		printf("Could not open codec.\n");
		return -1;
	}
	AVFrame *pFrame,*pFrameYUV;
	pFrame=av_frame_alloc();
	pFrameYUV=av_frame_alloc();
	//unsigned char *out_buffer=(unsigned char *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
	//avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
	//SDL----------------------------
	if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
		printf( "Could not initialize SDL - %s\n", SDL_GetError());
		return -1;
	}
	int screen_w=0,screen_h=0;
	SDL_Surface *screen;
	screen_w = pCodecCtx->width;
	screen_h = pCodecCtx->height;
	screen = SDL_SetVideoMode(screen_w, screen_h, 0,0);

	if(!screen) {
		printf("SDL: could not set video mode - exiting:%s\n",SDL_GetError());
		return -1;
	}
	SDL_Overlay *bmp;
	bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height,SDL_YV12_OVERLAY, screen);
	SDL_Rect rect;
	rect.x = 0;
	rect.y = 0;
	rect.w = screen_w;
	rect.h = screen_h;
	//SDL End------------------------
	int ret, got_picture;

	AVPacket *packet=(AVPacket *)av_malloc(sizeof(AVPacket));

#if OUTPUT_YUV420P
	FILE *fp_yuv=fopen("output.yuv","wb+");
#endif

	struct SwsContext *img_convert_ctx;
	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
	//------------------------------
	SDL_Thread *video_tid = SDL_CreateThread(sfp_refresh_thread,NULL);
	//
	SDL_WM_SetCaption("Simplest FFmpeg Read Camera",NULL);
	//Event Loop
	SDL_Event event;

	for (;;) {
		//Wait
		SDL_WaitEvent(&event);
		if(event.type==SFM_REFRESH_EVENT){
			//------------------------------
			if(av_read_frame(pFormatCtx, packet)>=0){
				if(packet->stream_index==videoindex){
					ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
					if(ret < 0){
						printf("Decode Error.\n");
						return -1;
					}
					if(got_picture){
						SDL_LockYUVOverlay(bmp);
						pFrameYUV->data[0]=bmp->pixels[0];
						pFrameYUV->data[1]=bmp->pixels[2];
						pFrameYUV->data[2]=bmp->pixels[1];
						pFrameYUV->linesize[0]=bmp->pitches[0];
						pFrameYUV->linesize[1]=bmp->pitches[2];
						pFrameYUV->linesize[2]=bmp->pitches[1];
						sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);

#if OUTPUT_YUV420P
						int y_size=pCodecCtx->width*pCodecCtx->height;
						fwrite(pFrameYUV->data[0],1,y_size,fp_yuv); //Y
						fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv); //U
						fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv); //V
#endif

						SDL_UnlockYUVOverlay(bmp);

						SDL_DisplayYUVOverlay(bmp, &rect);

					}
				}
				av_free_packet(packet);
			}else{
				//Exit Thread
				thread_exit=1;
			}
		}else if(event.type==SDL_QUIT){
			thread_exit=1;
		}else if(event.type==SFM_BREAK_EVENT){
			break;
		}

	}


	sws_freeContext(img_convert_ctx);

#if OUTPUT_YUV420P
	fclose(fp_yuv);
#endif

	SDL_Quit();

	//av_free(out_buffer);
	av_free(pFrameYUV);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);

	return 0;
}
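One detail in the code worth calling out: when the overlay is filled, the U and V plane pointers and pitches are deliberately swapped (data[1] takes pixels[2] and data[2] takes pixels[1]) because SDL's YV12 overlay stores its planes in Y-V-U order, while the YUV420P frames produced by sws_scale are in Y-U-V order. To try the program, run it with the FFmpeg libraries on the loader path; if you set OUTPUT_YUV420P to 1, the raw dump can be played back with ffplay as long as you pass the real capture resolution. Here, 640x480 is just my camera's YUYV mode and read_camera is the example binary name from the compile command above:

export LD_LIBRARY_PATH="$HOME/ffmpeg_build/lib:$LD_LIBRARY_PATH"
./read_camera
~/bin/ffplay -f rawvideo -pixel_format yuv420p -video_size 640x480 output.yuv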

More tutorials in this series are on the way; subscriptions, follows, favorites, comments, and likes are all welcome~~( ̄▽ ̄~)~

That's all, woof (∪。∪)。。。zzz
