GRASS Programmer's Manual 6.4.2 (2012)
gsd_img_mpeg.c
/*!
   \file gsd_img_mpeg.c

   \brief OGSF library - MPEG (FFMPEG) output of the rendered scene
 */
#include <stdlib.h>
#include <string.h>

#include <grass/gis.h>
#include <grass/glocale.h>
#include <grass/ogsf_proto.h>
#include <grass/gstypes.h>

/* FFMPEG stuff */
#ifdef HAVE_FFMPEG
#include <avformat.h>

/* 5 seconds stream duration */
#define STREAM_DURATION 5.0
#define STREAM_FRAME_RATE 25	/* 25 images/s */
#define STREAM_NB_FRAMES ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
#define STREAM_PIX_FMT PIX_FMT_YUV420P	/* default pix_fmt */

AVFrame *picture, *tmp_picture;
uint8_t *video_outbuf;
int frame_count, video_outbuf_size;

AVOutputFormat *fmt;
AVFormatContext *oc;
AVStream *video_st;

/*!
   \brief Add a new video stream to the output format context

   \param oc output format context
   \param codec_id video codec id
   \param w,h frame width and height

   \return pointer to the new AVStream
   \return NULL on failure
 */
static AVStream *add_video_stream(AVFormatContext * oc, int codec_id, int w,
                                  int h)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 0);
    if (!st) {
        G_warning(_("Unable to allocate stream"));
        return NULL;
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_VIDEO;

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width = w;
    c->height = h;
    /* time base: this is the fundamental unit of time (in seconds) in
       terms of which frame timestamps are represented. For fixed-fps
       content, the time base should be 1/framerate and timestamp
       increments should be identically 1. */
    c->time_base.den = STREAM_FRAME_RATE;
    c->time_base.num = 1;
    c->gop_size = 12;           /* emit one intra frame every twelve frames at most */
    c->pix_fmt = STREAM_PIX_FMT;
    if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO) {
        /* Needed to avoid using macroblocks in which some coeffs overflow.
           This does not happen with normal video, it just happens here as
           the motion of the chroma plane does not match the luma plane. */
        c->mb_decision = 2;
    }
    /* some formats want stream headers to be separate */
    if (!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov")
        || !strcmp(oc->oformat->name, "3gp"))
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    c->flags |= CODEC_FLAG_QSCALE;
    c->global_quality = st->quality = FF_QP2LAMBDA * 10;

    return st;
}

/*!
   \brief Allocate a picture buffer for the given pixel format and size

   \param pix_fmt pixel format
   \param width,height picture dimensions

   \return pointer to the allocated AVFrame
   \return NULL on failure
 */
static AVFrame *alloc_picture(int pix_fmt, int width, int height)
{
    AVFrame *picture;
    uint8_t *picture_buf;
    int size;

    picture = avcodec_alloc_frame();

    if (!picture)
        return NULL;

    size = avpicture_get_size(pix_fmt, width, height);
    picture_buf = av_malloc(size);

    if (!picture_buf) {
        av_free(picture);
        return NULL;
    }

    avpicture_fill((AVPicture *) picture, picture_buf,
                   pix_fmt, width, height);

    return picture;
}

/*!
   \brief Open the video encoder and allocate the encode buffers

   \param oc output format context
   \param st video stream
 */
static void open_video(AVFormatContext * oc, AVStream * st)
{
    AVCodec *codec;
    AVCodecContext *c;

    c = st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        G_warning(_("Video codec not found"));
        return;
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        G_warning(_("Unable to open codec"));
        return;
    }

    video_outbuf = NULL;
    if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate output buffer */
        /* XXX: API change will be done */
        /* buffers passed into lav* can be allocated any way you prefer,
           as long as they're aligned enough for the architecture, and
           they're freed appropriately (such as using av_free for buffers
           allocated with av_malloc) */
        video_outbuf_size = 200000;
        video_outbuf = av_malloc(video_outbuf_size);
    }

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        G_warning(_("Unable to allocate picture"));
        return;
    }

    /* if the output format is not YUV420P, then a temporary YUV420P
       picture is needed too. It is then converted to the required
       output format */
    tmp_picture = NULL;
    if (c->pix_fmt != PIX_FMT_YUV420P) {
        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
        if (!tmp_picture) {
            G_warning(_("Unable to allocate temporary picture"));
            return;
        }
    }
}

/*!
   \brief Encode the current picture and write it to the media file

   \param oc output format context
   \param st video stream
 */
static void write_video_frame(AVFormatContext * oc, AVStream * st)
{
    int out_size, ret;
    AVCodecContext *c;

    c = st->codec;

    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case. The API will change slightly in the near
           future for that */
        AVPacket pkt;

        av_init_packet(&pkt);

        pkt.flags |= PKT_FLAG_KEY;
        pkt.stream_index = st->index;
        pkt.data = (uint8_t *) picture;
        pkt.size = sizeof(AVPicture);

        ret = av_write_frame(oc, &pkt);
    }
    else {
        /* encode the image */
        out_size =
            avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0) {
            AVPacket pkt;

            av_init_packet(&pkt);

            pkt.pts =
                av_rescale_q(c->coded_frame->pts, c->time_base,
                             st->time_base);
            if (c->coded_frame->key_frame)
                pkt.flags |= PKT_FLAG_KEY;
            pkt.stream_index = st->index;
            pkt.data = video_outbuf;
            pkt.size = out_size;

            /* write the compressed frame to the media file */
            ret = av_write_frame(oc, &pkt);
        }
        else {
            ret = 0;
        }
    }
    if (ret != 0) {
        G_warning(_("Error while writing video frame"));
        return;
    }
    frame_count++;
}

/*!
   \brief Close the video encoder and free the associated buffers

   \param oc output format context
   \param st video stream
 */
static void close_video(AVFormatContext * oc, AVStream * st)
{
    avcodec_close(st->codec);
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);
}

#endif

/*!
   \brief Initialize a new MPEG stream

   Sets up the output format and video stream for the current OpenGL
   viewport size and opens the output file.

   \param filename output file name

   \return 0 on success
   \return -1 on failure
 */
int gsd_init_mpeg(const char *filename)
{
#ifdef HAVE_FFMPEG
    GLuint l, r, b, t;
    GLint tmp[4];

    glGetIntegerv(GL_VIEWPORT, tmp);
    l = tmp[0];
    r = tmp[0] + tmp[2] - 1;
    b = tmp[1];
    t = tmp[1] + tmp[3] - 1;

    G_verbose_message(_("Opening MPEG stream <%s>..."), filename);

    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    /* auto detect the output format from the name. default is mpeg. */
    fmt = guess_format(NULL, filename, NULL);
    if (!fmt) {
        G_warning(_("Unable to deduce output format from file extension: using MPEG"));
        fmt = guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        G_warning(_("Unable to find suitable output format"));
        return (-1);
    }

    /* allocate the output media context */
    oc = av_alloc_format_context();
    if (!oc) {
        G_warning(_("Out of memory"));
        return (-1);
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* if you want to hardcode the codec (e.g. #ifdef USE_XVID)
       this may be the place to do it */
#ifdef USE_XVID
    fmt->video_codec = CODEC_ID_XVID;
#endif

    video_st = NULL;
    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st =
            add_video_stream(oc, fmt->video_codec, (r - l + 1), (t - b + 1));
    }

    /* set the output parameters (must be done even if no parameters) */
    if (av_set_parameters(oc, NULL) < 0) {
        G_warning(_("Invalid output format parameters"));
        return (-1);
    }

    dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the video
       codec and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
            G_warning(_("Unable to open <%s>"), filename);
            return (-1);
        }
    }

    /* write the stream header, if any */
    av_write_header(oc);

#else
    G_warning(_("NVIZ has not been built with MPEG output support"));
    return (-1);
#endif
    return (0);
}

/*!
   \brief Write the current frame to the MPEG stream

   Grabs the rendered image from the OpenGL viewport, converts it to
   YUV420P and hands it to the video encoder.

   \return 0
 */
int gsd_write_mpegframe(void)
{
#ifdef HAVE_FFMPEG
    unsigned int xsize, ysize;
    int x, y, xy, xy_uv;
    int yy, uu, vv;
    unsigned char *pixbuf;

    gsd_getimage(&pixbuf, &xsize, &ysize);
    xy = xy_uv = 0;
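
    /* Per-pixel conversion from RGBA to YCbCr using the ITU-R BT.601
       "studio swing" coefficients (luma offset 16, chroma offset 128).
       For the YUV420P target the chroma planes are stored at quarter
       resolution, so U and V are written only once per 2x2 pixel block. */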
    for (y = ysize - 1; y >= 0; y--) {
        for (x = 0; x < xsize; x++) {
            unsigned char r = pixbuf[(y * xsize + x) * 4 + 0];
            unsigned char g = pixbuf[(y * xsize + x) * 4 + 1];
            unsigned char b = pixbuf[(y * xsize + x) * 4 + 2];

            yy = (0.257 * r) + (0.504 * g) + (0.098 * b) + 16;
            vv = (0.439 * r) - (0.368 * g) - (0.071 * b) + 128;
            uu = -(0.148 * r) - (0.291 * g) + (0.439 * b) + 128;

            picture->data[0][xy] = yy;

            if ((x % 2) && (y % 2)) {
                picture->data[1][xy_uv] = uu;
                picture->data[2][xy_uv] = vv;
                xy_uv++;
            }

            xy++;
        }
    }
    G_free(pixbuf);

    write_video_frame(oc, video_st);

#endif

    return (0);
}

/*!
   \brief Close the MPEG stream

   Writes the trailer, closes the output file and frees the FFMPEG
   resources.

   \return 0
 */
int gsd_close_mpeg(void)
{
#ifdef HAVE_FFMPEG
    int i;

    close_video(oc, video_st);

    /* write the trailer, if any */
    av_write_trailer(oc);

    /* free the streams */
    for (i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
#if (LIBAVFORMAT_VERSION_INT>>16) < 52
        url_fclose(&oc->pb);
#else
        url_fclose(oc->pb);
#endif
    }

    /* free the format context */
    av_free(oc);

    G_debug(3, "Closed MPEG stream");
#endif

    return (0);
}
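
Typical usage is a simple init / per-frame write / close sequence. The sketch below is not part of gsd_img_mpeg.c; render_frame() and the nframes count are hypothetical placeholders supplied by the caller (e.g. NVIZ animation code), and a current OpenGL context with the scene already set up is assumed.

#include <grass/ogsf_proto.h>

/* hypothetical caller-side drawing routine, not part of this file */
void render_frame(int frame);

int record_animation(const char *filename, int nframes)
{
    int i;

    if (gsd_init_mpeg(filename) != 0)
        return -1;              /* format/codec setup or file open failed */

    for (i = 0; i < nframes; i++) {
        render_frame(i);        /* redraw the scene for this time step */
        gsd_write_mpegframe();  /* grab the viewport, encode and write it */
    }

    gsd_close_mpeg();           /* write trailer and release resources */

    return 0;
}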