author | Vincent Torri <vincent.torri@gmail.com> | 2010-10-20 07:59:20 +0000 |
---|---|---|
committer | Vincent Torri <vincent.torri@gmail.com> | 2010-10-20 07:59:20 +0000 |
commit | 3382c0d5961be6c07e438076408b863eeda7c158 (patch) | |
tree | 313e7293cbf46ab11834d7228b5b0aac587e02ed /legacy/emotion | |
parent | 2d28628c95718de0e9ea89fd1e8db321ea8283b1 (diff) |
The GStreamer backend now uses the GStreamer playbin2 element to build the
pipeline automatically. The code is simpler, and the source-specific
pipelines (CDDA, DVD, file, URI, V4L) are no longer needed.
SVN revision: 53649
Diffstat (limited to 'legacy/emotion')
10 files changed, 416 insertions, 1250 deletions
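For context, here is a minimal sketch of the playbin2 approach described in the commit message: a single element builds the whole decoding pipeline, the media location is handed over as a URI, and the stream counts are read back as plain properties. This is an illustrative standalone program written against the GStreamer 0.10 API (which provides playbin2), not code from the commit; the backend itself additionally installs its own video/audio sinks, as the diff below shows. It should build with the usual `pkg-config --cflags --libs gstreamer-0.10` flags.

```c
#include <gst/gst.h>

int
main(int argc, char **argv)
{
   GstElement *playbin;
   gint n_video = 0, n_audio = 0;

   gst_init(&argc, &argv);

   if (argc < 2)
     {
        g_printerr("usage: %s <uri>  (e.g. file:///tmp/clip.ogg)\n", argv[0]);
        return 1;
     }

   /* One element builds the whole decoding pipeline internally,
    * replacing the hand-written cdda/dvd/file/uri/v4l pipelines. */
   playbin = gst_element_factory_make("playbin2", "pipeline");
   if (!playbin)
     return 1;

   /* playbin2 takes a URI, not a plain file path. */
   g_object_set(G_OBJECT(playbin), "uri", argv[1], NULL);

   /* Pre-rolling to PAUSED negotiates caps and exposes the streams. */
   gst_element_set_state(playbin, GST_STATE_PAUSED);
   gst_element_get_state(playbin, NULL, NULL, GST_CLOCK_TIME_NONE);

   /* Stream counts are plain properties; no demuxer pads to wire up. */
   g_object_get(G_OBJECT(playbin),
                "n-video", &n_video,
                "n-audio", &n_audio,
                NULL);
   g_print("%d video stream(s), %d audio stream(s)\n", n_video, n_audio);

   gst_element_set_state(playbin, GST_STATE_NULL);
   gst_object_unref(playbin);

   return 0;
}
```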
diff --git a/legacy/emotion/src/modules/gstreamer/Makefile.am b/legacy/emotion/src/modules/gstreamer/Makefile.am index e38bb0dca8..a25daf35ba 100644 --- a/legacy/emotion/src/modules/gstreamer/Makefile.am +++ b/legacy/emotion/src/modules/gstreamer/Makefile.am | |||
@@ -20,12 +20,7 @@ pkg_LTLIBRARIES = gstreamer.la | |||
20 | gstreamer_la_SOURCES = \ | 20 | gstreamer_la_SOURCES = \ |
21 | emotion_gstreamer.c \ | 21 | emotion_gstreamer.c \ |
22 | emotion_gstreamer_pipeline.c \ | 22 | emotion_gstreamer_pipeline.c \ |
23 | emotion_gstreamer_pipeline.h \ | 23 | emotion_gstreamer_pipeline.h |
24 | emotion_gstreamer_pipeline_cdda.c \ | ||
25 | emotion_gstreamer_pipeline_dvd.c \ | ||
26 | emotion_gstreamer_pipeline_file.c \ | ||
27 | emotion_gstreamer_pipeline_uri.c \ | ||
28 | emotion_gstreamer_pipeline_v4l.c | ||
29 | gstreamer_la_LIBADD = @GSTREAMER_LIBS@ $(top_builddir)/src/lib/libemotion.la | 24 | gstreamer_la_LIBADD = @GSTREAMER_LIBS@ $(top_builddir)/src/lib/libemotion.la |
30 | gstreamer_la_LDFLAGS = -no-undefined @lt_enable_auto_import@ -module -avoid-version | 25 | gstreamer_la_LDFLAGS = -no-undefined @lt_enable_auto_import@ -module -avoid-version |
31 | gstreamer_la_LIBTOOLFLAGS = --tag=disable-static | 26 | gstreamer_la_LIBTOOLFLAGS = --tag=disable-static |
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c index 435e1bab57..d4705689bd 100644 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c +++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.c | |||
@@ -233,8 +233,8 @@ static Emotion_Video_Module em_module = | |||
233 | }; | 233 | }; |
234 | 234 | ||
235 | static unsigned char | 235 | static unsigned char |
236 | em_init(Evas_Object *obj, | 236 | em_init(Evas_Object *obj, |
237 | void **emotion_video, | 237 | void **emotion_video, |
238 | Emotion_Module_Options *opt __UNUSED__) | 238 | Emotion_Module_Options *opt __UNUSED__) |
239 | { | 239 | { |
240 | Emotion_Gstreamer_Video *ev; | 240 | Emotion_Gstreamer_Video *ev; |
@@ -255,8 +255,6 @@ em_init(Evas_Object *obj, | |||
255 | 255 | ||
256 | /* Default values */ | 256 | /* Default values */ |
257 | ev->ratio = 1.0; | 257 | ev->ratio = 1.0; |
258 | ev->video_sink_nbr = 0; | ||
259 | ev->audio_sink_nbr = 0; | ||
260 | ev->vis = EMOTION_VIS_NONE; | 258 | ev->vis = EMOTION_VIS_NONE; |
261 | ev->volume = 0.8; | 259 | ev->volume = 0.8; |
262 | 260 | ||
@@ -279,8 +277,8 @@ static int | |||
279 | em_shutdown(void *video) | 277 | em_shutdown(void *video) |
280 | { | 278 | { |
281 | Emotion_Gstreamer_Video *ev; | 279 | Emotion_Gstreamer_Video *ev; |
282 | Emotion_Audio_Sink *asink; | 280 | Emotion_Audio_Stream *astream; |
283 | Emotion_Video_Sink *vsink; | 281 | Emotion_Video_Stream *vstream; |
284 | 282 | ||
285 | ev = (Emotion_Gstreamer_Video *)video; | 283 | ev = (Emotion_Gstreamer_Video *)video; |
286 | if (!ev) | 284 | if (!ev) |
@@ -291,10 +289,12 @@ em_shutdown(void *video) | |||
291 | /* FIXME: and the evas object ? */ | 289 | /* FIXME: and the evas object ? */ |
292 | if (ev->obj_data) free(ev->obj_data); | 290 | if (ev->obj_data) free(ev->obj_data); |
293 | 291 | ||
294 | EINA_LIST_FREE(ev->audio_sinks, asink) | 292 | EINA_LIST_FREE(ev->audio_streams, astream) |
295 | free(asink); | 293 | free(astream); |
296 | EINA_LIST_FREE(ev->video_sinks, vsink) | 294 | EINA_LIST_FREE(ev->video_streams, vstream) |
297 | free(vsink); | 295 | free(vstream); |
296 | |||
297 | gst_deinit(); | ||
298 | 298 | ||
299 | free(ev); | 299 | free(ev); |
300 | 300 | ||
@@ -311,127 +311,37 @@ em_file_open(const char *file, | |||
311 | 311 | ||
312 | ev = (Emotion_Gstreamer_Video *)video; | 312 | ev = (Emotion_Gstreamer_Video *)video; |
313 | 313 | ||
314 | ev->pipeline = gst_pipeline_new("pipeline"); | 314 | if (!_emotion_pipeline_build(ev, file)) |
315 | if (!ev->pipeline) | 315 | return EINA_FALSE; |
316 | return 0; | ||
317 | |||
318 | ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline)); | ||
319 | if (!ev->eos_bus) | ||
320 | { | ||
321 | gst_object_unref(ev->pipeline); | ||
322 | return 0; | ||
323 | } | ||
324 | 316 | ||
325 | /* Evas Object */ | 317 | /* Evas Object */ |
326 | ev->obj = obj; | 318 | ev->obj = obj; |
327 | 319 | ||
328 | /* CD Audio */ | ||
329 | if (strstr(file, "cdda://")) | ||
330 | { | ||
331 | const char *device = NULL; | ||
332 | unsigned int track = 1; | ||
333 | |||
334 | device = file + strlen("cdda://"); | ||
335 | if (device[0] == '/') | ||
336 | { | ||
337 | char *tmp; | ||
338 | |||
339 | if ((tmp = strchr(device, '?')) || (tmp = strchr(device, '#'))) | ||
340 | { | ||
341 | sscanf(tmp + 1, "%d", &track); | ||
342 | tmp[0] = '\0'; | ||
343 | } | ||
344 | } | ||
345 | else | ||
346 | { | ||
347 | device = NULL; | ||
348 | sscanf(file, "cdda://%d", &track); | ||
349 | } | ||
350 | DBG("Build CD Audio pipeline"); | ||
351 | if (!(emotion_pipeline_cdda_build(ev, device, track))) | ||
352 | { | ||
353 | ERR("Could not build CD Audio pipeline"); | ||
354 | gst_object_unref(ev->pipeline); | ||
355 | return 0; | ||
356 | } | ||
357 | } | ||
358 | /* Dvd */ | ||
359 | else if (strstr(file, "dvd://")) | ||
360 | { | ||
361 | |||
362 | DBG("Build DVD pipeline"); | ||
363 | if (!(emotion_pipeline_dvd_build(ev, NULL))) | ||
364 | { | ||
365 | ERR("Could not build DVD pipeline"); | ||
366 | gst_object_unref(ev->pipeline); | ||
367 | return 0; | ||
368 | } | ||
369 | } | ||
370 | /* http */ | ||
371 | else if (strstr(file, "http://")) | ||
372 | { | ||
373 | DBG("Build URI pipeline"); | ||
374 | if (!(emotion_pipeline_uri_build(ev, file))) | ||
375 | { | ||
376 | ERR("Could not build URI pipeline"); | ||
377 | gst_object_unref(ev->pipeline); | ||
378 | return 0; | ||
379 | } | ||
380 | } | ||
381 | /* v4l */ | ||
382 | else if (strstr(file, "v4l://")) | ||
383 | { | ||
384 | DBG("Build V4L pipeline"); | ||
385 | if (!(emotion_pipeline_v4l_build(ev, file))) | ||
386 | { | ||
387 | ERR("Could not build V4L pipeline"); | ||
388 | gst_object_unref(ev->pipeline); | ||
389 | return 0; | ||
390 | } | ||
391 | } | ||
392 | /* Normal media file */ | ||
393 | else | ||
394 | { | ||
395 | const char *filename; | ||
396 | |||
397 | filename = strstr(file, "file://") | ||
398 | ? file + strlen("file://") | ||
399 | : file; | ||
400 | |||
401 | DBG("Build file pipeline"); | ||
402 | if (!(emotion_pipeline_file_build(ev, filename))) | ||
403 | { | ||
404 | ERR("Could not build File pipeline"); | ||
405 | gst_object_unref(ev->pipeline); | ||
406 | return 0; | ||
407 | } | ||
408 | } | ||
409 | |||
410 | ev->position = 0.0; | 320 | ev->position = 0.0; |
411 | 321 | ||
412 | { | 322 | { |
413 | /* to recap: */ | 323 | /* to recap: */ |
414 | Emotion_Video_Sink *vsink; | 324 | Emotion_Video_Stream *vstream; |
415 | Emotion_Audio_Sink *asink; | 325 | Emotion_Audio_Stream *astream; |
416 | 326 | ||
417 | vsink = (Emotion_Video_Sink *)eina_list_data_get(ev->video_sinks); | 327 | vstream = (Emotion_Video_Stream *)eina_list_data_get(ev->video_streams); |
418 | if (vsink) | 328 | if (vstream) |
419 | { | 329 | { |
420 | DBG("video size=%dx%d, fps=%d/%d, " | 330 | DBG("video size=%dx%d, fps=%d/%d, " |
421 | "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT, | 331 | "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT, |
422 | vsink->width, vsink->height, vsink->fps_num, vsink->fps_den, | 332 | vstream->width, vstream->height, vstream->fps_num, vstream->fps_den, |
423 | GST_FOURCC_ARGS(vsink->fourcc), | 333 | GST_FOURCC_ARGS(vstream->fourcc), |
424 | GST_TIME_ARGS((guint64)(vsink->length_time * GST_SECOND))); | 334 | GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND))); |
425 | } | 335 | } |
426 | 336 | ||
427 | asink = (Emotion_Audio_Sink *)eina_list_data_get(ev->audio_sinks); | 337 | astream = (Emotion_Audio_Stream *)eina_list_data_get(ev->audio_streams); |
428 | if (asink) | 338 | if (astream) |
429 | { | 339 | { |
430 | DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT, | 340 | DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT, |
431 | asink->channels, asink->samplerate, | 341 | astream->channels, astream->samplerate, |
432 | GST_TIME_ARGS((guint64)(asink->length_time * GST_SECOND))); | 342 | GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND))); |
433 | } | 343 | } |
434 | } | 344 | } |
435 | 345 | ||
436 | if (ev->metadata) | 346 | if (ev->metadata) |
437 | _free_metadata(ev->metadata); | 347 | _free_metadata(ev->metadata); |
@@ -449,18 +359,18 @@ static void | |||
449 | em_file_close(void *video) | 359 | em_file_close(void *video) |
450 | { | 360 | { |
451 | Emotion_Gstreamer_Video *ev; | 361 | Emotion_Gstreamer_Video *ev; |
452 | Emotion_Audio_Sink *asink; | 362 | Emotion_Audio_Stream *astream; |
453 | Emotion_Video_Sink *vsink; | 363 | Emotion_Video_Stream *vstream; |
454 | 364 | ||
455 | ev = (Emotion_Gstreamer_Video *)video; | 365 | ev = (Emotion_Gstreamer_Video *)video; |
456 | if (!ev) | 366 | if (!ev) |
457 | return; | 367 | return; |
458 | 368 | ||
459 | /* we clear the sink lists */ | 369 | /* we clear the stream lists */ |
460 | EINA_LIST_FREE(ev->audio_sinks, asink) | 370 | EINA_LIST_FREE(ev->audio_streams, astream) |
461 | free(asink); | 371 | free(astream); |
462 | EINA_LIST_FREE(ev->video_sinks, vsink) | 372 | EINA_LIST_FREE(ev->video_streams, vstream) |
463 | free(vsink); | 373 | free(vstream); |
464 | 374 | ||
465 | /* shutdown eos */ | 375 | /* shutdown eos */ |
466 | if (ev->eos_timer) | 376 | if (ev->eos_timer) |
@@ -528,15 +438,15 @@ em_size_get(void *video, | |||
528 | int *height) | 438 | int *height) |
529 | { | 439 | { |
530 | Emotion_Gstreamer_Video *ev; | 440 | Emotion_Gstreamer_Video *ev; |
531 | Emotion_Video_Sink *vsink; | 441 | Emotion_Video_Stream *vstream; |
532 | 442 | ||
533 | ev = (Emotion_Gstreamer_Video *)video; | 443 | ev = (Emotion_Gstreamer_Video *)video; |
534 | 444 | ||
535 | vsink = (Emotion_Video_Sink *)eina_list_nth(ev->video_sinks, ev->video_sink_nbr); | 445 | vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); |
536 | if (vsink) | 446 | if (vstream) |
537 | { | 447 | { |
538 | if (width) *width = vsink->width; | 448 | if (width) *width = vstream->width; |
539 | if (height) *height = vsink->height; | 449 | if (height) *height = vstream->height; |
540 | } | 450 | } |
541 | else | 451 | else |
542 | { | 452 | { |
@@ -550,17 +460,19 @@ em_pos_set(void *video, | |||
550 | double pos) | 460 | double pos) |
551 | { | 461 | { |
552 | Emotion_Gstreamer_Video *ev; | 462 | Emotion_Gstreamer_Video *ev; |
553 | Emotion_Video_Sink *vsink; | 463 | GstElement *vsink; |
554 | Emotion_Audio_Sink *asink; | 464 | GstElement *asink; |
555 | 465 | ||
556 | ev = (Emotion_Gstreamer_Video *)video; | 466 | ev = (Emotion_Gstreamer_Video *)video; |
557 | 467 | ||
558 | vsink = (Emotion_Video_Sink *)eina_list_nth(ev->video_sinks, ev->video_sink_nbr); | 468 | g_object_get (G_OBJECT (ev->pipeline), |
559 | asink = (Emotion_Audio_Sink *)eina_list_nth(ev->audio_sinks, ev->audio_sink_nbr); | 469 | "video-sink", &vsink, |
470 | "audio-sink", &asink, | ||
471 | NULL); | ||
560 | 472 | ||
561 | if (vsink) | 473 | if (vsink) |
562 | { | 474 | { |
563 | gst_element_seek(vsink->sink, 1.0, | 475 | gst_element_seek(vsink, 1.0, |
564 | GST_FORMAT_TIME, | 476 | GST_FORMAT_TIME, |
565 | GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH, | 477 | GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH, |
566 | GST_SEEK_TYPE_SET, | 478 | GST_SEEK_TYPE_SET, |
@@ -569,7 +481,7 @@ em_pos_set(void *video, | |||
569 | } | 481 | } |
570 | if (asink) | 482 | if (asink) |
571 | { | 483 | { |
572 | gst_element_seek(asink->sink, 1.0, | 484 | gst_element_seek(asink, 1.0, |
573 | GST_FORMAT_TIME, | 485 | GST_FORMAT_TIME, |
574 | GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH, | 486 | GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH, |
575 | GST_SEEK_TYPE_SET, | 487 | GST_SEEK_TYPE_SET, |
@@ -582,8 +494,8 @@ static double | |||
582 | em_len_get(void *video) | 494 | em_len_get(void *video) |
583 | { | 495 | { |
584 | Emotion_Gstreamer_Video *ev; | 496 | Emotion_Gstreamer_Video *ev; |
585 | Emotion_Video_Sink *vsink; | 497 | Emotion_Video_Stream *vstream; |
586 | Emotion_Audio_Sink *asink; | 498 | Emotion_Audio_Stream *astream; |
587 | Eina_List *l; | 499 | Eina_List *l; |
588 | GstFormat fmt; | 500 | GstFormat fmt; |
589 | gint64 val; | 501 | gint64 val; |
@@ -608,13 +520,13 @@ em_len_get(void *video) | |||
608 | return val / 1000000000.0; | 520 | return val / 1000000000.0; |
609 | 521 | ||
610 | fallback: | 522 | fallback: |
611 | EINA_LIST_FOREACH(ev->audio_sinks, l, asink) | 523 | EINA_LIST_FOREACH(ev->audio_streams, l, astream) |
612 | if (asink->length_time >= 0) | 524 | if (astream->length_time >= 0) |
613 | return asink->length_time; | 525 | return astream->length_time; |
614 | 526 | ||
615 | EINA_LIST_FOREACH(ev->video_sinks, l, vsink) | 527 | EINA_LIST_FOREACH(ev->video_streams, l, vstream) |
616 | if (vsink->length_time >= 0) | 528 | if (vstream->length_time >= 0) |
617 | return vsink->length_time; | 529 | return vstream->length_time; |
618 | 530 | ||
619 | return 0.0; | 531 | return 0.0; |
620 | } | 532 | } |
@@ -623,13 +535,13 @@ static int | |||
623 | em_fps_num_get(void *video) | 535 | em_fps_num_get(void *video) |
624 | { | 536 | { |
625 | Emotion_Gstreamer_Video *ev; | 537 | Emotion_Gstreamer_Video *ev; |
626 | Emotion_Video_Sink *vsink; | 538 | Emotion_Video_Stream *vstream; |
627 | 539 | ||
628 | ev = (Emotion_Gstreamer_Video *)video; | 540 | ev = (Emotion_Gstreamer_Video *)video; |
629 | 541 | ||
630 | vsink = (Emotion_Video_Sink *)eina_list_nth(ev->video_sinks, ev->video_sink_nbr); | 542 | vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); |
631 | if (vsink) | 543 | if (vstream) |
632 | return vsink->fps_num; | 544 | return vstream->fps_num; |
633 | 545 | ||
634 | return 0; | 546 | return 0; |
635 | } | 547 | } |
@@ -638,13 +550,13 @@ static int | |||
638 | em_fps_den_get(void *video) | 550 | em_fps_den_get(void *video) |
639 | { | 551 | { |
640 | Emotion_Gstreamer_Video *ev; | 552 | Emotion_Gstreamer_Video *ev; |
641 | Emotion_Video_Sink *vsink; | 553 | Emotion_Video_Stream *vstream; |
642 | 554 | ||
643 | ev = (Emotion_Gstreamer_Video *)video; | 555 | ev = (Emotion_Gstreamer_Video *)video; |
644 | 556 | ||
645 | vsink = (Emotion_Video_Sink *)eina_list_nth(ev->video_sinks, ev->video_sink_nbr); | 557 | vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); |
646 | if (vsink) | 558 | if (vstream) |
647 | return vsink->fps_den; | 559 | return vstream->fps_den; |
648 | 560 | ||
649 | return 1; | 561 | return 1; |
650 | } | 562 | } |
@@ -653,13 +565,13 @@ static double | |||
653 | em_fps_get(void *video) | 565 | em_fps_get(void *video) |
654 | { | 566 | { |
655 | Emotion_Gstreamer_Video *ev; | 567 | Emotion_Gstreamer_Video *ev; |
656 | Emotion_Video_Sink *vsink; | 568 | Emotion_Video_Stream *vstream; |
657 | 569 | ||
658 | ev = (Emotion_Gstreamer_Video *)video; | 570 | ev = (Emotion_Gstreamer_Video *)video; |
659 | 571 | ||
660 | vsink = (Emotion_Video_Sink *)eina_list_nth(ev->video_sinks, ev->video_sink_nbr); | 572 | vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); |
661 | if (vsink) | 573 | if (vstream) |
662 | return (double)vsink->fps_num / (double)vsink->fps_den; | 574 | return (double)vstream->fps_num / (double)vstream->fps_den; |
663 | 575 | ||
664 | return 0.0; | 576 | return 0.0; |
665 | } | 577 | } |
@@ -749,7 +661,7 @@ em_video_handled(void *video) | |||
749 | 661 | ||
750 | ev = (Emotion_Gstreamer_Video *)video; | 662 | ev = (Emotion_Gstreamer_Video *)video; |
751 | 663 | ||
752 | if (!eina_list_count(ev->video_sinks)) | 664 | if (!eina_list_count(ev->video_streams)) |
753 | return 0; | 665 | return 0; |
754 | 666 | ||
755 | return 1; | 667 | return 1; |
@@ -762,7 +674,7 @@ em_audio_handled(void *video) | |||
762 | 674 | ||
763 | ev = (Emotion_Gstreamer_Video *)video; | 675 | ev = (Emotion_Gstreamer_Video *)video; |
764 | 676 | ||
765 | if (!eina_list_count(ev->audio_sinks)) | 677 | if (!eina_list_count(ev->audio_streams)) |
766 | return 0; | 678 | return 0; |
767 | 679 | ||
768 | return 1; | 680 | return 1; |
@@ -783,14 +695,14 @@ static Emotion_Format | |||
783 | em_format_get(void *video) | 695 | em_format_get(void *video) |
784 | { | 696 | { |
785 | Emotion_Gstreamer_Video *ev; | 697 | Emotion_Gstreamer_Video *ev; |
786 | Emotion_Video_Sink *vsink; | 698 | Emotion_Video_Stream *vstream; |
787 | 699 | ||
788 | ev = (Emotion_Gstreamer_Video *)video; | 700 | ev = (Emotion_Gstreamer_Video *)video; |
789 | 701 | ||
790 | vsink = (Emotion_Video_Sink *)eina_list_nth(ev->video_sinks, ev->video_sink_nbr); | 702 | vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); |
791 | if (vsink) | 703 | if (vstream) |
792 | { | 704 | { |
793 | switch (vsink->fourcc) | 705 | switch (vstream->fourcc) |
794 | { | 706 | { |
795 | case GST_MAKE_FOURCC('I', '4', '2', '0'): | 707 | case GST_MAKE_FOURCC('I', '4', '2', '0'): |
796 | return EMOTION_FORMAT_I420; | 708 | return EMOTION_FORMAT_I420; |
@@ -811,15 +723,15 @@ static void | |||
811 | em_video_data_size_get(void *video, int *w, int *h) | 723 | em_video_data_size_get(void *video, int *w, int *h) |
812 | { | 724 | { |
813 | Emotion_Gstreamer_Video *ev; | 725 | Emotion_Gstreamer_Video *ev; |
814 | Emotion_Video_Sink *vsink; | 726 | Emotion_Video_Stream *vstream; |
815 | 727 | ||
816 | ev = (Emotion_Gstreamer_Video *)video; | 728 | ev = (Emotion_Gstreamer_Video *)video; |
817 | 729 | ||
818 | vsink = (Emotion_Video_Sink *)eina_list_nth(ev->video_sinks, ev->video_sink_nbr); | 730 | vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); |
819 | if (vsink) | 731 | if (vstream) |
820 | { | 732 | { |
821 | *w = vsink->width; | 733 | *w = vstream->width; |
822 | *h = vsink->height; | 734 | *h = vstream->height; |
823 | } | 735 | } |
824 | else | 736 | else |
825 | { | 737 | { |
@@ -912,7 +824,7 @@ em_video_channel_count(void *video) | |||
912 | 824 | ||
913 | ev = (Emotion_Gstreamer_Video *)video; | 825 | ev = (Emotion_Gstreamer_Video *)video; |
914 | 826 | ||
915 | return eina_list_count(ev->video_sinks); | 827 | return eina_list_count(ev->video_streams); |
916 | } | 828 | } |
917 | 829 | ||
918 | static void | 830 | static void |
@@ -936,7 +848,7 @@ em_video_channel_get(void *video) | |||
936 | 848 | ||
937 | ev = (Emotion_Gstreamer_Video *)video; | 849 | ev = (Emotion_Gstreamer_Video *)video; |
938 | 850 | ||
939 | return ev->video_sink_nbr; | 851 | return ev->video_stream_nbr; |
940 | } | 852 | } |
941 | 853 | ||
942 | static const char * | 854 | static const char * |
@@ -976,7 +888,7 @@ em_audio_channel_count(void *video) | |||
976 | 888 | ||
977 | ev = (Emotion_Gstreamer_Video *)video; | 889 | ev = (Emotion_Gstreamer_Video *)video; |
978 | 890 | ||
979 | return eina_list_count(ev->audio_sinks); | 891 | return eina_list_count(ev->audio_streams); |
980 | } | 892 | } |
981 | 893 | ||
982 | static void | 894 | static void |
@@ -1000,7 +912,7 @@ em_audio_channel_get(void *video) | |||
1000 | 912 | ||
1001 | ev = (Emotion_Gstreamer_Video *)video; | 913 | ev = (Emotion_Gstreamer_Video *)video; |
1002 | 914 | ||
1003 | return ev->audio_sink_nbr; | 915 | return ev->audio_stream_nbr; |
1004 | } | 916 | } |
1005 | 917 | ||
1006 | static const char * | 918 | static const char * |
@@ -1015,7 +927,6 @@ em_audio_channel_mute_set(void *video, | |||
1015 | int mute) | 927 | int mute) |
1016 | { | 928 | { |
1017 | Emotion_Gstreamer_Video *ev; | 929 | Emotion_Gstreamer_Video *ev; |
1018 | GstElement *volume; | ||
1019 | 930 | ||
1020 | ev = (Emotion_Gstreamer_Video *)video; | 931 | ev = (Emotion_Gstreamer_Video *)video; |
1021 | 932 | ||
@@ -1023,15 +934,11 @@ em_audio_channel_mute_set(void *video, | |||
1023 | return; | 934 | return; |
1024 | 935 | ||
1025 | ev->audio_mute = mute; | 936 | ev->audio_mute = mute; |
1026 | volume = gst_bin_get_by_name(GST_BIN(ev->pipeline), "volume"); | ||
1027 | if (!volume) return; | ||
1028 | 937 | ||
1029 | if (mute) | 938 | if (mute) |
1030 | g_object_set(G_OBJECT(volume), "volume", 0.0, NULL); | 939 | g_object_set(G_OBJECT(ev->pipeline), "mute", 1, NULL); |
1031 | else | 940 | else |
1032 | g_object_set(G_OBJECT(volume), "volume", ev->volume, NULL); | 941 | g_object_set(G_OBJECT(ev->pipeline), "mute", 0, NULL); |
1033 | |||
1034 | gst_object_unref(volume); | ||
1035 | } | 942 | } |
1036 | 943 | ||
1037 | static int | 944 | static int |
@@ -1049,7 +956,6 @@ em_audio_channel_volume_set(void *video, | |||
1049 | double vol) | 956 | double vol) |
1050 | { | 957 | { |
1051 | Emotion_Gstreamer_Video *ev; | 958 | Emotion_Gstreamer_Video *ev; |
1052 | GstElement *volume; | ||
1053 | 959 | ||
1054 | ev = (Emotion_Gstreamer_Video *)video; | 960 | ev = (Emotion_Gstreamer_Video *)video; |
1055 | 961 | ||
@@ -1058,10 +964,7 @@ em_audio_channel_volume_set(void *video, | |||
1058 | if (vol > 1.0) | 964 | if (vol > 1.0) |
1059 | vol = 1.0; | 965 | vol = 1.0; |
1060 | ev->volume = vol; | 966 | ev->volume = vol; |
1061 | volume = gst_bin_get_by_name(GST_BIN(ev->pipeline), "volume"); | 967 | g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL); |
1062 | if (!volume) return; | ||
1063 | g_object_set(G_OBJECT(volume), "volume", vol, NULL); | ||
1064 | gst_object_unref(volume); | ||
1065 | } | 968 | } |
1066 | 969 | ||
1067 | static double | 970 | static double |
@@ -1380,15 +1283,15 @@ static void | |||
1380 | _em_buffer_read(void *data, void *buf, unsigned int nbyte __UNUSED__) | 1283 | _em_buffer_read(void *data, void *buf, unsigned int nbyte __UNUSED__) |
1381 | { | 1284 | { |
1382 | Emotion_Gstreamer_Video *ev; | 1285 | Emotion_Gstreamer_Video *ev; |
1383 | Emotion_Video_Sink *vsink; | 1286 | Emotion_Video_Stream *vstream; |
1384 | 1287 | ||
1385 | ev = (Emotion_Gstreamer_Video *)data; | 1288 | ev = (Emotion_Gstreamer_Video *)data; |
1386 | _emotion_frame_new(ev->obj); | 1289 | _emotion_frame_new(ev->obj); |
1387 | vsink = (Emotion_Video_Sink *)eina_list_nth(ev->video_sinks, ev->video_sink_nbr); | 1290 | vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1); |
1388 | if (vsink) | 1291 | if (vstream) |
1389 | { | 1292 | { |
1390 | _emotion_video_pos_update(ev->obj, ev->position, vsink->length_time); | 1293 | _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time); |
1391 | _emotion_frame_resize(ev->obj, vsink->width, vsink->height, ev->ratio); | 1294 | _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio); |
1392 | } | 1295 | } |
1393 | } | 1296 | } |
1394 | 1297 | ||
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h index 4892525513..79a2b9f715 100644 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h +++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer.h | |||
@@ -12,11 +12,10 @@ | |||
12 | #include "emotion_private.h" | 12 | #include "emotion_private.h" |
13 | 13 | ||
14 | 14 | ||
15 | typedef struct _Emotion_Video_Sink Emotion_Video_Sink; | 15 | typedef struct _Emotion_Video_Stream Emotion_Video_Stream; |
16 | 16 | ||
17 | struct _Emotion_Video_Sink | 17 | struct _Emotion_Video_Stream |
18 | { | 18 | { |
19 | GstElement *sink; | ||
20 | gdouble length_time; | 19 | gdouble length_time; |
21 | gint width; | 20 | gint width; |
22 | gint height; | 21 | gint height; |
@@ -25,11 +24,10 @@ struct _Emotion_Video_Sink | |||
25 | guint32 fourcc; | 24 | guint32 fourcc; |
26 | }; | 25 | }; |
27 | 26 | ||
28 | typedef struct _Emotion_Audio_Sink Emotion_Audio_Sink; | 27 | typedef struct _Emotion_Audio_Stream Emotion_Audio_Stream; |
29 | 28 | ||
30 | struct _Emotion_Audio_Sink | 29 | struct _Emotion_Audio_Stream |
31 | { | 30 | { |
32 | GstElement *sink; | ||
33 | gdouble length_time; | 31 | gdouble length_time; |
34 | gint channels; | 32 | gint channels; |
35 | gint samplerate; | 33 | gint samplerate; |
@@ -61,12 +59,12 @@ struct _Emotion_Gstreamer_Video | |||
61 | GstBus *eos_bus; | 59 | GstBus *eos_bus; |
62 | Ecore_Timer *eos_timer; | 60 | Ecore_Timer *eos_timer; |
63 | 61 | ||
64 | /* Sinks */ | 62 | /* Streams */ |
65 | Eina_List *video_sinks; | 63 | Eina_List *video_streams; |
66 | Eina_List *audio_sinks; | 64 | Eina_List *audio_streams; |
67 | 65 | ||
68 | int video_sink_nbr; | 66 | int video_stream_nbr; |
69 | int audio_sink_nbr; | 67 | int audio_stream_nbr; |
70 | 68 | ||
71 | /* Evas object */ | 69 | /* Evas object */ |
72 | Evas_Object *obj; | 70 | Evas_Object *obj; |
@@ -86,10 +84,10 @@ struct _Emotion_Gstreamer_Video | |||
86 | 84 | ||
87 | Emotion_Vis vis; | 85 | Emotion_Vis vis; |
88 | 86 | ||
89 | unsigned char play : 1; | 87 | unsigned char play : 1; |
90 | unsigned char play_started : 1; | 88 | unsigned char play_started : 1; |
91 | unsigned char video_mute : 1; | 89 | unsigned char video_mute : 1; |
92 | unsigned char audio_mute : 1; | 90 | unsigned char audio_mute : 1; |
93 | }; | 91 | }; |
94 | 92 | ||
95 | extern int _emotion_gstreamer_log_domain; | 93 | extern int _emotion_gstreamer_log_domain; |
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c index c71bd11dc6..a5208cb261 100644 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c +++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.c | |||
@@ -28,6 +28,292 @@ emotion_pipeline_pause(GstElement *pipeline) | |||
28 | return 1; | 28 | return 1; |
29 | } | 29 | } |
30 | 30 | ||
31 | Eina_Bool | ||
32 | _emotion_pipeline_build(Emotion_Gstreamer_Video *ev, const char *file) | ||
33 | { | ||
34 | GstElement *sink; | ||
35 | int i; | ||
36 | |||
37 | ev->pipeline = gst_element_factory_make("playbin2", "pipeline"); | ||
38 | if (!ev->pipeline) | ||
39 | { | ||
40 | ERR("could not create playbin2 element"); | ||
41 | return EINA_FALSE; | ||
42 | } | ||
43 | |||
44 | ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline)); | ||
45 | if (!ev->eos_bus) | ||
46 | { | ||
47 | ERR("could not create BUS"); | ||
48 | goto unref_pipeline; | ||
49 | } | ||
50 | |||
51 | sink = gst_element_factory_make("fakesink", "videosink"); | ||
52 | if (!sink) | ||
53 | { | ||
54 | ERR("could not create video sink"); | ||
55 | goto unref_pipeline; | ||
56 | } | ||
57 | g_object_set(G_OBJECT(sink), "sync", TRUE, NULL); | ||
58 | g_object_set(G_OBJECT(sink), "signal-handoffs", TRUE, NULL); | ||
59 | g_signal_connect(G_OBJECT(sink), | ||
60 | "handoff", | ||
61 | G_CALLBACK(cb_handoff), ev); | ||
62 | g_object_set(G_OBJECT(ev->pipeline), "video-sink", sink, NULL); | ||
63 | |||
64 | sink = gst_element_factory_make("autoaudiosink", "audiosink"); | ||
65 | if (!sink) | ||
66 | { | ||
67 | ERR("could not create audio sink"); | ||
68 | goto unref_pipeline; | ||
69 | } | ||
70 | g_object_set(G_OBJECT(ev->pipeline), "audio-sink", sink, NULL); | ||
71 | |||
72 | if ((*file == '/') || (*file == '~')) | ||
73 | { | ||
74 | char *uri; | ||
75 | |||
76 | uri = g_filename_to_uri(file, NULL, NULL); | ||
77 | if (uri) | ||
78 | { | ||
79 | DBG("Setting file %s\n", uri); | ||
80 | g_object_set(G_OBJECT(ev->pipeline), "uri", uri, NULL); | ||
81 | g_free(uri); | ||
82 | } | ||
83 | else | ||
84 | { | ||
85 | ERR("could not create new uri from %s", file); | ||
86 | goto unref_pipeline; | ||
87 | } | ||
88 | } | ||
89 | else | ||
90 | { | ||
91 | DBG("Setting file %s\n", file); | ||
92 | g_object_set(G_OBJECT(ev->pipeline), "uri", file, NULL); | ||
93 | } | ||
94 | |||
95 | if (!emotion_pipeline_pause(ev->pipeline)) | ||
96 | goto unref_pipeline; | ||
97 | |||
98 | g_object_get(G_OBJECT(ev->pipeline), | ||
99 | "n-audio", &ev->audio_stream_nbr, | ||
100 | "n-video", &ev->video_stream_nbr, | ||
101 | NULL); | ||
102 | |||
103 | if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0)) | ||
104 | { | ||
105 | ERR("No audio nor video stream found"); | ||
106 | goto unref_pipeline; | ||
107 | } | ||
108 | |||
109 | /* Video streams */ | ||
110 | |||
111 | for (i = 0; i < ev->video_stream_nbr; i++) | ||
112 | { | ||
113 | GstPad *pad; | ||
114 | GstCaps *caps; | ||
115 | GstStructure *structure; | ||
116 | GstQuery *query; | ||
117 | const GValue *val; | ||
118 | gchar *str; | ||
119 | Eina_Bool build_stream = EINA_FALSE; | ||
120 | |||
121 | gdouble length_time; | ||
122 | gint width; | ||
123 | gint height; | ||
124 | gint fps_num; | ||
125 | gint fps_den; | ||
126 | guint32 fourcc; | ||
127 | |||
128 | g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad); | ||
129 | if (!pad) | ||
130 | continue; | ||
131 | |||
132 | caps = gst_pad_get_negotiated_caps(pad); | ||
133 | if (!caps) | ||
134 | goto unref_pad_v; | ||
135 | structure = gst_caps_get_structure(caps, 0); | ||
136 | str = gst_caps_to_string(caps); | ||
137 | |||
138 | if (!gst_structure_get_int(structure, "width", &width)) | ||
139 | goto unref_caps_v; | ||
140 | if (!gst_structure_get_int(structure, "height", &height)) | ||
141 | goto unref_caps_v; | ||
142 | if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den)) | ||
143 | goto unref_caps_v; | ||
144 | |||
145 | if (g_str_has_prefix(str, "video/x-raw-yuv")) | ||
146 | { | ||
147 | val = gst_structure_get_value(structure, "format"); | ||
148 | fourcc = gst_value_get_fourcc(val); | ||
149 | } | ||
150 | else if (g_str_has_prefix(str, "video/x-raw-rgb")) | ||
151 | fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B'); | ||
152 | else | ||
153 | goto unref_caps_v; | ||
154 | |||
155 | query = gst_query_new_duration(GST_FORMAT_TIME); | ||
156 | if (gst_pad_peer_query(pad, query)) | ||
157 | { | ||
158 | gint64 t; | ||
159 | |||
160 | gst_query_parse_duration(query, NULL, &t); | ||
161 | length_time = (double)t / (double)GST_SECOND; | ||
162 | } | ||
163 | else | ||
164 | goto unref_query_v; | ||
165 | |||
166 | build_stream = EINA_TRUE; | ||
167 | |||
168 | unref_query_v: | ||
169 | gst_query_unref(query); | ||
170 | unref_caps_v: | ||
171 | gst_caps_unref(caps); | ||
172 | unref_pad_v: | ||
173 | gst_object_unref(pad); | ||
174 | |||
175 | if (build_stream) | ||
176 | { | ||
177 | Emotion_Video_Stream *vstream; | ||
178 | |||
179 | vstream = emotion_video_stream_new(ev); | ||
180 | if (!vstream) continue; | ||
181 | |||
182 | vstream->length_time = length_time; | ||
183 | vstream->width = width; | ||
184 | vstream->height = height; | ||
185 | vstream->fps_num = fps_num; | ||
186 | vstream->fps_den = fps_den; | ||
187 | vstream->fourcc = fourcc; | ||
188 | } | ||
189 | } | ||
190 | |||
191 | /* Audio streams */ | ||
192 | |||
193 | for (i = 0; i < ev->audio_stream_nbr; i++) | ||
194 | { | ||
195 | GstPad *pad; | ||
196 | GstCaps *caps; | ||
197 | GstStructure *structure; | ||
198 | GstQuery *query; | ||
199 | Eina_Bool build_stream = EINA_FALSE; | ||
200 | |||
201 | gdouble length_time; | ||
202 | gint channels; | ||
203 | gint samplerate; | ||
204 | |||
205 | g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad); | ||
206 | if (!pad) | ||
207 | continue; | ||
208 | |||
209 | caps = gst_pad_get_negotiated_caps(pad); | ||
210 | if (!caps) | ||
211 | goto unref_pad_a; | ||
212 | structure = gst_caps_get_structure(caps, 0); | ||
213 | |||
214 | if (!gst_structure_get_int(structure, "channels", &channels)) | ||
215 | goto unref_caps_a; | ||
216 | if (!gst_structure_get_int(structure, "rate", &samplerate)) | ||
217 | goto unref_caps_a; | ||
218 | |||
219 | query = gst_query_new_duration(GST_FORMAT_TIME); | ||
220 | if (gst_pad_peer_query(pad, query)) | ||
221 | { | ||
222 | gint64 t; | ||
223 | |||
224 | gst_query_parse_duration(query, NULL, &t); | ||
225 | length_time = (double)t / (double)GST_SECOND; | ||
226 | } | ||
227 | else | ||
228 | goto unref_query_a; | ||
229 | |||
230 | build_stream = EINA_TRUE; | ||
231 | |||
232 | unref_query_a: | ||
233 | gst_query_unref(query); | ||
234 | unref_caps_a: | ||
235 | gst_caps_unref(caps); | ||
236 | unref_pad_a: | ||
237 | gst_object_unref(pad); | ||
238 | |||
239 | if (build_stream) | ||
240 | { | ||
241 | Emotion_Audio_Stream *astream; | ||
242 | |||
243 | astream = (Emotion_Audio_Stream *)calloc(1, sizeof(Emotion_Audio_Stream)); | ||
244 | if (!astream) continue; | ||
245 | ev->audio_streams = eina_list_append(ev->audio_streams, astream); | ||
246 | if (eina_error_get()) | ||
247 | { | ||
248 | free(astream); | ||
249 | continue; | ||
250 | } | ||
251 | |||
252 | astream->length_time = length_time; | ||
253 | astream->channels = channels; | ||
254 | astream->samplerate = samplerate; | ||
255 | } | ||
256 | } | ||
257 | |||
258 | /* Visualization sink */ | ||
259 | |||
260 | if (ev->video_stream_nbr == 0) | ||
261 | { | ||
262 | GstElement *vis = NULL; | ||
263 | Emotion_Video_Stream *vstream; | ||
264 | Emotion_Audio_Stream *astream; | ||
265 | gint flags; | ||
266 | const char *vis_name; | ||
267 | |||
268 | if (!(vis_name = emotion_visualization_element_name_get(ev->vis))) | ||
269 | { | ||
270 | printf ("pb vis name %d\n", ev->vis); | ||
271 | goto finalize; | ||
272 | } | ||
273 | |||
274 | astream = (Emotion_Audio_Stream *)eina_list_data_get(ev->audio_streams); | ||
275 | |||
276 | vis = gst_element_factory_make(vis_name, "vissink"); | ||
277 | vstream = emotion_video_stream_new(ev); | ||
278 | if (!vstream) | ||
279 | { | ||
280 | DBG("could not create visualization stream"); | ||
281 | goto finalize; | ||
282 | } | ||
283 | vstream->length_time = astream->length_time; | ||
284 | vstream->width = 320; | ||
285 | vstream->height = 200; | ||
286 | vstream->fps_num = 25; | ||
287 | vstream->fps_den = 1; | ||
288 | vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B'); | ||
289 | |||
290 | g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL); | ||
291 | g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); | ||
292 | flags |= 0x00000008; /* GST_PLAY_FLAG_VIS: enable visualization */ | ||
293 | g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL); | ||
294 | } | ||
295 | |||
296 | finalize: | ||
297 | |||
298 | ev->video_stream_nbr = eina_list_count(ev->video_streams); | ||
299 | ev->audio_stream_nbr = eina_list_count(ev->audio_streams); | ||
300 | |||
301 | if (ev->video_stream_nbr == 1) | ||
302 | { | ||
303 | Emotion_Video_Stream *vstream; | ||
304 | |||
305 | vstream = (Emotion_Video_Stream *)eina_list_data_get(ev->video_streams); | ||
306 | ev->ratio = (double)vstream->width / (double)vstream->height; | ||
307 | } | ||
308 | |||
309 | return EINA_TRUE; | ||
310 | |||
311 | unref_pipeline: | ||
312 | gst_object_unref(ev->pipeline); | ||
313 | |||
314 | return EINA_FALSE; | ||
315 | } | ||
316 | |||
31 | /* Send the video frame to the evas object */ | 317 | /* Send the video frame to the evas object */ |
32 | void | 318 | void |
33 | cb_handoff(GstElement *fakesrc __UNUSED__, | 319 | cb_handoff(GstElement *fakesrc __UNUSED__, |
@@ -54,9 +340,9 @@ cb_handoff(GstElement *fakesrc __UNUSED__, | |||
54 | } | 340 | } |
55 | else | 341 | else |
56 | { | 342 | { |
57 | Emotion_Audio_Sink *asink; | 343 | Emotion_Audio_Stream *astream; |
58 | asink = (Emotion_Audio_Sink *)eina_list_nth(ev->audio_sinks, ev->audio_sink_nbr); | 344 | astream = (Emotion_Audio_Stream *)eina_list_nth(ev->audio_streams, ev->audio_stream_nbr - 1); |
59 | _emotion_video_pos_update(ev->obj, ev->position, asink->length_time); | 345 | _emotion_video_pos_update(ev->obj, ev->position, astream->length_time); |
60 | } | 346 | } |
61 | 347 | ||
62 | query = gst_query_new_position(GST_FORMAT_TIME); | 348 | query = gst_query_new_position(GST_FORMAT_TIME); |
@@ -70,134 +356,32 @@ cb_handoff(GstElement *fakesrc __UNUSED__, | |||
70 | gst_query_unref(query); | 356 | gst_query_unref(query); |
71 | } | 357 | } |
72 | 358 | ||
73 | void | 359 | Emotion_Video_Stream * |
74 | file_new_decoded_pad_cb(GstElement *decodebin __UNUSED__, | 360 | emotion_video_stream_new(Emotion_Gstreamer_Video *ev) |
75 | GstPad *new_pad, | ||
76 | gboolean last __UNUSED__, | ||
77 | gpointer user_data) | ||
78 | { | ||
79 | Emotion_Gstreamer_Video *ev; | ||
80 | GstCaps *caps; | ||
81 | gchar *str; | ||
82 | unsigned int index; | ||
83 | |||
84 | ev = (Emotion_Gstreamer_Video *)user_data; | ||
85 | caps = gst_pad_get_caps(new_pad); | ||
86 | str = gst_caps_to_string(caps); | ||
87 | /* video stream */ | ||
88 | if (g_str_has_prefix(str, "video/")) | ||
89 | { | ||
90 | Emotion_Video_Sink *vsink; | ||
91 | GstElement *queue; | ||
92 | GstPad *videopad; | ||
93 | |||
94 | vsink = (Emotion_Video_Sink *)calloc(1, sizeof(Emotion_Video_Sink)); | ||
95 | if (!vsink) return; | ||
96 | ev->video_sinks = eina_list_append(ev->video_sinks, vsink); | ||
97 | if (eina_error_get()) | ||
98 | { | ||
99 | free(vsink); | ||
100 | return; | ||
101 | } | ||
102 | |||
103 | queue = gst_element_factory_make("queue", NULL); | ||
104 | vsink->sink = gst_element_factory_make("fakesink", "videosink"); | ||
105 | gst_bin_add_many(GST_BIN(ev->pipeline), queue, vsink->sink, NULL); | ||
106 | gst_element_link(queue, vsink->sink); | ||
107 | videopad = gst_element_get_pad(queue, "sink"); | ||
108 | gst_pad_link(new_pad, videopad); | ||
109 | gst_object_unref(videopad); | ||
110 | if (eina_list_count(ev->video_sinks) == 1) | ||
111 | { | ||
112 | ev->ratio = (double)vsink->width / (double)vsink->height; | ||
113 | } | ||
114 | gst_element_set_state(queue, GST_STATE_PAUSED); | ||
115 | gst_element_set_state(vsink->sink, GST_STATE_PAUSED); | ||
116 | } | ||
117 | /* audio stream */ | ||
118 | else if (g_str_has_prefix(str, "audio/")) | ||
119 | { | ||
120 | Emotion_Audio_Sink *asink; | ||
121 | GstPad *audiopad; | ||
122 | |||
123 | asink = (Emotion_Audio_Sink *)calloc(1, sizeof(Emotion_Audio_Sink)); | ||
124 | if (!asink) return; | ||
125 | ev->audio_sinks = eina_list_append(ev->audio_sinks, asink); | ||
126 | if (eina_error_get()) | ||
127 | { | ||
128 | free(asink); | ||
129 | return; | ||
130 | } | ||
131 | |||
132 | index = eina_list_count(ev->audio_sinks); | ||
133 | asink->sink = emotion_audio_sink_create(ev, index); | ||
134 | gst_bin_add(GST_BIN(ev->pipeline), asink->sink); | ||
135 | audiopad = gst_element_get_pad(asink->sink, "sink"); | ||
136 | gst_pad_link(new_pad, audiopad); | ||
137 | gst_element_set_state(asink->sink, GST_STATE_PAUSED); | ||
138 | } | ||
139 | |||
140 | free(str); | ||
141 | } | ||
142 | |||
143 | Emotion_Video_Sink * | ||
144 | emotion_video_sink_new(Emotion_Gstreamer_Video *ev) | ||
145 | { | 361 | { |
146 | Emotion_Video_Sink *vsink; | 362 | Emotion_Video_Stream *vstream; |
147 | 363 | ||
148 | if (!ev) return NULL; | 364 | if (!ev) return NULL; |
149 | 365 | ||
150 | vsink = (Emotion_Video_Sink *)calloc(1, sizeof(Emotion_Video_Sink)); | 366 | vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream)); |
151 | if (!vsink) return NULL; | 367 | if (!vstream) return NULL; |
152 | 368 | ||
153 | ev->video_sinks = eina_list_append(ev->video_sinks, vsink); | 369 | ev->video_streams = eina_list_append(ev->video_streams, vstream); |
154 | if (eina_error_get()) | 370 | if (eina_error_get()) |
155 | { | 371 | { |
156 | free(vsink); | 372 | free(vstream); |
157 | return NULL; | 373 | return NULL; |
158 | } | 374 | } |
159 | return vsink; | 375 | return vstream; |
160 | } | 376 | } |
161 | 377 | ||
162 | void | 378 | void |
163 | emotion_video_sink_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink) | 379 | emotion_video_stream_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Stream *vstream) |
164 | { | 380 | { |
165 | if (!ev || !vsink) return; | 381 | if (!ev || !vstream) return; |
166 | |||
167 | ev->video_sinks = eina_list_remove(ev->video_sinks, vsink); | ||
168 | free(vsink); | ||
169 | } | ||
170 | |||
171 | Emotion_Video_Sink * | ||
172 | emotion_visualization_sink_create(Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink) | ||
173 | { | ||
174 | Emotion_Video_Sink *vsink; | ||
175 | |||
176 | if (!ev) return NULL; | ||
177 | |||
178 | vsink = emotion_video_sink_new(ev); | ||
179 | if (!vsink) return NULL; | ||
180 | 382 | ||
181 | vsink->sink = gst_bin_get_by_name(GST_BIN(asink->sink), "vissink1"); | 383 | ev->video_streams = eina_list_remove(ev->video_streams, vstream); |
182 | if (!vsink->sink) | 384 | free(vstream); |
183 | { | ||
184 | emotion_video_sink_free(ev, vsink); | ||
185 | return NULL; | ||
186 | } | ||
187 | vsink->width = 320; | ||
188 | vsink->height = 200; | ||
189 | ev->ratio = (double)vsink->width / (double)vsink->height; | ||
190 | vsink->fps_num = 25; | ||
191 | vsink->fps_den = 1; | ||
192 | vsink->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B'); | ||
193 | vsink->length_time = asink->length_time; | ||
194 | |||
195 | g_object_set(G_OBJECT(vsink->sink), "sync", TRUE, NULL); | ||
196 | g_object_set(G_OBJECT(vsink->sink), "signal-handoffs", TRUE, NULL); | ||
197 | g_signal_connect(G_OBJECT(vsink->sink), | ||
198 | "handoff", | ||
199 | G_CALLBACK(cb_handoff), ev); | ||
200 | return vsink; | ||
201 | } | 385 | } |
202 | 386 | ||
203 | int | 387 | int |
@@ -295,327 +479,3 @@ emotion_visualization_element_name_get(Emotion_Vis visualisation) | |||
295 | return "goom"; | 479 | return "goom"; |
296 | } | 480 | } |
297 | } | 481 | } |
298 | |||
299 | static GstElement * | ||
300 | emotion_visualization_bin_create(Emotion_Gstreamer_Video *ev, int index) | ||
301 | { | ||
302 | const char *vis_name; | ||
303 | char buf[64]; | ||
304 | GstElement *vis, *visbin, *queue, *conv, *cspace, *sink; | ||
305 | GstPad *vispad; | ||
306 | GstCaps *caps; | ||
307 | |||
308 | if (ev->vis == EMOTION_VIS_NONE) | ||
309 | return NULL; | ||
310 | |||
311 | vis_name = emotion_visualization_element_name_get(ev->vis); | ||
312 | if (!vis_name) | ||
313 | return NULL; | ||
314 | |||
315 | g_snprintf(buf, sizeof(buf), "vis%d", index); | ||
316 | vis = gst_element_factory_make(vis_name, buf); | ||
317 | if (!vis) | ||
318 | return NULL; | ||
319 | |||
320 | g_snprintf(buf, sizeof(buf), "visbin%d", index); | ||
321 | visbin = gst_bin_new(buf); | ||
322 | |||
323 | queue = gst_element_factory_make("queue", NULL); | ||
324 | conv = gst_element_factory_make("audioconvert", NULL); | ||
325 | cspace = gst_element_factory_make("ffmpegcolorspace", NULL); | ||
326 | g_snprintf(buf, sizeof(buf), "vissink%d", index); | ||
327 | sink = gst_element_factory_make("fakesink", buf); | ||
328 | |||
329 | if ((!visbin) || (!queue) || (!conv) || (!cspace) || (!sink)) | ||
330 | goto error; | ||
331 | |||
332 | gst_bin_add_many(GST_BIN(visbin), queue, conv, vis, cspace, sink, NULL); | ||
333 | gst_element_link_many(queue, conv, vis, cspace, NULL); | ||
334 | caps = gst_caps_new_simple("video/x-raw-rgb", | ||
335 | "bpp", G_TYPE_INT, 32, | ||
336 | "width", G_TYPE_INT, 320, | ||
337 | "height", G_TYPE_INT, 200, | ||
338 | NULL); | ||
339 | gst_element_link_filtered(cspace, sink, caps); | ||
340 | |||
341 | vispad = gst_element_get_pad(queue, "sink"); | ||
342 | gst_element_add_pad(visbin, gst_ghost_pad_new("sink", vispad)); | ||
343 | gst_object_unref(vispad); | ||
344 | |||
345 | return visbin; | ||
346 | |||
347 | error: | ||
348 | if (vis) | ||
349 | gst_object_unref(vis); | ||
350 | if (visbin) | ||
351 | gst_object_unref(visbin); | ||
352 | if (queue) | ||
353 | gst_object_unref(queue); | ||
354 | if (conv) | ||
355 | gst_object_unref(conv); | ||
356 | if (cspace) | ||
357 | gst_object_unref(cspace); | ||
358 | if (sink) | ||
359 | gst_object_unref(sink); | ||
360 | |||
361 | return NULL; | ||
362 | } | ||
363 | |||
364 | static GstElement * | ||
365 | emotion_audio_bin_create(Emotion_Gstreamer_Video *ev, int index) | ||
366 | { | ||
367 | GstElement *audiobin, *queue, *conv, *resample, *volume, *sink; | ||
368 | GstPad *audiopad; | ||
369 | double vol; | ||
370 | |||
371 | audiobin = gst_bin_new(NULL); | ||
372 | queue = gst_element_factory_make("queue", NULL); | ||
373 | conv = gst_element_factory_make("audioconvert", NULL); | ||
374 | resample = gst_element_factory_make("audioresample", NULL); | ||
375 | volume = gst_element_factory_make("volume", "volume"); | ||
376 | |||
377 | if (index == 1) | ||
378 | sink = gst_element_factory_make("autoaudiosink", NULL); | ||
379 | else | ||
380 | /* XXX hack: use a proper mixer element here */ | ||
381 | sink = gst_element_factory_make("fakesink", NULL); | ||
382 | |||
383 | if ((!audiobin) || (!queue) || (!conv) || (!resample) || (!volume) || (!sink)) | ||
384 | goto error; | ||
385 | |||
386 | g_object_get(volume, "volume", &vol, NULL); | ||
387 | ev->volume = vol; | ||
388 | |||
389 | gst_bin_add_many(GST_BIN(audiobin), | ||
390 | queue, conv, resample, volume, sink, NULL); | ||
391 | gst_element_link_many(queue, conv, resample, volume, sink, NULL); | ||
392 | |||
393 | audiopad = gst_element_get_pad(queue, "sink"); | ||
394 | gst_element_add_pad(audiobin, gst_ghost_pad_new("sink", audiopad)); | ||
395 | gst_object_unref(audiopad); | ||
396 | |||
397 | return audiobin; | ||
398 | |||
399 | error: | ||
400 | if (audiobin) | ||
401 | gst_object_unref(audiobin); | ||
402 | if (queue) | ||
403 | gst_object_unref(queue); | ||
404 | if (conv) | ||
405 | gst_object_unref(conv); | ||
406 | if (resample) | ||
407 | gst_object_unref(resample); | ||
408 | if (volume) | ||
409 | gst_object_unref(volume); | ||
410 | if (sink) | ||
411 | gst_object_unref(sink); | ||
412 | |||
413 | return NULL; | ||
414 | } | ||
415 | |||
416 | |||
417 | GstElement * | ||
418 | emotion_audio_sink_create(Emotion_Gstreamer_Video *ev, int index) | ||
419 | { | ||
420 | gchar buf[128]; | ||
421 | GstElement *bin; | ||
422 | GstElement *audiobin; | ||
423 | GstElement *visbin = NULL; | ||
424 | GstElement *tee; | ||
425 | GstPad *teepad; | ||
426 | GstPad *binpad; | ||
427 | |||
428 | audiobin = emotion_audio_bin_create(ev, index); | ||
429 | if (!audiobin) | ||
430 | return NULL; | ||
431 | |||
432 | bin = gst_bin_new(NULL); | ||
433 | if (!bin) | ||
434 | { | ||
435 | gst_object_unref(audiobin); | ||
436 | return NULL; | ||
437 | } | ||
438 | |||
439 | g_snprintf(buf, 128, "tee%d", index); | ||
440 | tee = gst_element_factory_make("tee", buf); | ||
441 | |||
442 | visbin = emotion_visualization_bin_create(ev, index); | ||
443 | |||
444 | gst_bin_add_many(GST_BIN(bin), tee, audiobin, visbin, NULL); | ||
445 | |||
446 | binpad = gst_element_get_pad(audiobin, "sink"); | ||
447 | teepad = gst_element_get_request_pad(tee, "src%d"); | ||
448 | gst_pad_link(teepad, binpad); | ||
449 | gst_object_unref(teepad); | ||
450 | gst_object_unref(binpad); | ||
451 | |||
452 | if (visbin) | ||
453 | { | ||
454 | binpad = gst_element_get_pad(visbin, "sink"); | ||
455 | teepad = gst_element_get_request_pad(tee, "src%d"); | ||
456 | gst_pad_link(teepad, binpad); | ||
457 | gst_object_unref(teepad); | ||
458 | gst_object_unref(binpad); | ||
459 | } | ||
460 | |||
461 | teepad = gst_element_get_pad(tee, "sink"); | ||
462 | gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad)); | ||
463 | gst_object_unref(teepad); | ||
464 | |||
465 | return bin; | ||
466 | } | ||
467 | |||
468 | void | ||
469 | emotion_streams_sinks_get(Emotion_Gstreamer_Video *ev, GstElement *decoder) | ||
470 | { | ||
471 | GstIterator *it; | ||
472 | Eina_List *alist; | ||
473 | Eina_List *vlist; | ||
474 | gpointer data; | ||
475 | |||
476 | alist = ev->audio_sinks; | ||
477 | vlist = ev->video_sinks; | ||
478 | |||
479 | it = gst_element_iterate_src_pads(decoder); | ||
480 | while (gst_iterator_next(it, &data) == GST_ITERATOR_OK) | ||
481 | { | ||
482 | GstPad *pad; | ||
483 | GstCaps *caps; | ||
484 | gchar *str; | ||
485 | |||
486 | pad = GST_PAD(data); | ||
487 | |||
488 | caps = gst_pad_get_caps(pad); | ||
489 | str = gst_caps_to_string(caps); | ||
490 | DBG("caps %s", str); | ||
491 | |||
492 | /* video stream */ | ||
493 | if (g_str_has_prefix(str, "video/")) | ||
494 | { | ||
495 | Emotion_Video_Sink *vsink; | ||
496 | |||
497 | vsink = (Emotion_Video_Sink *)eina_list_data_get(vlist); | ||
498 | vlist = eina_list_next(vlist); | ||
499 | |||
500 | emotion_video_sink_fill(vsink, pad, caps); | ||
501 | ev->ratio = (double)vsink->width / (double)vsink->height; | ||
502 | |||
503 | } | ||
504 | /* audio stream */ | ||
505 | else if (g_str_has_prefix(str, "audio/")) | ||
506 | { | ||
507 | Emotion_Audio_Sink *asink; | ||
508 | unsigned int index; | ||
509 | |||
510 | asink = (Emotion_Audio_Sink *)eina_list_data_get(alist); | ||
511 | alist = eina_list_next(alist); | ||
512 | |||
513 | emotion_audio_sink_fill(asink, pad, caps); | ||
514 | |||
515 | for (index = 0; asink != eina_list_nth(ev->audio_sinks, index) ; index++) | ||
516 | ; | ||
517 | |||
518 | if (eina_list_count(ev->video_sinks) == 0) | ||
519 | { | ||
520 | if (index == 1) | ||
521 | { | ||
522 | Emotion_Video_Sink *vsink; | ||
523 | |||
524 | vsink = emotion_visualization_sink_create(ev, asink); | ||
525 | if (!vsink) goto finalize; | ||
526 | } | ||
527 | } | ||
528 | else | ||
529 | { | ||
530 | gchar buf[128]; | ||
531 | GstElement *visbin; | ||
532 | |||
533 | g_snprintf(buf, 128, "visbin%d", index); | ||
534 | visbin = gst_bin_get_by_name(GST_BIN(ev->pipeline), buf); | ||
535 | if (visbin) | ||
536 | { | ||
537 | GstPad *srcpad; | ||
538 | GstPad *sinkpad; | ||
539 | |||
540 | sinkpad = gst_element_get_pad(visbin, "sink"); | ||
541 | srcpad = gst_pad_get_peer(sinkpad); | ||
542 | gst_pad_unlink(srcpad, sinkpad); | ||
543 | |||
544 | gst_object_unref(srcpad); | ||
545 | gst_object_unref(sinkpad); | ||
546 | } | ||
547 | } | ||
548 | } | ||
549 | finalize: | ||
550 | gst_caps_unref(caps); | ||
551 | g_free(str); | ||
552 | gst_object_unref(pad); | ||
553 | } | ||
554 | gst_iterator_free(it); | ||
555 | } | ||
556 | |||
557 | void | ||
558 | emotion_video_sink_fill(Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps) | ||
559 | { | ||
560 | GstStructure *structure; | ||
561 | GstQuery *query; | ||
562 | const GValue *val; | ||
563 | gchar *str; | ||
564 | |||
565 | structure = gst_caps_get_structure(caps, 0); | ||
566 | str = gst_caps_to_string(caps); | ||
567 | |||
568 | gst_structure_get_int(structure, "width", &vsink->width); | ||
569 | gst_structure_get_int(structure, "height", &vsink->height); | ||
570 | |||
571 | vsink->fps_num = 1; | ||
572 | vsink->fps_den = 1; | ||
573 | val = gst_structure_get_value(structure, "framerate"); | ||
574 | if (val) | ||
575 | { | ||
576 | vsink->fps_num = gst_value_get_fraction_numerator(val); | ||
577 | vsink->fps_den = gst_value_get_fraction_denominator(val); | ||
578 | } | ||
579 | if (g_str_has_prefix(str, "video/x-raw-yuv")) | ||
580 | { | ||
581 | val = gst_structure_get_value(structure, "format"); | ||
582 | vsink->fourcc = gst_value_get_fourcc(val); | ||
583 | } | ||
584 | else if (g_str_has_prefix(str, "video/x-raw-rgb")) | ||
585 | vsink->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B'); | ||
586 | else | ||
587 | vsink->fourcc = 0; | ||
588 | |||
589 | query = gst_query_new_duration(GST_FORMAT_TIME); | ||
590 | if (gst_pad_query(pad, query)) | ||
591 | { | ||
592 | gint64 time; | ||
593 | |||
594 | gst_query_parse_duration(query, NULL, &time); | ||
595 | vsink->length_time = (double)time / (double)GST_SECOND; | ||
596 | } | ||
597 | g_free(str); | ||
598 | gst_query_unref(query); | ||
599 | } | ||
600 | |||
601 | void | ||
602 | emotion_audio_sink_fill(Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps) | ||
603 | { | ||
604 | GstStructure *structure; | ||
605 | GstQuery *query; | ||
606 | |||
607 | structure = gst_caps_get_structure(caps, 0); | ||
608 | |||
609 | gst_structure_get_int(structure, "channels", &asink->channels); | ||
610 | gst_structure_get_int(structure, "rate", &asink->samplerate); | ||
611 | |||
612 | query = gst_query_new_duration(GST_FORMAT_TIME); | ||
613 | if (gst_pad_query(pad, query)) | ||
614 | { | ||
615 | gint64 time; | ||
616 | |||
617 | gst_query_parse_duration(query, NULL, &time); | ||
618 | asink->length_time = (double)time / (double)GST_SECOND; | ||
619 | } | ||
620 | gst_query_unref(query); | ||
621 | } | ||
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h index 26cfac2f2b..92c0d65a40 100644 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h +++ b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline.h | |||
@@ -7,34 +7,18 @@ | |||
7 | 7 | ||
8 | gboolean emotion_pipeline_pause (GstElement *pipeline); | 8 | gboolean emotion_pipeline_pause (GstElement *pipeline); |
9 | 9 | ||
10 | int emotion_pipeline_cdda_build (void *video, const char * device, unsigned int track); | 10 | Eina_Bool _emotion_pipeline_build(Emotion_Gstreamer_Video *ev, const char *file); |
11 | int emotion_pipeline_file_build (void *video, const char *file); | ||
12 | int emotion_pipeline_uri_build (void *video, const char *uri); | ||
13 | int emotion_pipeline_dvd_build (void *video, const char *device); | ||
14 | int emotion_pipeline_v4l_build (void *video, const char *device); | ||
15 | int emotion_pipeline_cdda_track_count_get (void *video); | 11 | int emotion_pipeline_cdda_track_count_get (void *video); |
16 | 12 | ||
17 | GstElement *emotion_audio_sink_create (Emotion_Gstreamer_Video *ev, int index); | 13 | GstElement *emotion_audio_stream_create (Emotion_Gstreamer_Video *ev, int index); |
18 | Emotion_Video_Sink *emotion_video_sink_new (Emotion_Gstreamer_Video *ev); | 14 | Emotion_Video_Stream *emotion_video_stream_new (Emotion_Gstreamer_Video *ev); |
19 | void emotion_video_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink); | 15 | void emotion_video_stream_free (Emotion_Gstreamer_Video *ev, Emotion_Video_Stream *vstream); |
20 | Emotion_Video_Sink *emotion_visualization_sink_create (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink); | ||
21 | |||
22 | void emotion_streams_sinks_get (Emotion_Gstreamer_Video *ev, GstElement *decoder); | ||
23 | |||
24 | void emotion_video_sink_fill (Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps); | ||
25 | |||
26 | void emotion_audio_sink_fill (Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps); | ||
27 | 16 | ||
28 | void cb_handoff (GstElement *fakesrc, | 17 | void cb_handoff (GstElement *fakesrc, |
29 | GstBuffer *buffer, | 18 | GstBuffer *buffer, |
30 | GstPad *pad, | 19 | GstPad *pad, |
31 | gpointer user_data); | 20 | gpointer user_data); |
32 | 21 | ||
33 | void file_new_decoded_pad_cb (GstElement *decodebin, | ||
34 | GstPad *new_pad, | ||
35 | gboolean last, | ||
36 | gpointer user_data); | ||
37 | |||
38 | const char *emotion_visualization_element_name_get(Emotion_Vis visualisation); | 22 | const char *emotion_visualization_element_name_get(Emotion_Vis visualisation); |
39 | 23 | ||
40 | 24 | ||
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c deleted file mode 100644 index 3bd9db1ece..0000000000 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_cdda.c +++ /dev/null | |||
@@ -1,123 +0,0 @@ | |||
1 | #include "emotion_gstreamer.h" | ||
2 | #include "emotion_gstreamer_pipeline.h" | ||
3 | |||
4 | |||
5 | static Emotion_Audio_Sink *_emotion_audio_sink_new (Emotion_Gstreamer_Video *ev); | ||
6 | static void _emotion_audio_sink_free (Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink); | ||
7 | |||
8 | int | ||
9 | emotion_pipeline_cdda_build(void *video, const char * device, unsigned int track) | ||
10 | { | ||
11 | GstElement *cdiocddasrc; | ||
12 | Emotion_Video_Sink *vsink; | ||
13 | Emotion_Audio_Sink *asink; | ||
14 | Emotion_Gstreamer_Video *ev; | ||
15 | /* GstFormat format; */ | ||
16 | /* gint64 tracks_count; */ | ||
17 | |||
18 | ev = (Emotion_Gstreamer_Video *)video; | ||
19 | if (!ev) return 0; | ||
20 | |||
21 | cdiocddasrc = gst_element_factory_make("cdiocddasrc", "src"); | ||
22 | if (!cdiocddasrc) | ||
23 | { | ||
24 | ERR("cdiocddasrc gstreamer element missing. Install it."); | ||
25 | goto failure_cdiocddasrc; | ||
26 | } | ||
27 | |||
28 | if (device) | ||
29 | g_object_set(G_OBJECT(cdiocddasrc), "device", device, NULL); | ||
30 | |||
31 | g_object_set(G_OBJECT(cdiocddasrc), "track", track, NULL); | ||
32 | |||
33 | asink = _emotion_audio_sink_new(ev); | ||
34 | if (!asink) | ||
35 | goto failure_emotion_sink; | ||
36 | |||
37 | asink->sink = emotion_audio_sink_create(ev, 1); | ||
38 | if (!asink->sink) | ||
39 | goto failure_gstreamer_sink; | ||
40 | |||
41 | gst_bin_add_many((GST_BIN(ev->pipeline)), cdiocddasrc, asink->sink, NULL); | ||
42 | |||
43 | if (!gst_element_link(cdiocddasrc, asink->sink)) | ||
44 | goto failure_link; | ||
45 | |||
46 | vsink = emotion_visualization_sink_create(ev, asink); | ||
47 | if (!vsink) goto failure_link; | ||
48 | |||
49 | if (!emotion_pipeline_pause(ev->pipeline)) | ||
50 | goto failure_gstreamer_pause; | ||
51 | |||
52 | { | ||
53 | GstQuery *query; | ||
54 | GstPad *pad; | ||
55 | GstCaps *caps; | ||
56 | GstStructure *structure; | ||
57 | |||
58 | /* should always be found */ | ||
59 | pad = gst_element_get_pad(cdiocddasrc, "src"); | ||
60 | |||
61 | caps = gst_pad_get_caps(pad); | ||
62 | structure = gst_caps_get_structure(GST_CAPS(caps), 0); | ||
63 | |||
64 | gst_structure_get_int(structure, "channels", &asink->channels); | ||
65 | gst_structure_get_int(structure, "rate", &asink->samplerate); | ||
66 | |||
67 | gst_caps_unref(caps); | ||
68 | |||
69 | query = gst_query_new_duration(GST_FORMAT_TIME); | ||
70 | if (gst_pad_query(pad, query)) | ||
71 | { | ||
72 | gint64 time; | ||
73 | |||
74 | gst_query_parse_duration(query, NULL, &time); | ||
75 | asink->length_time = (double)time / (double)GST_SECOND; | ||
76 | vsink->length_time = asink->length_time; | ||
77 | } | ||
78 | gst_query_unref(query); | ||
79 | gst_object_unref(GST_OBJECT(pad)); | ||
80 | } | ||
81 | |||
82 | return 1; | ||
83 | |||
84 | failure_gstreamer_pause: | ||
85 | emotion_video_sink_free(ev, vsink); | ||
86 | failure_link: | ||
87 | gst_bin_remove(GST_BIN(ev->pipeline), asink->sink); | ||
88 | failure_gstreamer_sink: | ||
89 | _emotion_audio_sink_free(ev, asink); | ||
90 | failure_emotion_sink: | ||
91 | gst_bin_remove(GST_BIN(ev->pipeline), cdiocddasrc); | ||
92 | failure_cdiocddasrc: | ||
93 | |||
94 | return 0; | ||
95 | } | ||
96 | |||
97 | static Emotion_Audio_Sink * | ||
98 | _emotion_audio_sink_new(Emotion_Gstreamer_Video *ev) | ||
99 | { | ||
100 | Emotion_Audio_Sink *asink; | ||
101 | |||
102 | if (!ev) return NULL; | ||
103 | |||
104 | asink = (Emotion_Audio_Sink *)malloc(sizeof(Emotion_Audio_Sink)); | ||
105 | if (!asink) return NULL; | ||
106 | |||
107 | ev->audio_sinks = eina_list_append(ev->audio_sinks, asink); | ||
108 | if (eina_error_get()) | ||
109 | { | ||
110 | free(asink); | ||
111 | return NULL; | ||
112 | } | ||
113 | return asink; | ||
114 | } | ||
115 | |||
116 | static void | ||
117 | _emotion_audio_sink_free(Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink) | ||
118 | { | ||
119 | if (!ev || !asink) return; | ||
120 | |||
121 | ev->audio_sinks = eina_list_remove(ev->audio_sinks, asink); | ||
122 | free(asink); | ||
123 | } | ||
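Once cdiocddasrc is driven through its URI handler instead of being linked by hand, the per-pad duration probe above collapses into a single query on the pipeline. The helper below only illustrates that query with the GStreamer 0.10 API; the name pipeline_length_get is invented for the sketch, and the division by GST_SECOND mirrors the length_time computation in the removed code.

#include <gst/gst.h>

/* Query the total duration from a playbin2-driven pipeline instead of
 * poking the cdiocddasrc pad directly (GStreamer 0.10 API). */
static double
pipeline_length_get(GstElement *pipeline)
{
   GstFormat fmt = GST_FORMAT_TIME;
   gint64 len = -1;

   /* Only meaningful once the pipeline has reached PAUSED or PLAYING. */
   if (!gst_element_query_duration(pipeline, &fmt, &len) ||
       fmt != GST_FORMAT_TIME || len < 0)
     return 0.0;

   return (double)len / (double)GST_SECOND;
}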
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c deleted file mode 100644 index 8f5f5a00ec..0000000000 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_dvd.c +++ /dev/null | |||
@@ -1,243 +0,0 @@ | |||
1 | #include "emotion_gstreamer.h" | ||
2 | #include "emotion_gstreamer_pipeline.h" | ||
3 | |||
4 | |||
5 | static void dvd_pad_added_cb (GstElement *dvddemuxer, | ||
6 | GObject *new_pad, | ||
7 | gpointer user_data); | ||
8 | |||
9 | static void dvd_no_more_pads_cb (GstElement *dvddemuxer, | ||
10 | gpointer user_data); | ||
11 | |||
12 | static volatile int no_more_pads = 0; | ||
13 | |||
14 | |||
15 | int | ||
16 | emotion_pipeline_dvd_build(void *video, const char *device) | ||
17 | { | ||
18 | GstElement *dvdreadsrc; | ||
19 | GstElement *dvddemux; | ||
20 | Emotion_Gstreamer_Video *ev; | ||
21 | Eina_List *alist; | ||
22 | Eina_List *vlist; | ||
23 | |||
24 | ev = (Emotion_Gstreamer_Video *)video; | ||
25 | if (!ev) return 0; | ||
26 | |||
27 | dvdreadsrc = gst_element_factory_make("dvdreadsrc", "src"); | ||
28 | if (!dvdreadsrc) | ||
29 | goto failure_dvdreadsrc; | ||
30 | if (device) | ||
31 | g_object_set(G_OBJECT(dvdreadsrc), "device", device, NULL); | ||
32 | |||
33 | dvddemux = gst_element_factory_make("dvddemux", "dvddemux"); | ||
34 | if (!dvddemux) | ||
35 | goto failure_dvddemux; | ||
36 | g_signal_connect(dvddemux, "pad-added", | ||
37 | G_CALLBACK(dvd_pad_added_cb), ev); | ||
38 | g_signal_connect(dvddemux, "no-more-pads", | ||
39 | G_CALLBACK(dvd_no_more_pads_cb), ev); | ||
40 | |||
41 | gst_bin_add_many(GST_BIN(ev->pipeline), dvdreadsrc, dvddemux, NULL); | ||
42 | if (!gst_element_link(dvdreadsrc, dvddemux)) | ||
43 | goto failure_link; | ||
44 | |||
45 | if (!emotion_pipeline_pause(ev->pipeline)) | ||
46 | goto failure_gstreamer_pause; | ||
47 | |||
48 | while (no_more_pads == 0) | ||
49 | { | ||
50 | DBG("toto"); | ||
51 | } | ||
52 | no_more_pads = 0; | ||
53 | |||
54 | /* We get the informations of streams */ | ||
55 | alist = ev->audio_sinks; | ||
56 | vlist = ev->video_sinks; | ||
57 | |||
58 | { | ||
59 | GstIterator *it; | ||
60 | gpointer data; | ||
61 | |||
62 | it = gst_element_iterate_src_pads(dvddemux); | ||
63 | while (gst_iterator_next(it, &data) == GST_ITERATOR_OK) | ||
64 | { | ||
65 | GstPad *pad; | ||
66 | GstCaps *caps; | ||
67 | gchar *str; | ||
68 | |||
69 | pad = GST_PAD(data); | ||
70 | |||
71 | caps = gst_pad_get_caps(pad); | ||
72 | str = gst_caps_to_string(caps); | ||
73 | DBG("caps %s", str); | ||
74 | /* video stream */ | ||
75 | if (g_str_has_prefix(str, "video/mpeg")) | ||
76 | { | ||
77 | Emotion_Video_Sink *vsink; | ||
78 | GstPad *sink_pad; | ||
79 | GstCaps *sink_caps; | ||
80 | |||
81 | vsink = (Emotion_Video_Sink *)eina_list_data_get(vlist); | ||
82 | vlist = eina_list_next(vlist); | ||
83 | sink_pad = gst_element_get_pad(gst_bin_get_by_name(GST_BIN(ev->pipeline), "mpeg2dec"), "src"); | ||
84 | sink_caps = gst_pad_get_caps(sink_pad); | ||
85 | str = gst_caps_to_string(sink_caps); | ||
86 | DBG("caps video %s", str); | ||
87 | |||
88 | emotion_video_sink_fill(vsink, sink_pad, sink_caps); | ||
89 | |||
90 | gst_caps_unref(sink_caps); | ||
91 | gst_object_unref(sink_pad); | ||
92 | } | ||
93 | /* audio stream */ | ||
94 | else if (g_str_has_prefix(str, "audio/")) | ||
95 | { | ||
96 | Emotion_Audio_Sink *asink; | ||
97 | GstPad *sink_pad; | ||
98 | GstCaps *sink_caps; | ||
99 | |||
100 | asink = (Emotion_Audio_Sink *)eina_list_data_get(alist); | ||
101 | alist = eina_list_next(alist); | ||
102 | sink_pad = gst_element_get_pad(gst_bin_get_by_name(GST_BIN(ev->pipeline), "a52dec"), "src"); | ||
103 | sink_caps = gst_pad_get_caps(sink_pad); | ||
104 | |||
105 | emotion_audio_sink_fill(asink, sink_pad, sink_caps); | ||
106 | } | ||
107 | gst_caps_unref(caps); | ||
108 | g_free(str); | ||
109 | gst_object_unref(pad); | ||
110 | } | ||
111 | gst_iterator_free(it); | ||
112 | } | ||
113 | |||
114 | /* The first vsink is a valid Emotion_Video_Sink * */ | ||
115 | /* If no video stream is found, it's a visualisation sink */ | ||
116 | { | ||
117 | Emotion_Video_Sink *vsink; | ||
118 | |||
119 | vsink = (Emotion_Video_Sink *)eina_list_data_get(ev->video_sinks); | ||
120 | if (vsink && vsink->sink) | ||
121 | { | ||
122 | g_object_set(G_OBJECT(vsink->sink), "sync", TRUE, NULL); | ||
123 | g_object_set(G_OBJECT(vsink->sink), "signal-handoffs", TRUE, NULL); | ||
124 | g_signal_connect(G_OBJECT(vsink->sink), | ||
125 | "handoff", | ||
126 | G_CALLBACK(cb_handoff), ev); | ||
127 | } | ||
128 | } | ||
129 | |||
130 | return 1; | ||
131 | |||
132 | failure_gstreamer_pause: | ||
133 | failure_link: | ||
134 | gst_element_set_state(ev->pipeline, GST_STATE_NULL); | ||
135 | gst_bin_remove(GST_BIN(ev->pipeline), dvddemux); | ||
136 | failure_dvddemux: | ||
137 | gst_bin_remove(GST_BIN(ev->pipeline), dvdreadsrc); | ||
138 | failure_dvdreadsrc: | ||
139 | |||
140 | return 0; | ||
141 | } | ||
142 | |||
143 | static void | ||
144 | dvd_pad_added_cb(GstElement *dvddemuxer __UNUSED__, | ||
145 | GObject *new_pad, | ||
146 | gpointer user_data) | ||
147 | { | ||
148 | Emotion_Gstreamer_Video *ev; | ||
149 | GstCaps *caps; | ||
150 | gchar *str; | ||
151 | |||
152 | ev = (Emotion_Gstreamer_Video *)user_data; | ||
153 | caps = gst_pad_get_caps(GST_PAD(new_pad)); | ||
154 | str = gst_caps_to_string(caps); | ||
155 | /* video stream */ | ||
156 | if (g_str_has_prefix(str, "video/mpeg")) | ||
157 | { | ||
158 | Emotion_Video_Sink *vsink; | ||
159 | GstElement *queue; | ||
160 | GstElement *decoder; | ||
161 | GstPad *videopad; | ||
162 | |||
163 | vsink = (Emotion_Video_Sink *)malloc(sizeof(Emotion_Video_Sink)); | ||
164 | if (!vsink) return; | ||
165 | ev->video_sinks = eina_list_append(ev->video_sinks, vsink); | ||
166 | if (eina_error_get()) | ||
167 | { | ||
168 | free(vsink); | ||
169 | return; | ||
170 | } | ||
171 | |||
172 | queue = gst_element_factory_make("queue", NULL); | ||
173 | decoder = gst_element_factory_make("mpeg2dec", "mpeg2dec"); | ||
174 | vsink->sink = gst_element_factory_make("fakesink", "videosink"); | ||
175 | gst_bin_add_many(GST_BIN(ev->pipeline), queue, decoder, vsink->sink, NULL); | ||
176 | gst_element_link(queue, decoder); | ||
177 | gst_element_link(decoder, vsink->sink); | ||
178 | videopad = gst_element_get_pad(queue, "sink"); | ||
179 | gst_pad_link(GST_PAD(new_pad), videopad); | ||
180 | gst_object_unref(videopad); | ||
181 | if (eina_list_count(ev->video_sinks) == 1) | ||
182 | { | ||
183 | ev->ratio = (double)vsink->width / (double)vsink->height; | ||
184 | } | ||
185 | gst_element_set_state(queue, GST_STATE_PAUSED); | ||
186 | gst_element_set_state(decoder, GST_STATE_PAUSED); | ||
187 | gst_element_set_state(vsink->sink, GST_STATE_PAUSED); | ||
188 | } | ||
189 | /* audio stream */ | ||
190 | else if (g_str_has_prefix(str, "audio/")) | ||
191 | { | ||
192 | Emotion_Audio_Sink *asink; | ||
193 | GstElement *queue; | ||
194 | GstElement *decoder; | ||
195 | GstElement *conv; | ||
196 | GstElement *resample; | ||
197 | GstElement *volume; | ||
198 | GstPad *audiopad; | ||
199 | double vol; | ||
200 | |||
201 | asink = (Emotion_Audio_Sink *)malloc(sizeof(Emotion_Audio_Sink)); | ||
202 | if (!asink) return; | ||
203 | ev->audio_sinks = eina_list_append(ev->audio_sinks, asink); | ||
204 | if (eina_error_get()) | ||
205 | { | ||
206 | free(asink); | ||
207 | return; | ||
208 | } | ||
209 | |||
210 | queue = gst_element_factory_make("queue", NULL); | ||
211 | decoder = gst_element_factory_make("a52dec", "a52dec"); | ||
212 | conv = gst_element_factory_make("audioconvert", NULL); | ||
213 | resample = gst_element_factory_make("audioresample", NULL); | ||
214 | volume = gst_element_factory_make("volume", "volume"); | ||
215 | g_object_get(G_OBJECT(volume), "volume", &vol, NULL); | ||
216 | ev->volume = vol / 10.0; | ||
217 | |||
218 | /* FIXME: must manage several audio streams */ | ||
219 | asink->sink = gst_element_factory_make("fakesink", NULL); | ||
220 | |||
221 | gst_bin_add_many(GST_BIN(ev->pipeline), | ||
222 | queue, decoder, conv, resample, volume, asink->sink, NULL); | ||
223 | gst_element_link_many(queue, decoder, conv, resample, volume, asink->sink, NULL); | ||
224 | |||
225 | audiopad = gst_element_get_pad(queue, "sink"); | ||
226 | gst_pad_link(GST_PAD(new_pad), audiopad); | ||
227 | gst_object_unref(audiopad); | ||
228 | |||
229 | gst_element_set_state(queue, GST_STATE_PAUSED); | ||
230 | gst_element_set_state(decoder, GST_STATE_PAUSED); | ||
231 | gst_element_set_state(conv, GST_STATE_PAUSED); | ||
232 | gst_element_set_state(resample, GST_STATE_PAUSED); | ||
233 | gst_element_set_state(volume, GST_STATE_PAUSED); | ||
234 | gst_element_set_state(asink->sink, GST_STATE_PAUSED); | ||
235 | } | ||
236 | } | ||
237 | |||
238 | static void | ||
239 | dvd_no_more_pads_cb(GstElement *dvddemuxer __UNUSED__, | ||
240 | gpointer user_data __UNUSED__) | ||
241 | { | ||
242 | no_more_pads = 1; | ||
243 | } | ||
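One detail of the removed DVD builder worth flagging is the busy loop on the volatile no_more_pads flag, which spins the CPU until dvddemux emits "no-more-pads". None of that synchronization survives the switch, but for reference the usual fix is a blocking wait on a mutex/condition pair. The sketch below uses the pre-2.32 g_mutex_new()/g_cond_new() constructors that match the GLib of this era; every name in it is hypothetical.

#include <glib.h>
#include <gst/gst.h>

static GMutex  *pads_lock;
static GCond   *pads_cond;
static gboolean pads_done = FALSE;

/* Create the lock and condition once, before connecting the signal. */
static void
pads_sync_init(void)
{
   pads_lock = g_mutex_new();
   pads_cond = g_cond_new();
}

/* "no-more-pads" handler: flip the flag and wake the waiter. */
static void
no_more_pads_cb(GstElement *demux, gpointer data)
{
   (void)demux; (void)data;
   g_mutex_lock(pads_lock);
   pads_done = TRUE;
   g_cond_signal(pads_cond);
   g_mutex_unlock(pads_lock);
}

/* Block without burning CPU until the demuxer has exposed all pads. */
static void
pads_wait(void)
{
   g_mutex_lock(pads_lock);
   while (!pads_done)
     g_cond_wait(pads_cond, pads_lock);
   g_mutex_unlock(pads_lock);
}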
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c deleted file mode 100644 index 13cd381c7a..0000000000 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_file.c +++ /dev/null | |||
@@ -1,61 +0,0 @@ | |||
1 | #include "emotion_gstreamer.h" | ||
2 | #include "emotion_gstreamer_pipeline.h" | ||
3 | |||
4 | int | ||
5 | emotion_pipeline_file_build(void *video, const char *file) | ||
6 | { | ||
7 | GstElement *filesrc; | ||
8 | GstElement *decodebin; | ||
9 | Emotion_Gstreamer_Video *ev; | ||
10 | |||
11 | ev = (Emotion_Gstreamer_Video *)video; | ||
12 | if (!ev) return 0; | ||
13 | |||
14 | filesrc = gst_element_factory_make("filesrc", "src"); | ||
15 | if (!filesrc) | ||
16 | goto failure_filesrc; | ||
17 | g_object_set(G_OBJECT(filesrc), "location", file, NULL); | ||
18 | |||
19 | decodebin = gst_element_factory_make("decodebin", "decodebin"); | ||
20 | if (!decodebin) | ||
21 | goto failure_decodebin; | ||
22 | g_signal_connect(decodebin, "new-decoded-pad", | ||
23 | G_CALLBACK(file_new_decoded_pad_cb), ev); | ||
24 | |||
25 | gst_bin_add_many(GST_BIN(ev->pipeline), filesrc, decodebin, NULL); | ||
26 | if (!gst_element_link(filesrc, decodebin)) | ||
27 | goto failure_link; | ||
28 | |||
29 | if (!emotion_pipeline_pause(ev->pipeline)) | ||
30 | goto failure_gstreamer_pause; | ||
31 | |||
32 | emotion_streams_sinks_get(ev, decodebin); | ||
33 | |||
34 | /* The first vsink is a valid Emotion_Video_Sink * */ | ||
35 | /* If no video stream is found, it's a visualisation sink */ | ||
36 | { | ||
37 | Emotion_Video_Sink *vsink; | ||
38 | |||
39 | vsink = (Emotion_Video_Sink *)eina_list_data_get(ev->video_sinks); | ||
40 | if (vsink && vsink->sink) | ||
41 | { | ||
42 | g_object_set(G_OBJECT(vsink->sink), "sync", TRUE, NULL); | ||
43 | g_object_set(G_OBJECT(vsink->sink), "signal-handoffs", TRUE, NULL); | ||
44 | g_signal_connect(G_OBJECT(vsink->sink), | ||
45 | "handoff", | ||
46 | G_CALLBACK(cb_handoff), ev); | ||
47 | } | ||
48 | } | ||
49 | |||
50 | return 1; | ||
51 | |||
52 | failure_gstreamer_pause: | ||
53 | failure_link: | ||
54 | gst_element_set_state(ev->pipeline, GST_STATE_NULL); | ||
55 | gst_bin_remove(GST_BIN(ev->pipeline), decodebin); | ||
56 | failure_decodebin: | ||
57 | gst_bin_remove(GST_BIN(ev->pipeline), filesrc); | ||
58 | failure_filesrc: | ||
59 | |||
60 | return 0; | ||
61 | } | ||
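The local-file case reduces to converting the path into a URI, since playbin2 only accepts URIs. A minimal sketch, assuming GLib's g_filename_to_uri() does the conversion (how the new emotion_gstreamer.c actually forms the URI is not visible in this diff; play_file is an invented name):

#include <glib.h>
#include <gst/gst.h>

/* Turn "/home/user/video.ogv" into "file:///home/user/video.ogv" and hand
 * it to playbin2; filesrc, decodebin and the new-decoded-pad callback are
 * no longer needed. */
static gboolean
play_file(GstElement *playbin, const char *path)
{
   gchar *uri;

   uri = g_filename_to_uri(path, NULL, NULL);   /* NULL on relative paths */
   if (!uri) return FALSE;

   g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
   g_free(uri);

   return gst_element_set_state(playbin, GST_STATE_PAUSED)
          != GST_STATE_CHANGE_FAILURE;
}

g_filename_to_uri() rejects relative paths, which is why the NULL check matters before handing the string to playbin2.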
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c deleted file mode 100644 index 6fb6abca2c..0000000000 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_uri.c +++ /dev/null | |||
@@ -1,63 +0,0 @@ | |||
1 | #include "emotion_gstreamer.h" | ||
2 | #include "emotion_gstreamer_pipeline.h" | ||
3 | |||
4 | int | ||
5 | emotion_pipeline_uri_build(void *video, const char *uri) | ||
6 | { | ||
7 | GstElement *src; | ||
8 | GstElement *decodebin; | ||
9 | Emotion_Gstreamer_Video *ev; | ||
10 | |||
11 | ev = (Emotion_Gstreamer_Video *)video; | ||
12 | if (!ev) return 0; | ||
13 | |||
14 | if (gst_uri_protocol_is_supported(GST_URI_SRC, uri)) | ||
15 | goto failure_src; | ||
16 | src = gst_element_make_from_uri(GST_URI_SRC, uri, "src"); | ||
17 | if (!src) | ||
18 | goto failure_src; | ||
19 | g_object_set(G_OBJECT(src), "location", uri, NULL); | ||
20 | |||
21 | decodebin = gst_element_factory_make("decodebin", "decodebin"); | ||
22 | if (!decodebin) | ||
23 | goto failure_decodebin; | ||
24 | g_signal_connect(decodebin, "new-decoded-pad", | ||
25 | G_CALLBACK(file_new_decoded_pad_cb), ev); | ||
26 | |||
27 | gst_bin_add_many(GST_BIN(ev->pipeline), src, decodebin, NULL); | ||
28 | if (!gst_element_link(src, decodebin)) | ||
29 | goto failure_link; | ||
30 | |||
31 | if (!emotion_pipeline_pause(ev->pipeline)) | ||
32 | goto failure_gstreamer_pause; | ||
33 | |||
34 | emotion_streams_sinks_get(ev, decodebin); | ||
35 | |||
36 | /* The first vsink is a valid Emotion_Video_Sink * */ | ||
37 | /* If no video stream is found, it's a visualisation sink */ | ||
38 | { | ||
39 | Emotion_Video_Sink *vsink; | ||
40 | |||
41 | vsink = (Emotion_Video_Sink *)eina_list_data_get(ev->video_sinks); | ||
42 | if (vsink && vsink->sink) | ||
43 | { | ||
44 | g_object_set(G_OBJECT(vsink->sink), "sync", TRUE, NULL); | ||
45 | g_object_set(G_OBJECT(vsink->sink), "signal-handoffs", TRUE, NULL); | ||
46 | g_signal_connect(G_OBJECT(vsink->sink), | ||
47 | "handoff", | ||
48 | G_CALLBACK(cb_handoff), ev); | ||
49 | } | ||
50 | } | ||
51 | |||
52 | return 1; | ||
53 | |||
54 | failure_gstreamer_pause: | ||
55 | failure_link: | ||
56 | gst_element_set_state(ev->pipeline, GST_STATE_NULL); | ||
57 | gst_bin_remove(GST_BIN(ev->pipeline), decodebin); | ||
58 | failure_decodebin: | ||
59 | gst_bin_remove(GST_BIN(ev->pipeline), src); | ||
60 | failure_src: | ||
61 | |||
62 | return 0; | ||
63 | } | ||
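Two quirks of the removed URI builder are worth noting: the guard jumps to failure_src when gst_uri_protocol_is_supported() reports the scheme *is* supported (the test is inverted), and setting "location" to the full URI after gst_element_make_from_uri() is redundant at best, since that call already configures the source from the URI. Neither matters once playbin2 picks the source itself, but a corrected stand-alone guard would look roughly like the sketch below (GStreamer 0.10 API; the helper name is made up):

#include <gst/gst.h>

/* Create a source element for a URI, failing cleanly on unsupported or
 * malformed schemes instead of bailing out on supported ones. */
static GstElement *
uri_source_create(const char *uri)
{
   if (!uri || !gst_uri_is_valid(uri))
     return NULL;                       /* not even a "scheme://" string */

   /* Returns NULL when no installed plugin handles the scheme, so no
    * separate gst_uri_protocol_is_supported() check is required. */
   return gst_element_make_from_uri(GST_URI_SRC, uri, "src");
}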
diff --git a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_v4l.c b/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_v4l.c deleted file mode 100644 index 72b88622a3..0000000000 --- a/legacy/emotion/src/modules/gstreamer/emotion_gstreamer_pipeline_v4l.c +++ /dev/null | |||
@@ -1,84 +0,0 @@ | |||
1 | #include "emotion_gstreamer.h" | ||
2 | #include "emotion_gstreamer_pipeline.h" | ||
3 | |||
4 | int | ||
5 | emotion_pipeline_v4l_build(void *video, const char *device) | ||
6 | { | ||
7 | GstElement *v4l2src, *cspace, *queue, *sink; | ||
8 | Emotion_Video_Sink *vsink; | ||
9 | GstCaps *caps; | ||
10 | Emotion_Gstreamer_Video *ev; | ||
11 | char dev[128]; | ||
12 | int devno; | ||
13 | |||
14 | ev = (Emotion_Gstreamer_Video *)video; | ||
15 | if (!ev) return 0; | ||
16 | |||
17 | v4l2src = gst_element_factory_make("v4l2src", "v4l2src"); | ||
18 | cspace = gst_element_factory_make("ffmpegcolorspace", "cspace"); | ||
19 | queue = gst_element_factory_make("queue", "queue"); | ||
20 | sink = gst_element_factory_make("fakesink", "sink"); | ||
21 | |||
22 | if ((!v4l2src) || (!cspace) || (!queue) || (!sink)) | ||
23 | goto failure; | ||
24 | |||
25 | if (sscanf(device, "v4l://%d", &devno) != 1) | ||
26 | devno = 0; | ||
27 | |||
28 | snprintf(dev, sizeof(dev), "/dev/video%d", devno); | ||
29 | g_object_set (v4l2src, "device", dev, NULL); | ||
30 | |||
31 | gst_bin_add_many(GST_BIN(ev->pipeline), v4l2src, cspace, queue, sink, NULL); | ||
32 | |||
33 | caps = gst_caps_new_simple("video/x-raw-yuv", | ||
34 | "width", G_TYPE_INT, 320, | ||
35 | "height", G_TYPE_INT, 240, | ||
36 | NULL); | ||
37 | if (!gst_element_link_filtered(v4l2src, cspace, caps)) | ||
38 | { | ||
39 | gst_caps_unref(caps); | ||
40 | goto failure; | ||
41 | } | ||
42 | gst_caps_unref(caps); | ||
43 | |||
44 | caps = gst_caps_new_simple("video/x-raw-rgb", | ||
45 | "bpp", G_TYPE_INT, 32, | ||
46 | "width", G_TYPE_INT, 320, | ||
47 | "height", G_TYPE_INT, 240, | ||
48 | NULL); | ||
49 | if (!gst_element_link_filtered(cspace, queue, caps)) | ||
50 | { | ||
51 | gst_caps_unref(caps); | ||
52 | goto failure; | ||
53 | } | ||
54 | gst_caps_unref(caps); | ||
55 | |||
56 | gst_element_link(queue, sink); | ||
57 | |||
58 | vsink = emotion_video_sink_new(ev); | ||
59 | if(!vsink) goto failure; | ||
60 | vsink->sink = sink; | ||
61 | vsink->width=320; | ||
62 | vsink->height=240; | ||
63 | vsink->fourcc = GST_MAKE_FOURCC ('A', 'R', 'G', 'B'); | ||
64 | |||
65 | g_object_set(G_OBJECT(vsink->sink), "sync", FALSE, NULL); | ||
66 | g_object_set(G_OBJECT(vsink->sink), "signal-handoffs", TRUE, NULL); | ||
67 | g_signal_connect(G_OBJECT(vsink->sink), | ||
68 | "handoff", | ||
69 | G_CALLBACK(cb_handoff), ev); | ||
70 | |||
71 | return 1; | ||
72 | |||
73 | failure: | ||
74 | if(v4l2src) | ||
75 | gst_object_unref(v4l2src); | ||
76 | if(cspace) | ||
77 | gst_object_unref(cspace); | ||
78 | if(queue) | ||
79 | gst_object_unref(queue); | ||
80 | if(sink) | ||
81 | gst_object_unref(sink); | ||
82 | |||
83 | return 0; | ||
84 | } | ||
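Beyond the pipeline assembly itself, the failure path above appears to unref elements that gst_bin_add_many() has already handed to the bin, so taking it after the add would drop references twice. With playbin2 the capture device can instead be reached through the "v4l2://" URI handler registered by v4l2src; the sketch below assumes that handler is available (gst-plugins-good 0.10), keeps the "v4l://N" parsing of the removed code, and uses an invented helper name.

#include <stdio.h>
#include <gst/gst.h>

/* Map emotion's "v4l://N" notation onto a v4l2:///dev/videoN URI that
 * playbin2 can open through v4l2src's URI handler. */
static gboolean
play_v4l(GstElement *playbin, const char *device)
{
   char uri[128];
   int  devno;

   if (!device || sscanf(device, "v4l://%d", &devno) != 1)
     devno = 0;

   snprintf(uri, sizeof(uri), "v4l2:///dev/video%d", devno);
   g_object_set(G_OBJECT(playbin), "uri", uri, NULL);

   return gst_element_set_state(playbin, GST_STATE_PLAYING)
          != GST_STATE_CHANGE_FAILURE;
}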