From f8d0abd64ee3606d7927765cbb38c981fc6aa311 Mon Sep 17 00:00:00 2001
From: Reinhard Tartler
Date: Sun, 6 Apr 2014 11:35:53 -0400
Subject: [PATCH] Imported Upstream version 0.5.0+svn5194~dfsg1

---
 applications/dashcast/video_encoder.c | 2 +-
 applications/mp42ts/main.c | 451 ++++++----
 applications/mp4box/filedump.c | 50 +-
 applications/mp4box/fileimport.c | 20 +-
 applications/mp4box/live.c | 4 +-
 applications/mp4box/main.c | 40 +-
 applications/mp4client/main.c | 54 +-
 applications/testapps/hevcbench/defbench.h | 111 +++
 .../testapps/hevcbench/hevcbench.vcxproj | 262 ++++++
 applications/testapps/hevcbench/main.c | 837 ++++++++++++++++++
 configure | 58 +-
 doc/configuration.html | 22 +-
 extra_lib/include/openHevcWrapper.h | 3 +
 gui/gui.bt | 4 +-
 gui/gui.js | 287 +++---
 gui/gwlib.js | 18 +-
 include/gpac/avparse.h | 1 +
 include/gpac/color.h | 27 +-
 include/gpac/compositor.h | 4 +-
 include/gpac/configuration.h | 55 +-
 include/gpac/constants.h | 2 +-
 include/gpac/dash.h | 14 +-
 include/gpac/events.h | 10 +
 include/gpac/events_constants.h | 2 +
 include/gpac/html5_media.h | 26 +-
 include/gpac/html5_mse.h | 34 +-
 include/gpac/ietf.h | 1 +
 include/gpac/internal/compositor_dev.h | 42 +-
 include/gpac/internal/isomedia_dev.h | 128 ++-
 include/gpac/internal/media_dev.h | 32 +-
 include/gpac/internal/mpd.h | 1 +
 include/gpac/internal/reedsolomon.h | 2 +-
 include/gpac/internal/scenegraph_dev.h | 4 +-
 include/gpac/internal/smjs_api.h | 4 +
 include/gpac/internal/terminal_dev.h | 59 +-
 include/gpac/ismacryp.h | 9 +-
 include/gpac/isomedia.h | 12 +-
 include/gpac/math.h | 3 -
 include/gpac/media_tools.h | 4 +-
 include/gpac/mediaobject.h | 14 +-
 include/gpac/modules/codec.h | 32 +-
 include/gpac/modules/service.h | 74 +-
 include/gpac/modules/video_out.h | 2 +
 include/gpac/mpeg4_odf.h | 2 +-
 include/gpac/mpegts.h | 98 +-
 include/gpac/scenegraph.h | 2 +
 include/gpac/scenegraph_svg.h | 3 +-
 include/gpac/setup.h | 3 +
 include/gpac/sync_layer.h | 2 +
 include/gpac/term_info.h | 5 +-
 include/gpac/tools.h | 10 +-
 include/gpac/version.h | 2 +-
 modules/Makefile | 6 +-
 modules/aac_in/faad_dec.c | 2 +-
 modules/ac3_in/ac3_in.c | 12 +-
 modules/ac3_in/liba52_dec.c | 2 +-
 modules/amr_dec/amr_dec.c | 2 +-
 modules/amr_float_dec/amr_float_dec.c | 2 +-
 modules/ctx_load/ctx_load.c | 6 +-
 modules/dx_hw/copy_pixels.c | 54 +-
 modules/dx_hw/dx_2d.c | 5 -
 modules/dx_hw/dx_video.c | 48 +-
 modules/dx_hw/dx_window.c | 3 +-
 modules/epoc_hw/epoc_codec.cpp | 2 +-
 modules/ffmpeg_in/ffmpeg_decode.c | 156 +++-
 modules/ffmpeg_in/ffmpeg_demux.c | 98 +-
 modules/ffmpeg_in/ffmpeg_in.h | 9 +-
 modules/ft_font/ft_font.c | 2 +-
 modules/gapi/gapi.cpp | 1 +
 modules/gpac_js/gpac_js.c | 12 +-
 modules/img_in/bmp_dec.c | 2 +-
 modules/img_in/jp2_dec.c | 2 +-
 modules/img_in/jpeg_dec.c | 2 +-
 modules/img_in/png_dec.c | 2 +-
 modules/isom_in/isom_in.h | 4 +
 modules/isom_in/load.c | 64 --
 modules/isom_in/read.c | 24 +-
 modules/isom_in/read_ch.c | 6 +-
 modules/mp3_in/mad_dec.c | 2 +-
 modules/mpd_in/mpd_in.c | 133 ++-
 modules/mpegts_in/mpegts_in.c | 88 +-
 modules/mse_in/mse_in.c | 2 +
 modules/ogg/ogg_in.c | 20 +-
 modules/ogg/theora_dec.c | 2 +-
 modules/ogg/vorbis_dec.c | 2 +-
 modules/openhevc_dec/openhevc_dec.c | 270 ++++--
 modules/opensvc_dec/opensvc_dec.c | 19 +-
 modules/osd/osd.c | 2 +-
 modules/rtp_in/rtp_in.h | 2 +-
 modules/rtp_in/sdp_load.c | 4 +-
 modules/rvc_dec/rvc_dec.c | 82 +-
 modules/sdl_out/video.c | 16 +-
 modules/widgetman/widget.c | 4 +-
 modules/widgetman/widgetman.c | 6 +-
 modules/x11_out/x11_out.c | 38 +-
 modules/xvid_dec/xvid_dec.c | 2 +-
 modules/xvid_dec/xvid_dec_wce.cpp | 2 +-
 .../bifs/bifs-2D-interactivity-mousesensor.bt | 2 +-
 src/bifs/script_enc.c | 12 +-
 src/compositor/audio_input.c | 12 +-
 src/compositor/audio_render.c | 23 +-
 src/compositor/compositor.c | 514 +++++++----
 src/compositor/compositor_2d.c | 104 ++-
 src/compositor/compositor_3d.c | 3 +-
 src/compositor/gl_inc.h | 6 +
 src/compositor/mpeg4_textures.c | 8 +-
 src/compositor/svg_grouping.c | 2 +-
 src/compositor/svg_media.c | 32 +-
 src/compositor/texturing.c | 21 +-
 src/compositor/texturing.h | 2 +
 src/compositor/texturing_gl.c | 266 ++++--
 src/compositor/visual_manager.h | 9 +-
 src/compositor/visual_manager_2d.h | 2 +-
 src/compositor/visual_manager_3d_gl.c | 364 ++++----
 src/export.cpp | 13 +-
 src/ietf/rtp_depacketizer.c | 6 +-
 src/ietf/rtp_packetizer.c | 5 +
 src/ietf/rtp_pck_mpeg4.c | 4 +-
 src/ietf/rtp_streamer.c | 8 +-
 src/isomedia/avc_ext.c | 339 ++++---
 src/isomedia/box_code_base.c | 3 +-
 src/isomedia/box_code_drm.c | 487 +++++++++-
 src/isomedia/box_dump.c | 101 +++
 src/isomedia/box_funcs.c | 41 +
 src/isomedia/drm_sample.c | 111 ++-
 src/isomedia/isom_read.c | 53 +-
 src/isomedia/isom_write.c | 118 ++-
 src/isomedia/media.c | 18 +-
 src/isomedia/movie_fragments.c | 13 +-
 src/isomedia/stbl_read.c | 3 +
 src/isomedia/stbl_write.c | 29 +-
 src/isomedia/track.c | 110 ++-
 src/mcrypt/cbc.c | 18 +-
 src/media_tools/av_parsers.c | 329 +++++--
 src/media_tools/dash_client.c | 475 +++++---
 src/media_tools/dash_segmenter.c | 108 ++-
 src/media_tools/filestreamer.c | 15 +-
 src/media_tools/html5_media.c | 280 +++++-
 src/media_tools/html5_mse.c | 500 +++++----
 src/media_tools/img.c | 2 +-
 src/media_tools/ismacryp.c | 272 +++++-
 src/media_tools/isom_tools.c | 112 ++-
 src/media_tools/m2ts_mux.c | 181 +++-
 src/media_tools/media_export.c | 30 +-
 src/media_tools/media_import.c | 94 +-
 src/media_tools/mpd.c | 2 +
 src/media_tools/mpegts.c | 673 +++++++---
 src/media_tools/reedsolomon.c | 10 +-
 src/media_tools/webvtt.c | 7 +-
 src/scene_manager/loader_bt.c | 2 +-
 src/scene_manager/scene_dump.c | 2 +-
 src/scene_manager/swf_parse.c | 4 +-
 src/scenegraph/base_scenegraph.c | 3 +
 src/scenegraph/dom_events.c | 32 +-
 src/scenegraph/dom_smjs.c | 103 ++-
 src/scenegraph/html5_media_smjs.c | 398 +++++---
 src/scenegraph/html5_mse_smjs.c | 261 ++++--
 src/scenegraph/svg_attributes.c | 4 +-
 src/scenegraph/vrml_smjs.c | 9 +-
 src/terminal/channel.c | 76 +-
 src/terminal/clock.c | 6 +-
 src/terminal/decoder.c | 164 ++--
 src/terminal/media_control.c | 4 +-
 src/terminal/media_manager.c | 47 +-
 src/terminal/media_memory.c | 75 +-
 src/terminal/media_memory.h | 5 +-
 src/terminal/media_object.c | 86 +-
 src/terminal/mpeg4_inline.c | 33 +-
 src/terminal/network_service.c | 119 ++-
 src/terminal/object_browser.c | 3 +
 src/terminal/object_manager.c | 129 ++-
 src/terminal/scene.c | 323 ++++++-
 src/terminal/term_node_init.c | 10 +
 src/terminal/terminal.c | 50 +-
 src/utils/color.c | 186 +++-
 src/utils/downloader.c | 31 +-
 src/utils/module.c | 15 +-
 src/utils/os_divers.c | 34 +
 178 files changed, 9463 insertions(+), 2875 deletions(-)
 create mode 100644 applications/testapps/hevcbench/defbench.h
 create mode 100644 applications/testapps/hevcbench/hevcbench.vcxproj
 create mode 100644 applications/testapps/hevcbench/main.c

diff --git a/applications/dashcast/video_encoder.c b/applications/dashcast/video_encoder.c
index 1f3a41a..e174ed6 100644
--- a/applications/dashcast/video_encoder.c
+++ b/applications/dashcast/video_encoder.c
@@ -59,7 +59,7 @@ void build_dict(void *priv_data, const char *options) {
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Unknown custom option \"%s\" with value \"%s\" in %s\n", tok, tokval, options)); tok = strtok(NULL, "="); } - free(opt); + gf_free(opt); } int dc_video_encoder_open(VideoOutputFile *video_output_file, VideoDataConf *video_data_conf, Bool use_source_timing) diff --git a/applications/mp42ts/main.c b/applications/mp42ts/main.c index 50e5915..20e3368 100644 --- a/applications/mp42ts/main.c +++ b/applications/mp42ts/main.c @@ -36,6 +36,10 @@ #include #endif +#ifndef GPAC_DISABLE_TTXT +#include +#endif + #ifdef GPAC_DISABLE_ISOM #error "Cannot compile MP42TS if GPAC is not built with ISO File Format support" @@ -49,7 +53,7 @@ #endif -#define DEFAULT_PCR_OFFSET 18000 +#define DEFAULT_PCR_OFFSET 0 #define UDP_BUFFER_SIZE 0x40000 @@ -57,59 +61,78 @@ #define MP42TS_VIDEO_FREQ 1000 /*meant to send AVC IDR only every CLOCK_REFRESH ms*/ u32 temi_url_insertion_delay = 1000; +FILE *logfile = NULL; + +static void on_gpac_log(void *cbk, u32 ll, u32 lm, const char *fmt, va_list list) +{ + FILE *logs = cbk; + vfprintf(logs, fmt, list); + fflush(logs); +} -static GFINLINE void usage(const char * progname) +static GFINLINE void usage() { - fprintf(stderr, "USAGE: %s -rate=R [[-prog=prog1]..[-prog=progn]] [-audio=url] [-video=url] [-mpeg4-carousel=n] [-mpeg4] [-time=n] [-src=file] DST [[DST]]\n" + fprintf(stderr, "GPAC version " GPAC_FULL_VERSION "\n" + "GPAC Copyright (c) Telecom ParisTech 2000-2012\n" + "GPAC Configuration: " GPAC_CONFIGURATION "\n" + "Features: %s\n\n", gpac_features()); + fprintf(stderr, "mp2ts [options]\n" "\n" + "Inputs:\n" + "-prog filename specifies an input file used for a TS service\n" + " * currently only supports ISO files and SDP files\n" + " * can be used several times, once for each program\n" + "\n" + "Destinations:\n" + "Several destinations may be specified as follows, at least one is mandatory\n" + "-dst-udp UDP_address:port (multicast or unicast)\n" + "-dst-rtp RTP_address:port\n" + "-dst-file filename\n" + "The following parameters may be specified when -dst-file is used\n" + "-segment-dir dir server local directory to store segments (ends with a '/')\n" + "-segment-duration dur segment duration in seconds\n" + "-segment-manifest file m3u8 file basename\n" + "-segment-http-prefix p client address for accessing server segments\n" + "-segment-number n number of segments to list in the manifest\n" + "\n" + "Basic options:\n" + "-rate R specifies target rate in kbps of the multiplex (optional)\n" + "-real-time specifies the muxer will work in real-time mode\n" + " * if not specified, the muxer will generate the TS as quickly as possible\n" + " * automatically set for SDP or BT input\n" + "-pcr-init V sets initial value V for PCR - if not set, random value is used\n" + "-pcr-offset V offsets all timestamps from PCR by V, in 90kHz. 
Default value: %d\n" + "-psi-rate V sets PSI refresh rate V in ms (default 100ms).\n" + " * If 0, PSI data is only send once at the begining or before each IDR when -rap option is set.\n" + " * This should be set to 0 for DASH streams.\n" + "-time n request the muxer to stop after n ms\n" + "-single-au forces 1 PES = 1 AU (disabled by default)\n" + "-rap forces RAP/IDR to be aligned with PES start for video streams (disabled by default)\n" + " in this mode, PAT, PMT and PCR will be inserted before the first TS packet of the RAP PES\n" + "-flush-rap same as -rap but flushes all other streams (sends remaining PES packets) before inserting PAT/PMT\n" + "-nb-pack N specifies to pack N TS packets together before sending on network or writing to file\n" + "-ttl N specifies Time-To-Live for multicast. Default is 1.\n" + "-ifce IPIFCE specifies default IP interface to use. Default is IF_ANY.\n" + "-temi [URL] Inserts TEMI time codes in adaptation field. URL is optionnal\n" + "-temi-delay DelayMS Specifies delay between two TEMI url descriptors\n" + "\n" + "MPEG-4/T-DMB options:\n" + "-src filename update file: must be either an .sdp or a .bt file\n" + "-audio url may be mp3/udp or aac/http (shoutcast/icecast)\n" + "-video url shall be a raw h264 frame\n" + "-mpeg4-carousel n carousel period in ms\n" + "-mpeg4 or -4on2 forces usage of MPEG-4 signaling (IOD and SL Config)\n" + "-4over2 same as -4on2 and uses PMT to carry OD Updates\n" + "-bifs-pes carries BIFS over PES instead of sections\n" + "-bifs-pes-ex carries BIFS over PES without writing timestamps in SL\n" + "\n" + "Misc options\n" #ifdef GPAC_MEMORY_TRACKING - "\t-mem-track: enables memory tracker\n" + "-mem-track enables memory tracker\n" #endif - "\t-rate=R specifies target rate in kbps of the multiplex (mandatory)\n" - "\t-real-time specifies the muxer will work in real-time mode\n" - "\t * automatically set for SDP or BT input\n" - "\t-pcr-init=V sets initial value V for PCR - if not set, random value is used\n" - "\t-pcr-offset=V offsets all timestamps from PCR by V, in 90kHz. Default value: %d\n" - "\t-psi-rate=V sets PSI refresh rate V in ms (default 100ms). If 0, PSI data is only send once at the begining\n" - " or before each IDR when -rap option is set. This should be set to 0 for DASH streams.\n" - "\t-time=n request the program to stop after n ms\n" - "\t-single-au forces 1 PES = 1 AU (disabled by default)\n" - "\t-rap forces RAP/IDR to be aligned with PES start for video streams (disabled by default)\n" - " in this mode, PAT, PMT and PCR will be inserted before the first TS packet of the RAP PES\n" - "\t-flush-rap same as -rap but flushes all other streams (sends remaining PES packets) before inserting PAT/PMT\n" - "\t-prog=filename specifies an input file used for a TS service\n" - "\t * currently only supports ISO files and SDP files\n" - "\t * can be used several times, once for each program\n" - "\t-nb-pack=N specifies to pack N TS packets together before sending on network or writing to file\n" - "\t-ttl=N specifies Time-To-Live for multicast. Default is 1.\n" - "\t-ifce=IPIFCE specifies default IP interface to use. Default is IF_ANY.\n" - "\t-temi[=URL] Inserts TEMI time codes in adaptation field. 
URL is optionnal\n" - "\t-temi-delay=DelayMS Specifies delay between two TEMI url descriptors\n" - - "\tDST : Destinations, at least one is mandatory\n" - "\t -dst-udp UDP_address:port (multicast or unicast)\n" - "\t -dst-rtp RTP_address:port\n" - "\t -dst-file Supports the following arguments:\n" - "\t -segment-dir=dir server local directory to store segments\n" - "\t -segment-duration=dur segment duration in seconds\n" - "\t -segment-manifest=file m3u8 file basename\n" - "\t -segment-http-prefix=p client address for accessing server segments\n" - "\t -segment-number=n only n segments are used using a cyclic pattern\n" - "\t\n" - "\tMPEG-4 options\n" - "\t-mpeg4-carousel=n carousel period in ms\n" - "\t-mpeg4 or -4on2 forces usage of MPEG-4 signaling (IOD and SL Config)\n" - "\t-4over2 same as -4on2 and uses PMT to carry OD Updates\n" - "\t-bifs-pes carries BIFS over PES instead of sections\n" - "\t-bifs-pes-ex carries BIFS over PES without writing timestamps in SL\n" - "\tMisc options\n" - "\t-audio=url may be mp3/udp or aac/http (shoutcast/icecast)\n" - "\t-video=url shall be a raw h264 frame\n" - "\t-src=filename update file: must be either an .sdp or a .bt file\n\n" - "\t\n" - "\t-logs set log tools and levels, formatted as a ':'-separated list of toolX[:toolZ]@levelX\n" - "\t-h or -help print this screen\n" - "\n", progname, DEFAULT_PCR_OFFSET + "-logs set log tools and levels, formatted as a ':'-separated list of toolX[:toolZ]@levelX\n" + "-h or -help print this screen\n" + "\n", DEFAULT_PCR_OFFSET ); } @@ -142,6 +165,7 @@ typedef struct { GF_ISOFile *mp4; u32 track, sample_number, sample_count; + u32 mstype, mtype; GF_ISOSample *sample; /*refresh rate for images*/ u32 image_repeat_ms, nb_repeat_last; @@ -246,7 +270,7 @@ static u32 format_af_descriptor(char *af_data, u64 timecode, u32 timescale, u64 if (timescale || ntp) { len = 3; //3 bytes flags - if (timescale) len += 4 + (timecode > 0xFFFFFFFFUL) ? 8 : 4; + if (timescale) len += 4 + ((timecode > 0xFFFFFFFFUL) ? 
8 : 4); if (ntp) len += 8; //write timeline descriptor @@ -288,6 +312,9 @@ static GF_Err mp4_input_ctrl(GF_ESInterface *ifce, u32 act_type, void *param) case GF_ESI_INPUT_DATA_FLUSH: { GF_ESIPacket pck; +#ifndef GPAC_DISABLE_TTXT + GF_List *cues = NULL; +#endif if (!priv->sample) priv->sample = gf_isom_get_sample(priv->mp4, priv->track, priv->sample_number+1, NULL); @@ -327,9 +354,41 @@ static GF_Err mp4_input_ctrl(GF_ESInterface *ifce, u32 act_type, void *param) pck.flags |= GF_ESI_DATA_AU_END; pck.data = priv->sample->data; pck.data_len = priv->sample->dataLength; + pck.duration = gf_isom_get_sample_duration(priv->mp4, priv->track, priv->sample_number+1); +#ifndef GPAC_DISABLE_TTXT + if (priv->mtype==GF_ISOM_MEDIA_TEXT && priv->mstype==GF_ISOM_SUBTYPE_WVTT) { + u64 start; + GF_WebVTTCue *cue; + GF_List *gf_webvtt_parse_iso_cues(GF_ISOSample *iso_sample, u64 start); + start = (priv->sample->DTS * 1000) / ifce->timescale; + cues = gf_webvtt_parse_iso_cues(priv->sample, start); + if (gf_list_count(cues)>1) { + GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS Muxer] More than one cue in sample\n")); + } + cue = (GF_WebVTTCue *)gf_list_get(cues, 0); + if (cue) { + pck.data = cue->text; + pck.data_len = (u32)strlen(cue->text)+1; + } else { + pck.data = NULL; + pck.data_len = 0; + } + } +#endif ifce->output_ctrl(ifce, GF_ESI_OUTPUT_DATA_DISPATCH, &pck); GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS Muxer] Track %d: sample %d CTS %d\n", priv->track, priv->sample_number+1, pck.cts)); +#ifndef GPAC_DISABLE_TTXT + if (cues) { + while (gf_list_count(cues)) { + GF_WebVTTCue *cue = (GF_WebVTTCue *)gf_list_get(cues, 0); + gf_list_rem(cues, 0); + gf_webvtt_cue_del(cue); + } + gf_list_del(cues); + cues = NULL; + } +#endif gf_isom_sample_del(&priv->sample); priv->sample_number++; @@ -394,6 +453,8 @@ static void fill_isom_es_ifce(M2TSProgram *prog, GF_ESInterface *ifce, GF_ISOFil priv->mp4 = mp4; priv->track = track_num; + priv->mtype = gf_isom_get_media_type(priv->mp4, priv->track); + priv->mstype = gf_isom_get_media_subtype(priv->mp4, priv->track, 1); priv->loop = prog->real_time ? 
1 : 0; priv->sample_count = gf_isom_get_sample_count(mp4, track_num); prog->samples_count += priv->sample_count; @@ -416,7 +477,7 @@ static void fill_isom_es_ifce(M2TSProgram *prog, GF_ESInterface *ifce, GF_ISOFil case GPAC_OTI_AUDIO_AAC_MPEG2_LCP: case GPAC_OTI_AUDIO_AAC_MPEG2_SSRP: case GPAC_OTI_VIDEO_MPEG4_PART2: - ifce->decoder_config = gf_malloc(sizeof(char)*esd->decoderConfig->decoderSpecificInfo->dataLength); + ifce->decoder_config = (char *)gf_malloc(sizeof(char)*esd->decoderConfig->decoderSpecificInfo->dataLength); ifce->decoder_config_size = esd->decoderConfig->decoderSpecificInfo->dataLength; memcpy(ifce->decoder_config, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); break; @@ -426,6 +487,11 @@ static void fill_isom_es_ifce(M2TSProgram *prog, GF_ESInterface *ifce, GF_ISOFil case GPAC_OTI_VIDEO_SVC: gf_isom_set_nalu_extract_mode(mp4, track_num, GF_ISOM_NALU_EXTRACT_LAYER_ONLY | GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG | GF_ISOM_NALU_EXTRACT_ANNEXB_FLAG | GF_ISOM_NALU_EXTRACT_VDRD_FLAG); break; + case GPAC_OTI_SCENE_VTT_MP4: + ifce->decoder_config = (char *)gf_malloc(sizeof(char)*esd->decoderConfig->decoderSpecificInfo->dataLength); + ifce->decoder_config_size = esd->decoderConfig->decoderSpecificInfo->dataLength; + memcpy(ifce->decoder_config, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); + break; } } gf_odf_desc_del((GF_Descriptor *)esd); @@ -1652,8 +1718,13 @@ static Bool open_program(M2TSProgram *prog, char *src, u32 carousel_rate, u32 mp } } +/*macro to keep retro compatibility with '=' and spaces in parse_args*/ +#define CHECK_PARAM(param) (!strnicmp(arg, param, strlen(param)) \ + && ( ((arg[strlen(param)] == '=') && (next_arg = arg+strlen(param)+1)) \ + || ((strlen(arg) == strlen(param)) && ++i && (i 150) { + fprintf(stderr, "URLs longer than 150 bytes are not currently supported\n"); + return GF_NOT_SUPPORTED; + } + } } - else if (!strnicmp(arg, "-dst-udp=", 9)) { - *real_time = 1; - } else if (!strnicmp(arg, "-dst-rtp=", 9)) { - *real_time = 1; + else if (CHECK_PARAM("-temi-delay")) { + temi_url_insertion_delay = atoi(next_arg); + } + else if (CHECK_PARAM("-dst-udp")) { + char *sep = strchr(next_arg, ':'); + dst_found = 1; + *real_time=1; + if (sep) { + *output_port = atoi(sep+1); + sep[0]=0; + *udp_out = gf_strdup(next_arg); + sep[0]=':'; + } else { + *udp_out = gf_strdup(next_arg); + } + } + else if (CHECK_PARAM("-dst-rtp")) { + char *sep = strchr(next_arg, ':'); + dst_found = 1; + *real_time=1; + if (sep) { + *output_port = atoi(sep+1); + sep[0]=0; + *rtp_out = gf_strdup(next_arg); + sep[0]=':'; + } else { + *rtp_out = gf_strdup(next_arg); + } + } else if (strnicmp(arg, "-prog", 5)) { //second pass arguments + error_msg = "unknown option"; + goto error; } } if (*real_time) force_real_time = 1; @@ -1818,111 +1980,14 @@ static GFINLINE GF_Err parse_args(int argc, char **argv, u32 *mux_rate, u32 *car for (i=1; i 150) { - fprintf(stderr, "URLs longer than 150 bytes are not currently supported\n"); - return GF_NOT_SUPPORTED; - } - } - } - else if (!strnicmp(arg, "-temi-delay=", 12)) { - temi_url_insertion_delay = atoi(arg+12); - } - else if (!strnicmp(arg, "-dst-udp=", 9)) { - char *sep = strchr(arg+9, ':'); - dst_found = 1; - *real_time=1; - if (sep) { - *output_port = atoi(sep+1); - sep[0]=0; - *udp_out = gf_strdup(arg+9); - sep[0]=':'; - } else { - *udp_out = gf_strdup(arg+9); - } - } - else if (!strnicmp(arg, "-dst-rtp=", 9)) { - char *sep = strchr(arg+9, ':'); - 
dst_found = 1; - *real_time=1; - if (sep) { - *output_port = atoi(sep+1); - sep[0]=0; - *rtp_out = gf_strdup(arg+9); - sep[0]=':'; - } else { - *rtp_out = gf_strdup(arg+9); - } - } - else if (!strnicmp(arg, "-audio=", 7) || !strnicmp(arg, "-video=", 7) || !strnicmp(arg, "-mpeg4", 6)) - ; /*already treated on the first pass*/ - else { -// error_msg = "unknown option \"%s\""; -// goto error; } } #if 0 @@ -1957,19 +2022,18 @@ static GFINLINE GF_Err parse_args(int argc, char **argv, u32 *mux_rate, u32 *car return GF_OK; } else { if (!dst_found) - fprintf(stderr, "Error: Destination argument not found\n\n"); + fprintf(stderr, "Error: Destination argument not found\n"); if (! *nb_progs) - fprintf(stderr, "Error: No Programs are available\n\n"); - if (!rate_found) - fprintf(stderr, "Error: Rate argument not found\n\n"); + fprintf(stderr, "Error: No Programs are available\n"); + usage(); return GF_BAD_PARAM; } error: if (!arg) { - fprintf(stderr, "Error: %s\n\n", error_msg); + fprintf(stderr, "Error: %s\n", error_msg); } else { - fprintf(stderr, "Error: %s \"%s\"\n\n", error_msg, arg); + fprintf(stderr, "Error: %s \"%s\"\n", error_msg, arg); } return GF_BAD_PARAM; } @@ -2032,7 +2096,7 @@ int main(int argc, char **argv) GF_Err e; u32 run_time; Bool real_time, single_au_pes, is_stdout; - u64 pcr_init_val=0; + s64 pcr_init_val = -1; u32 usec_till_next, ttl, split_rap; u32 i, j, mux_rate, nb_progs, cur_pid, carrousel_rate, last_print_time, last_video_time, bifs_use_pes, psi_refresh_rate, nb_pck_pack, nb_pck_in_pack; char *ts_out = NULL, *udp_out = NULL, *rtp_out = NULL, *audio_input_ip = NULL; @@ -2131,7 +2195,7 @@ int main(int argc, char **argv) /***************************/ muxer = gf_m2ts_mux_new(mux_rate, psi_refresh_rate, real_time); if (muxer) gf_m2ts_mux_use_single_au_pes_mode(muxer, single_au_pes); - if (pcr_init_val) gf_m2ts_mux_set_initial_pcr(muxer, pcr_init_val); + if (pcr_init_val>=0) gf_m2ts_mux_set_initial_pcr(muxer, (u64) pcr_init_val); if (ts_out != NULL) { if (segment_duration) { @@ -2534,6 +2598,7 @@ exit: if (aac_reader) AAC_Reader_del(aac_reader); #endif + if (logfile) fclose(logfile); gf_sys_close(); return 0; } diff --git a/applications/mp4box/filedump.c b/applications/mp4box/filedump.c index e2d228d..db9764b 100644 --- a/applications/mp4box/filedump.c +++ b/applications/mp4box/filedump.c @@ -42,7 +42,7 @@ #endif #include #include -#include +#include /*for asctime and gmtime*/ #include /*ISO 639 languages*/ @@ -1012,6 +1012,7 @@ void dump_file_nal(GF_ISOFile *file, u32 trackID, char *inName) GF_AVCConfig *avccfg, *svccfg; GF_HEVCConfig *hevccfg, *shvccfg; GF_AVCConfigSlot *slc; + Bool is_adobe_protection = GF_FALSE; memset(&avc, 0, sizeof(AVCState)); #endif @@ -1126,6 +1127,7 @@ void dump_file_nal(GF_ISOFile *file, u32 trackID, char *inName) fprintf(dump, " \n"); gf_isom_set_nalu_extract_mode(file, track, GF_ISOM_NALU_EXTRACT_INSPECT); + is_adobe_protection = gf_isom_is_adobe_protection_media(file, track, 1); for (i=0; idata; size = samp->dataLength; + if (is_adobe_protection) { + u8 encrypted_au = ptr[0]; + if (encrypted_au) { + fprintf(dump, " \n", i+1); + fprintf(dump, " \n\n"); + continue; + } + else { + ptr++; + size--; + } + } while (size) { u32 v = nalh_size; nal_size = 0; @@ -1418,9 +1432,9 @@ static void DumpMetaItem(GF_ISOFile *file, Bool root_meta, u32 tk_num, char *nam } #ifndef GPAC_DISABLE_HEVC -void dump_hevc_track_info(GF_ISOFile *file, u32 trackNum, GF_HEVCConfig *hevccfg) +void dump_hevc_track_info(GF_ISOFile *file, u32 trackNum, GF_HEVCConfig *hevccfg, 
HEVCState *hevc_state) { - u32 k; + u32 k, idx; fprintf(stderr, "\t%s Info: Profile %s @ Level %g - Chroma Format %d\n", hevccfg->is_shvc ? "SHVC" : "HEVC", gf_hevc_get_profile_name(hevccfg->profile_idc), ((Double)hevccfg->level_idc) / 30.0, hevccfg->chromaFormat); fprintf(stderr, "\tNAL Unit length bits: %d - general profile compatibility 0x%08X\n", 8*hevccfg->nal_unit_size, hevccfg->general_profile_compatibility_flags); fprintf(stderr, "\tParameter Sets: "); @@ -1429,23 +1443,30 @@ void dump_hevc_track_info(GF_ISOFile *file, u32 trackNum, GF_HEVCConfig *hevccfg if (ar->type==GF_HEVC_NALU_SEQ_PARAM) { fprintf(stderr, "%d SPS ", gf_list_count(ar->nalus)); } - else if (ar->type==GF_HEVC_NALU_PIC_PARAM) { + if (ar->type==GF_HEVC_NALU_PIC_PARAM) { fprintf(stderr, "%d PPS ", gf_list_count(ar->nalus)); } if (ar->type==GF_HEVC_NALU_VID_PARAM) { fprintf(stderr, "%d VPS ", gf_list_count(ar->nalus)); + + for (idx=0; idxnalus); idx++) { + GF_AVCConfigSlot *vps = gf_list_get(ar->nalus, idx); + gf_media_hevc_read_vps(vps->data, vps->size, hevc_state); + } } } + fprintf(stderr, "\n"); for (k=0; kparam_array); k++) { GF_HEVCParamArray *ar=gf_list_get(hevccfg->param_array, k); - u32 idx, width, height; + u32 width, height; s32 par_n, par_d; + if (ar->type !=GF_HEVC_NALU_SEQ_PARAM) continue; for (idx=0; idxnalus); idx++) { GF_AVCConfigSlot *sps = gf_list_get(ar->nalus, idx); par_n = par_d = -1; - gf_hevc_get_sps_info(sps->data, sps->size, NULL, &width, &height, &par_n, &par_d); + gf_hevc_get_sps_info_with_state(hevc_state, sps->data, sps->size, NULL, &width, &height, &par_n, &par_d); fprintf(stderr, "\tSPS resolution %dx%d", width, height); if ((par_n>0) && (par_d>0)) { u32 tw, th; @@ -1521,6 +1542,7 @@ void DumpTrackInfo(GF_ISOFile *file, u32 trackID, Bool full_dump) } gf_isom_get_audio_info(file, trackNum, 1, &sr, &nb_ch, &bps); + gf_isom_set_nalu_extract_mode(file, trackNum, GF_ISOM_NALU_EXTRACT_INSPECT); msub_type = gf_isom_get_media_subtype(file, trackNum, 1); if ((msub_type==GF_ISOM_SUBTYPE_MPEG4) @@ -1533,6 +1555,8 @@ void DumpTrackInfo(GF_ISOFile *file, u32 trackID, Bool full_dump) || (msub_type==GF_ISOM_SUBTYPE_LSR1) || (msub_type==GF_ISOM_SUBTYPE_HVC1) || (msub_type==GF_ISOM_SUBTYPE_HEV1) + || (msub_type==GF_ISOM_SUBTYPE_SHV1) + || (msub_type==GF_ISOM_SUBTYPE_SHC1) ) { esd = gf_isom_get_esd(file, trackNum, 1); if (!esd) { @@ -1629,9 +1653,14 @@ void DumpTrackInfo(GF_ISOFile *file, u32 trackID, Bool full_dump) } #endif /*GPAC_DISABLE_AV_PARSERS*/ - } else if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_HEVC) { + } else if ((esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_HEVC) + || (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_SHVC) + ) { #if !defined(GPAC_DISABLE_AV_PARSERS) && !defined(GPAC_DISABLE_HEVC) + HEVCState hevc_state; GF_HEVCConfig *hevccfg, *shvccfg; + memset(&hevc_state, 0, sizeof(HEVCState)); + hevc_state.sps_active_idx = -1; #endif gf_isom_get_visual_info(file, trackNum, 1, &w, &h); @@ -1644,12 +1673,12 @@ void DumpTrackInfo(GF_ISOFile *file, u32 trackID, Bool full_dump) fprintf(stderr, "\n\n\tNon-compliant HEVC track: No hvcC or shcC found in sample description\n"); } if (hevccfg) { - dump_hevc_track_info(file, trackNum, hevccfg); + dump_hevc_track_info(file, trackNum, hevccfg, &hevc_state); gf_odf_hevc_cfg_del(hevccfg); fprintf(stderr, "\n"); } if (shvccfg) { - dump_hevc_track_info(file, trackNum, shvccfg); + dump_hevc_track_info(file, trackNum, shvccfg, &hevc_state); gf_odf_hevc_cfg_del(shvccfg); } #endif /*GPAC_DISABLE_AV_PARSERS && 
defined(GPAC_DISABLE_HEVC)*/ @@ -1860,6 +1889,9 @@ void DumpTrackInfo(GF_ISOFile *file, u32 trackID, Bool full_dump) gf_isom_get_cenc_info(file, trackNum, 1, NULL, &scheme_type, &version, &IV_size); fprintf(stderr, "\n*Encrypted stream - CENC scheme %s (version %d)\n", gf_4cc_to_str(scheme_type), version); if (IV_size) fprintf(stderr, "Initialization Vector size: %d bits\n", IV_size*8); + } else if(gf_isom_is_adobe_protection_media(file, trackNum, 1)) { + gf_isom_get_adobe_protection_info(file, trackNum, 1, NULL, &scheme_type, &version); + fprintf(stderr, "\n*Encrypted stream - Adobe protection scheme %s (version %d)\n", gf_4cc_to_str(scheme_type), version); } else { fprintf(stderr, "\n*Encrypted stream - unknown scheme %s\n", gf_4cc_to_str(gf_isom_is_media_encrypted(file, trackNum, 1) )); } diff --git a/applications/mp4box/fileimport.c b/applications/mp4box/fileimport.c index 57b9813..9099ddc 100644 --- a/applications/mp4box/fileimport.c +++ b/applications/mp4box/fileimport.c @@ -88,6 +88,9 @@ void convert_file_info(char *inName, u32 trackID) } fprintf(stderr, "File has %d tracks\n", import.nb_tracks); } + if (import.probe_duration) { + fprintf(stderr, "Duration: %g ms\n", (Double) (import.probe_duration/1000.0)); + } found = 0; for (i=0; i=0) && (par_d>=0)) { e = gf_media_change_par(import.dest, i+1, par_n, par_d); } + + if (rap_only) { + e = gf_media_remove_non_rap(import.dest, i+1); + } + if (handler_name) gf_isom_set_handler_name(import.dest, i+1, handler_name); else if (!keep_handler) { char szHName[1024]; @@ -590,6 +601,9 @@ GF_Err import_file(GF_ISOFile *dest, char *inName, u32 import_flags, Double forc if ((import.tk_info[i].type==GF_ISOM_MEDIA_VISUAL) && (par_n>=-1) && (par_d>=-1)) { e = gf_media_change_par(import.dest, track, par_n, par_d); } + if (rap_only) { + e = gf_media_remove_non_rap(import.dest, track); + } if (handler_name) gf_isom_set_handler_name(import.dest, track, handler_name); else if (!keep_handler) { char szHName[1024]; @@ -1458,7 +1472,7 @@ GF_Err cat_isomedia_file(GF_ISOFile *dest, char *fileName, u32 import_flags, Dou GF_ISOFile *orig; GF_Err e; char *opts, *multi_cat; - Float ts_scale; + Double ts_scale; Double dest_orig_dur; u32 dst_tk, tk_id, mtype; u64 insert_dts; @@ -1720,7 +1734,7 @@ GF_Err cat_isomedia_file(GF_ISOFile *dest, char *fileName, u32 import_flags, Dou if (!count) insert_dts = 0; } - ts_scale = (Float) gf_isom_get_media_timescale(dest, dst_tk); + ts_scale = gf_isom_get_media_timescale(dest, dst_tk); ts_scale /= gf_isom_get_media_timescale(orig, i+1); /*if not a new track, see if we can merge the edit list - this is a crude test that only checks diff --git a/applications/mp4box/live.c b/applications/mp4box/live.c index 35cec70..c58fc70 100644 --- a/applications/mp4box/live.c +++ b/applications/mp4box/live.c @@ -456,7 +456,7 @@ static RTPChannel *set_broadcast_params(LiveSession *livesess, u16 esid, u32 per int live_session(int argc, char **argv) { GF_Err e; - int i; + u32 i; char *filename = NULL; char *dst = NULL; char *ifce_addr = NULL; @@ -819,7 +819,7 @@ int live_session(int argc, char **argv) gf_sleep(10); continue; } - ch = next_carousel(&livesess, &next_time); + ch = next_carousel(&livesess, (u32 *) &next_time); if ((ch==NULL) || (next_time > 20)) { gf_sleep(20); continue; diff --git a/applications/mp4box/main.c b/applications/mp4box/main.c index ece2eb2..58fbcee 100644 --- a/applications/mp4box/main.c +++ b/applications/mp4box/main.c @@ -89,6 +89,7 @@ u32 id3_get_genre_tag(const char *name); /*in filedump.c*/ #ifndef 
GPAC_DISABLE_SCENE_DUMP GF_Err dump_file_text(char *file, char *inName, u32 dump_mode, Bool do_log); +//void gf_check_isom_files(char *conf_rules, char *inName); #endif #ifndef GPAC_DISABLE_SCENE_STATS void dump_scene_stats(char *file, char *inName, u32 stat_level); @@ -224,6 +225,7 @@ void PrintGeneralUsage() " -cprt string adds copyright string to movie\n" " -chap file adds chapter information contained in file\n" " -rem trackID removes track from file\n" + " -rap trackID removes all non-RAP samples from track\n" " -enable trackID enables track\n" " -disable trackID disables track\n" " -new forces creation of a new destination file\n" @@ -328,6 +330,7 @@ void PrintDASHUsage() " -dash-scale SCALE specifies that timing for -dash and -frag are expressed in SCALE units per seconds\n" " -mem-frags fragments will be produced in memory rather than on disk before flushing to disk\n" " -pssh-moof stores PSSH boxes in first moof of each segments. By default PSSH are stored in movie box.\n" + " -sample-groups-traf stores sample group descriptions in traf (duplicated for each traf) rather than in moof. By default sample group descriptions are stored in movie box.\n" "\n" "Advanced Options, should not be needed when using -dash-profile:\n" @@ -1303,6 +1306,7 @@ typedef struct 7: disables track 8: referenceTrack 9: raw extraction + 10: remove non-rap */ u32 act_type; /*track ID*/ @@ -1498,6 +1502,7 @@ int mp4boxMain(int argc, char **argv) Bool enable_mem_tracker = 0; Bool dump_iod=0; Bool pssh_in_moof=0; + Bool samplegroups_in_traf=0; Bool daisy_chain_sidx=0; Bool single_segment=0; Bool single_file=0; @@ -1765,6 +1770,17 @@ int mp4boxMain(int argc, char **argv) else if (!stricmp(arg, "-dump-chap-ogg")) dump_chap = 2; else if (!stricmp(arg, "-hash")) do_hash = 1; +#if 0 + else if (!stricmp(arg, "-conf")) { + if (i+1==(u32)argc) { fprintf(stderr, "Missing arg - please check usage\n"); MP4BOX_EXIT_WITH_CODE(1); } + if (i+2==(u32)argc) { + gf_check_isom_files(NULL, argv[i+1]); + } else { + gf_check_isom_files(argv[i+1], argv[i+2]); + } + MP4BOX_EXIT_WITH_CODE(0); + } +#endif else if (!stricmp(arg, "-dmp4")) { dump_isom = 1; fprintf(stderr, "WARNING: \"-dmp4\" is deprecated - use \"-diso\" option\n"); @@ -1861,6 +1877,10 @@ int mp4boxMain(int argc, char **argv) } else if (!stricmp(arg, "-dash")) { CHECK_NEXT_ARG dash_duration = atof(argv[i+1]) / 1000; + if (dash_duration == 0.0) { + fprintf(stderr, "\tERROR: \"-dash-dash_duration\": invalid parameter %s\n", argv[i+1]); + MP4BOX_EXIT_WITH_CODE(1); + } i++; } else if (!stricmp(arg, "-subdur")) { CHECK_NEXT_ARG @@ -1962,6 +1982,8 @@ int mp4boxMain(int argc, char **argv) single_file = 1; } else if (!stricmp(arg, "-pssh-moof")) { pssh_in_moof = 1; + } else if (!stricmp(arg, "-sample-groups-traf")) { + samplegroups_in_traf = 1; } else if (!stricmp(arg, "-dash-profile") || !stricmp(arg, "-profile")) { CHECK_NEXT_ARG if (!stricmp(argv[i+1], "live") || !stricmp(argv[i+1], "simple")) dash_profile = GF_DASH_PROFILE_LIVE; @@ -2001,6 +2023,17 @@ int mp4boxMain(int argc, char **argv) } else if (!stricmp(arg, "-ocr")) force_ocr = 1; else if (!stricmp(arg, "-latm")) hint_flags |= GP_RTP_PCK_USE_LATM_AAC; else if (!stricmp(arg, "-rap")) { + if ((i+1 < (u32)argc) && (argv[i+1][0] != '-')) { + if (sscanf(argv[i+1], "%d", &trackID) == 1) { + tracks = gf_realloc(tracks, sizeof(TrackAction) * (nb_track_act+1)); + memset(&tracks[nb_track_act], 0, sizeof(TrackAction) ); + tracks[nb_track_act].act_type = 10; + tracks[nb_track_act].trackID = trackID; + nb_track_act++; + i++; + 
open_edit = 1; + } + } hint_flags |= GP_RTP_PCK_SIGNAL_RAP; seg_at_rap=1; } @@ -3000,7 +3033,7 @@ int mp4boxMain(int argc, char **argv) seg_at_rap, dash_duration, seg_name, seg_ext, segment_marker, interleaving_time, subsegs_per_sidx, daisy_chain_sidx, frag_at_rap, tmpdir, dash_ctx, dash_dynamic, mpd_update_time, time_shift_depth, dash_subduration, min_buffer, - ast_shift_sec, dash_scale, memory_frags, initial_moof_sn, initial_tfdt, no_fragments_defaults, pssh_in_moof); + ast_shift_sec, dash_scale, memory_frags, initial_moof_sn, initial_tfdt, no_fragments_defaults, pssh_in_moof, samplegroups_in_traf); if (e) break; if (dash_live) { @@ -3686,6 +3719,11 @@ int mp4boxMain(int argc, char **argv) e = gf_isom_set_track_reference(file, track, GF_4CC(tka->lang[0], tka->lang[1], tka->lang[2], tka->lang[3]), (u32) tka->delay_ms); needSave = 1; break; + case 10: + fprintf(stderr, "Removing non-rap samples from track %d\n", tka->trackID); + e = gf_media_remove_non_rap(file, track); + needSave = 1; + break; } if (e) goto err_exit; } diff --git a/applications/mp4client/main.c b/applications/mp4client/main.c index 013724a..4b77527 100644 --- a/applications/mp4client/main.c +++ b/applications/mp4client/main.c @@ -84,6 +84,7 @@ GF_User user; GF_Terminal *term; u64 Duration; GF_Err last_error = GF_OK; +static Bool enable_add_ons = GF_TRUE; static Bool request_next_playlist_item = GF_FALSE; FILE *playlist = NULL; @@ -209,6 +210,7 @@ void PrintUsage() "\n" "\t-exit: automatically exits when presentation is over\n" "\t-run-for TIME: runs for TIME seconds and exits\n" + "\t-no-addon: disable automatic loading of media addons declared in source URL\n" "\t-gui: starts in GUI mode. The GUI is indicated in GPAC config, section General, by the key [StartupFile]\n" "\n" "Dumper Options:\n" @@ -334,7 +336,7 @@ static void UpdateRTInfo(const char *legend) if (display_rti) { char szMsg[1024]; - if (rti.total_cpu_usage && (bench_mode!=2) ) { + if (rti.total_cpu_usage && (bench_mode<2) ) { sprintf(szMsg, "FPS %d CPU %2d (%02d) Mem %d kB", (u32) gf_term_get_framerate(term, 0), rti.total_cpu_usage, rti.process_cpu_usage, (u32) (rti.gpac_memory / 1024)); } else { @@ -343,7 +345,7 @@ static void UpdateRTInfo(const char *legend) } if (display_rti==2) { - if (bench_mode==2) { + if (bench_mode>=2) { PrintAVInfo(GF_FALSE); } fprintf(stderr, "%s\r", szMsg); @@ -779,6 +781,10 @@ Bool GPAC_EventProc(void *ptr, GF_Event *evt) } return 1; } + case GF_EVENT_ADDON_DETECTED: + if (enable_add_ons) + fprintf(stderr, "Media Addon %s detected - enabling it\n", evt->addon_connect.addon_url); + return enable_add_ons; } return 0; } @@ -957,6 +963,7 @@ int main (int argc, char **argv) Bool rgbd_dump = GF_FALSE; Bool depth_dump = GF_FALSE; Bool pause_at_first = GF_FALSE; + Double play_from = 0; #ifdef GPAC_MEMORY_TRACKING Bool enable_mem_tracker = GF_FALSE; #endif @@ -1122,6 +1129,9 @@ int main (int argc, char **argv) else if (!strcmp(arg, "-no-regulation")) no_regulation = 1; else if (!strcmp(arg, "-fs")) start_fs = 1; else if (!strcmp(arg, "-pause")) pause_at_first = 1; + else if (!strcmp(arg, "-play-from")) { + play_from = atof((const char *) argv[i+1]); + } else if (!strcmp(arg, "-exit")) auto_exit = 1; else if (!strcmp(arg, "-mem-track")) { #ifdef GPAC_MEMORY_TRACKING @@ -1134,6 +1144,8 @@ int main (int argc, char **argv) else if (!strcmp(arg, "-bench")) bench_mode = 1; else if (!strcmp(arg, "-vbench")) bench_mode = 2; else if (!strcmp(arg, "-sbench")) bench_mode = 3; + else if (!strcmp(arg, "-no-addon")) enable_add_ons = GF_FALSE; + else if 
(!strcmp(arg, "-opt")) { set_cfg_option(argv[i+1]); i++; @@ -1177,7 +1189,7 @@ int main (int argc, char **argv) if (gui_mode) { if (gui_mode==1) { hide_shell(1); - user.init_flags |= GF_TERM_WINDOW_NO_DECORATION; + //user.init_flags |= GF_TERM_WINDOW_NO_DECORATION; } } @@ -1236,7 +1248,7 @@ int main (int argc, char **argv) if (bench_mode!=2) { gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output"); gf_cfg_set_key(user.config, "RAWVideo", "RawOutput", "null"); - gf_cfg_set_key(user.config, "Compositor", "ForceOpenGL", "no"); + gf_cfg_set_key(user.config, "Compositor", "OpenGLMode", "disable"); } else { gf_cfg_set_key(user.config, "Video", "DisableVSync", "yes"); } @@ -1260,7 +1272,7 @@ int main (int argc, char **argv) if (bench_mode) { display_rti = 2; gf_term_set_option(term, GF_OPT_VIDEO_BENCH, (bench_mode==3) ? 2 : 1); - bench_mode=2; + if (bench_mode==1) bench_mode=2; } if (dump_mode) { @@ -1344,7 +1356,7 @@ int main (int argc, char **argv) } else { fprintf(stderr, "Opening URL %s\n", the_url); if (pause_at_first) fprintf(stderr, "[Status: Paused]\n"); - gf_term_connect_from_time(term, the_url, 0, pause_at_first); + gf_term_connect_from_time(term, the_url, (u64) (play_from*1000), pause_at_first); } } else { fprintf(stderr, "Hit 'h' for help\n\n"); @@ -1902,7 +1914,7 @@ static u32 last_odm_count = 0; void PrintAVInfo(Bool final) { GF_MediaInfo a_odi, v_odi, s_odi; - Float avg_dec_time=0; + Double avg_dec_time=0; u32 tot_time=0; Bool print_codecs = final; @@ -1930,7 +1942,7 @@ void PrintAVInfo(Bool final) GF_ObjectManager *odm = gf_term_get_object(term, root_odm, i); if (!odm) break; if (gf_term_get_object_info(term, odm, &v_odi) == GF_OK) { - if (!video_odm && (v_odi.od_type == GF_STREAM_VISUAL) && (v_odi.raw_media || (v_odi.cb_max_count>1)) ) { + if (!video_odm && (v_odi.od_type == GF_STREAM_VISUAL) && (v_odi.raw_media || (v_odi.cb_max_count>1) || v_odi.direct_video_memory) ) { video_odm = odm; } else if (!audio_odm && (v_odi.od_type == GF_STREAM_AUDIO)) { @@ -1949,10 +1961,13 @@ void PrintAVInfo(Bool final) } if (video_odm) { - gf_term_get_object_info(term, video_odm, &v_odi); + if (gf_term_get_object_info(term, video_odm, &v_odi)!= GF_OK) { + video_odm = NULL; + return; + } avg_dec_time = 0; if (v_odi.nb_dec_frames && v_odi.total_dec_time) { - avg_dec_time = (Float) 1000 * v_odi.nb_dec_frames; + avg_dec_time = (Float) 1000000 * v_odi.nb_dec_frames; avg_dec_time /= v_odi.total_dec_time; } } @@ -1967,6 +1982,7 @@ void PrintAVInfo(Bool final) tot_time = gf_sys_clock() - bench_mode_start; fprintf(stderr, " \r"); fprintf(stderr, "************** Bench Mode Done in %d ms ********************\n", tot_time); + if (bench_mode==3) fprintf(stderr, "** Systems layer only (no decoding) **\n"); if (!video_odm) { u32 nb_frames_drawn; @@ -1981,7 +1997,7 @@ void PrintAVInfo(Bool final) u32 dec_run_time = v_odi.last_frame_time - v_odi.first_frame_time; if (!dec_run_time) dec_run_time = 1; if (v_odi.duration) fprintf(stderr, "%d%% ", (u32) (100*v_odi.current_time / v_odi.duration ) ); - fprintf(stderr, "%d frames FPS %.2f (max %d ms/f) rate avg %d max %d", v_odi.nb_dec_frames, ((Float)v_odi.nb_dec_frames*1000) / dec_run_time, v_odi.max_dec_time, (u32) v_odi.avg_bitrate/1000, (u32) v_odi.max_bitrate/1000); + fprintf(stderr, "%d frames FPS %.2f (max "LLU" us/f) rate avg %d max %d", v_odi.nb_dec_frames, ((Float)v_odi.nb_dec_frames*1000) / dec_run_time, v_odi.max_dec_time, (u32) v_odi.avg_bitrate/1000, (u32) v_odi.max_bitrate/1000); if (v_odi.nb_droped) { fprintf(stderr, " (Error during 
bench: %d frames drop)", v_odi.nb_droped); } @@ -1994,7 +2010,7 @@ void PrintAVInfo(Bool final) u32 dec_run_time = a_odi.last_frame_time - a_odi.first_frame_time; if (!dec_run_time) dec_run_time = 1; if (a_odi.duration) fprintf(stderr, "%d%% ", (u32) (100*a_odi.current_time / a_odi.duration ) ); - fprintf(stderr, "%d frames (ms/f %.2f avg %d max) rate avg %d max %d", a_odi.nb_dec_frames, ((Float)dec_run_time)/a_odi.nb_dec_frames, a_odi.max_dec_time, (u32) a_odi.avg_bitrate/1000, (u32) a_odi.max_bitrate/1000); + fprintf(stderr, "%d frames (ms/f %.2f avg %.2f max) rate avg %d max %d", a_odi.nb_dec_frames, ((Float)dec_run_time)/a_odi.nb_dec_frames, a_odi.max_dec_time/1000.0, (u32) a_odi.avg_bitrate/1000, (u32) a_odi.max_bitrate/1000); if (a_odi.nb_droped) { fprintf(stderr, " (Error during bench: %d frames drop)", a_odi.nb_droped); } @@ -2009,7 +2025,7 @@ void PrintAVInfo(Bool final) if (s_odi.nb_dec_frames>2 && s_odi.total_dec_time) { u32 dec_run_time = s_odi.last_frame_time - s_odi.first_frame_time; if (!dec_run_time) dec_run_time = 1; - fprintf(stderr, "%d frames FPS %.2f (max %d ms/f) rate avg %d max %d", s_odi.nb_dec_frames, ((Float)s_odi.nb_dec_frames*1000) / dec_run_time, s_odi.max_dec_time, (u32) s_odi.avg_bitrate/1000, (u32) s_odi.max_bitrate/1000); + fprintf(stderr, "%d frames FPS %.2f (max "LLD" us/f) rate avg %d max %d", s_odi.nb_dec_frames, ((Float)s_odi.nb_dec_frames*1000) / dec_run_time, s_odi.max_dec_time, (u32) s_odi.avg_bitrate/1000, (u32) s_odi.max_bitrate/1000); fprintf(stderr, "\n"); } else { u32 nb_frames_drawn; @@ -2030,22 +2046,22 @@ void PrintAVInfo(Bool final) tot_time = v_odi.last_frame_time - v_odi.first_frame_time; if (!tot_time) tot_time=1; if (v_odi.duration) fprintf(stderr, "%d%% ", (u32) (100*v_odi.current_time / v_odi.duration ) ); - fprintf(stderr, "%d frames FPS %.2f (%dms max) - rate %d ", v_odi.nb_dec_frames, ((Float)v_odi.nb_dec_frames*1000) / tot_time, v_odi.max_dec_time, (u32) v_odi.instant_bitrate/1000); + fprintf(stderr, "%d f FPS %.2f (%.2f ms max) rate %d ", v_odi.nb_dec_frames, ((Float)v_odi.nb_dec_frames*1000) / tot_time, v_odi.max_dec_time/1000.0, (u32) v_odi.instant_bitrate/1000); } else if (scene_odm) { avg_dec_time = 0; if (s_odi.nb_dec_frames>2 && s_odi.total_dec_time) { - avg_dec_time = (Float) 1000 * s_odi.nb_dec_frames; + avg_dec_time = (Float) 1000000 * s_odi.nb_dec_frames; avg_dec_time /= s_odi.total_dec_time; if (s_odi.duration) fprintf(stderr, "%d%% ", (u32) (100*s_odi.current_time / s_odi.duration ) ); - fprintf(stderr, "%d frames %.2f (%dms max) - rate %d ", s_odi.nb_dec_frames, avg_dec_time, s_odi.max_dec_time, (u32) s_odi.instant_bitrate/1000); + fprintf(stderr, "%d f %.2f ("LLU" us max) - rate %d ", s_odi.nb_dec_frames, avg_dec_time, s_odi.max_dec_time, (u32) s_odi.instant_bitrate/1000); } else { u32 nb_frames_drawn; Double FPS = gf_term_get_simulation_frame_rate(term, &nb_frames_drawn); tot_time = gf_sys_clock() - bench_mode_start; FPS = gf_term_get_framerate(term, 1); - fprintf(stderr, "%d frames FPS %.2f (abs %.2f) ", nb_frames_drawn, (1000.0*nb_frames_drawn / tot_time), FPS); + fprintf(stderr, "%d f FPS %.2f (abs %.2f) ", nb_frames_drawn, (1000.0*nb_frames_drawn / tot_time), FPS); } } } @@ -2240,7 +2256,7 @@ void ViewOD(GF_Terminal *term, u32 OD_ID, u32 number) avg_dec_time = (Float) odi.total_dec_time; avg_dec_time /= odi.nb_dec_frames; } - fprintf(stderr, "\tBitrate over last second: %d kbps\n\tMax bitrate over one second: %d kbps\n\tAverage Decoding Time %.2f ms (%d max)\n\tTotal decoded frames %d\n", + fprintf(stderr, 
"\tBitrate over last second: %d kbps\n\tMax bitrate over one second: %d kbps\n\tAverage Decoding Time %.2f ms ("LLU" max)\n\tTotal decoded frames %d\n", (u32) odi.avg_bitrate/1024, odi.max_bitrate/1024, avg_dec_time, odi.max_dec_time, odi.nb_dec_frames); } if (odi.protection) fprintf(stderr, "Encrypted Media%s\n", (odi.protection==2) ? " NOT UNLOCKED" : ""); @@ -2468,7 +2484,7 @@ void PrintODBuffer(GF_Terminal *term, GF_ObjectManager *odm) fprintf(stderr, "\n * %d decoded frames - %d dropped frames\n", odi.nb_dec_frames, odi.nb_droped); avg_dec_time = 0; if (odi.nb_dec_frames) { avg_dec_time = (Float) odi.total_dec_time; avg_dec_time /= odi.nb_dec_frames; } - fprintf(stderr, " * Avg Bitrate %d kbps (%d max) - Avg Decoding Time %.2f ms (%d max)\n", + fprintf(stderr, " * Avg Bitrate %d kbps (%d max) - Avg Decoding Time %.2f ms ("LLU" max)\n", (u32) odi.avg_bitrate/1024, odi.max_bitrate/1024, avg_dec_time, odi.max_dec_time); } diff --git a/applications/testapps/hevcbench/defbench.h b/applications/testapps/hevcbench/defbench.h new file mode 100644 index 0000000..9dd8ea7 --- /dev/null +++ b/applications/testapps/hevcbench/defbench.h @@ -0,0 +1,111 @@ +/* + * GPAC - Multimedia Framework C SDK + * + * Authors: Jean Le Feuvre + * Copyright (c) Telecom ParisTech 2012 + * All rights reserved + * + * This file is part of GPAC - sample DASH library usage + * + */ + +#ifndef __DEF_BENCH_H__ +#define __DEF_BENCH_H__ + +#include +#include +#include +#define SDL_MAIN_HANDLED +#include +#include + +#define GL_GLEXT_PROTOTYPES + +#include +#include + + + + +#define GL_CHECK_ERR {s32 res = glGetError(); if (res) GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("GL Error %d file %s line %d\n", res, __FILE__, __LINE__)); } + +/*macros for GL proto and fun declaration*/ +#ifdef _WIN32_WCE +#define GLAPICAST * +#elif defined(WIN32) +#include +#define GLAPICAST APIENTRY * +#else +#define GLAPICAST * +#endif + +#define GLDECL(ret, funname, args) \ +typedef ret (GLAPICAST proc_ ## funname)args; \ +extern proc_ ## funname funname; \ + +#define GLDECL_STATIC(funname) proc_ ## funname funname = NULL + +#if defined GPAC_USE_TINYGL +//no extensions with TinyGL +#elif defined (GPAC_USE_OGL_ES) +//no extensions with OpenGL ES +#elif defined(WIN32) || defined (GPAC_CONFIG_WIN32) +#define LOAD_GL_FUNCS +#define GET_GLFUN(funname) funname = (proc_ ## funname) wglGetProcAddress(#funname) +#elif defined(CONFIG_DARWIN_GL) +extern void (*glutGetProcAddress(const GLubyte *procname))( void ); +#define GET_GLFUN(funname) funname = (proc_ ## funname) glutGetProcAddress(#funname) +#else +#define LOAD_GL_FUNCS +extern void (*glXGetProcAddress(const GLubyte *procname))( void ); +#define GET_GLFUN(funname) funname = (proc_ ## funname) glXGetProcAddress(#funname) +#endif + + + +#define DEL_SHADER(_a) if (_a) { glDeleteShader(_a); _a = 0; } +#define DEL_PROGRAM(_a) if (_a) { glDeleteProgram(_a); _a = 0; } + + +GLDECL(GLuint, glCreateProgram, (void) ) +GLDECL(void, glDeleteProgram, (GLuint ) ) +GLDECL(void, glLinkProgram, (GLuint program) ) +GLDECL(void, glUseProgram, (GLuint program) ) +GLDECL(GLuint, glCreateShader, (GLenum shaderType) ) +GLDECL(void, glDeleteShader, (GLuint shader) ) +GLDECL(void, glShaderSource, (GLuint shader, GLsizei count, const char **string, const GLint *length) ) +GLDECL(void, glCompileShader, (GLuint shader) ) +GLDECL(void, glAttachShader, (GLuint program, GLuint shader) ) +GLDECL(void, glDetachShader, (GLuint program, GLuint shader) ) +GLDECL(void, glGetShaderiv, (GLuint shader, GLenum type, GLint *res) ) +GLDECL(void, 
glGetInfoLogARB, (GLuint shader, GLint size, GLsizei *rsize, const char *logs) ) +GLDECL(GLint, glGetUniformLocation, (GLuint prog, const char *name) ) +GLDECL(void, glUniform1f, (GLint location, GLfloat v0) ) +GLDECL(void, glUniform1i, (GLint location, GLint v0) ) +GLDECL(void, glActiveTexture, (GLenum texture) ) +GLDECL(void, glClientActiveTexture, (GLenum texture) ) +GLDECL(void, glGenBuffers, (GLsizei , GLuint *) ) +GLDECL(void, glDeleteBuffers, (GLsizei , GLuint *) ) +GLDECL(void, glBindBuffer, (GLenum, GLuint ) ) +GLDECL(void, glBufferData, (GLenum, int, void *, GLenum) ) +GLDECL(void, glBufferSubData, (GLenum, int, int, void *) ) +GLDECL(void *, glMapBuffer, (GLenum, GLenum) ) +GLDECL(void *, glUnmapBuffer, (GLenum) ) + + +#define GL_TEXTURE_RECTANGLE_EXT 0x84F5 + +#define GL_INFO_LOG_LENGTH 0x8B84 +#define GL_FRAGMENT_SHADER 0x8B30 +#define GL_VERTEX_SHADER 0x8B31 +#define GL_PIXEL_UNPACK_BUFFER_ARB 0x88EC +#define GL_STREAM_DRAW_ARB 0x88E0 +#define GL_WRITE_ONLY_ARB 0x88B9 +#define GL_DYNAMIC_DRAW_ARB 0x88E8 + +#define GL_TEXTURE0 0x84C0 +#define GL_TEXTURE1 0x84C1 +#define GL_TEXTURE2 0x84C2 + + +#endif diff --git a/applications/testapps/hevcbench/hevcbench.vcxproj b/applications/testapps/hevcbench/hevcbench.vcxproj new file mode 100644 index 0000000..9f677d3 --- /dev/null +++ b/applications/testapps/hevcbench/hevcbench.vcxproj @@ -0,0 +1,262 @@ + + + + + Debug + Win32 + + + Debug + x64 + + + Release + Win32 + + + Release + x64 + + + + {F728CC84-A7D1-43D2-8A28-05CE9F2FE0D0} + + + + Application + false + MultiByte + + + Application + false + MultiByte + + + Application + false + MultiByte + + + Application + false + MultiByte + + + + + + + + + + + + + + + + + + + + + + + <_ProjectFileVersion>10.0.40219.1 + ../../../bin/$(Platform)\$(Configuration)/ + ../../../bin/$(Platform)\$(Configuration)/ + .\obj\$(Platform)\$(Configuration)\$(ProjectName)\ + .\obj\$(Platform)\$(Configuration)\$(ProjectName)\ + true + true + ../../../bin/$(Platform)\$(Configuration)/ + ../../../bin/$(Platform)\$(Configuration)/ + .\obj\$(Platform)\$(Configuration)\$(ProjectName)\ + .\obj\$(Platform)\$(Configuration)\$(ProjectName)\ + false + false + + + + + + + + + + Disabled + C:\works\software\signals\modules\extra_lib\include\SDL2\;../../../include;../../../extra_lib/include;%(AdditionalIncludeDirectories) + WIN32;_DEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_SCL_SECURE_NO_DEPRECATE;%(PreprocessorDefinitions) + true + EnableFastChecks + MultiThreadedDebugDLL + .\obj\mp42ts_deb/$(ProjectName).pch + .\obj\mp42ts_deb/ + .\obj\mp42ts_deb/ + .\obj\mp42ts_deb/ + true + Level3 + true + EditAndContinue + + + _DEBUG;%(PreprocessorDefinitions) + 0x040c + + + odbc32.lib;odbccp32.lib;%(AdditionalDependencies) + $(OutDir)$(TargetName)$(TargetExt) + true + C:\works\software\signals\modules\extra_lib\lib/$(Platform)/$(Configuration);../../../extra_lib/lib/$(Platform)/$(Configuration);%(AdditionalLibraryDirectories) + true + $(IntDir)$(ProjectName).pdb + Console + false + + + MachineX86 + + + true + + + + + + + + + + + Disabled + C:\works\software\signals\modules\extra_lib\include\SDL2\;../../../include;../../../extra_lib/include;%(AdditionalIncludeDirectories) + WIN32;_DEBUG;_CONSOLE;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_SCL_SECURE_NO_DEPRECATE;%(PreprocessorDefinitions) + EnableFastChecks + MultiThreadedDebugDLL + .\obj\mp42ts_deb/$(ProjectName).pch + .\obj\mp42ts_deb/ + .\obj\mp42ts_deb/ + .\obj\mp42ts_deb/ + true + Level3 + true + ProgramDatabase + + + 
_DEBUG;%(PreprocessorDefinitions) + 0x040c + + + odbc32.lib;odbccp32.lib;%(AdditionalDependencies) + $(OutDir)$(TargetName)$(TargetExt) + true + C:\works\software\signals\modules\extra_lib\lib/$(Platform)/$(Configuration);../../../extra_lib/lib/$(Platform)/$(Configuration);%(AdditionalLibraryDirectories) + true + $(IntDir)$(ProjectName).pdb + Console + false + + + + + true + + + + + + + + + + + MaxSpeed + OnlyExplicitInline + C:\works\software\signals\modules\extra_lib\include\SDL2\;../../../include;../../../extra_lib/include;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_SCL_SECURE_NO_DEPRECATE;%(PreprocessorDefinitions) + true + MultiThreadedDLL + true + .\obj\mp42ts_rel/$(ProjectName).pch + .\obj\mp42ts_rel/ + .\obj\mp42ts_rel/ + .\obj\mp42ts_rel/ + Level3 + true + + + NDEBUG;%(PreprocessorDefinitions) + 0x040c + + + odbc32.lib;odbccp32.lib;%(AdditionalDependencies) + $(OutDir)$(TargetName)$(TargetExt) + true + C:\works\software\signals\modules\extra_lib\lib/$(Platform)/$(Configuration);../../../extra_lib/lib/$(Platform)/$(Configuration);%(AdditionalLibraryDirectories) + $(IntDir)$(ProjectName).pdb + Console + false + + + MachineX86 + + + true + + + + + + + + + + + Full + AnySuitable + ../../../include;../../../extra_lib/include;%(AdditionalIncludeDirectories) + WIN32;NDEBUG;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_SCL_SECURE_NO_DEPRECATE;%(PreprocessorDefinitions) + true + MultiThreadedDLL + true + .\obj\mp42ts_rel/$(ProjectName).pch + .\obj\mp42ts_rel/ + .\obj\mp42ts_rel/ + .\obj\mp42ts_rel/ + Level3 + true + true + Speed + + + NDEBUG;%(PreprocessorDefinitions) + 0x040c + + + odbc32.lib;odbccp32.lib;%(AdditionalDependencies) + $(OutDir)$(TargetName)$(TargetExt) + true + ../../../extra_lib/lib/$(Platform)/$(Configuration);%(AdditionalLibraryDirectories) + $(IntDir)$(ProjectName).pdb + Console + false + + + + + true + + + + + + + + {d3540754-e0cf-4604-ac11-82de9bd4d814} + + + + + + \ No newline at end of file diff --git a/applications/testapps/hevcbench/main.c b/applications/testapps/hevcbench/main.c new file mode 100644 index 0000000..04491aa --- /dev/null +++ b/applications/testapps/hevcbench/main.c @@ -0,0 +1,837 @@ +/* + * GPAC - Multimedia Framework C SDK + * + * Authors: Jean Le Feuvre + * Copyright (c) Telecom ParisTech 2012 + * All rights reserved + * + * This file is part of GPAC - sample DASH library usage + * + */ + +#include "defbench.h" + + + +#if defined(WIN32) && !defined(_WIN32_WCE) && !defined(__GNUC__) +# pragma comment(lib, "libLibOpenHevcWrapper") +#pragma comment(lib, "SDL2") +//#pragma comment(lib, "SDL2main") +#pragma comment(lib, "opengl32") +#endif + +//0: memcpy - 1: memmove - 2: u32 * cast and for loop copy of u32* - 3: memset 0 - 4: not touching the mapped buffer: 5: full memcpy, rely on stride in pixelstorei +#define COPY_TYPE 0 +//set to 1 to disable final gltexImage in PBO mode +#define NO_TEX 0 + + +SDL_Window *window = NULL; +SDL_GLContext *glctx= NULL; +SDL_Renderer *render= NULL; +GLint txid[3]; +u8 *pY = NULL; +u8 *pU = NULL; +u8 *pV = NULL; +u32 width = 0; +u32 height = 0; +u32 size=0; +u32 bpp=8; +u32 Bpp=1; +GLint memory_format=GL_UNSIGNED_BYTE; +GLint pixel_format=GL_LUMINANCE; +GLint texture_type=GL_TEXTURE_RECTANGLE_EXT; +u32 gl_nb_frames = 1; +u32 gl_upload_time = 0; +u32 gl_draw_time = 0; +Bool pbo_mode = GF_TRUE; +Bool first_tx_load = GF_FALSE; +Bool use_vsync=0; + +GLint glsl_program; +GLint vertex_shader; +GLint fragment_shader; + +GLint pbo_Y=0; +GLint pbo_U=0; +GLint pbo_V=0; 
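/*
 * Editor's sketch (not part of the original patch): the benchmark below
 * uploads each decoded YUV plane to a texture through a pixel buffer object
 * when pbo_mode is set. The helper here summarizes that path for a single
 * plane, assuming the GL entry points declared in defbench.h and the
 * texture_type/pixel_format/memory_format globals above; the function name
 * and signature are hypothetical and the copy uses the COPY_TYPE 0 strategy
 * (plain memcpy).
 */
static void upload_plane_via_pbo(GLint pbo, GLint tex, const u8 *plane, u32 plane_size, u32 pw, u32 ph)
{
	u8 *dst;
	/* map the PBO and copy the plane into driver-owned memory */
	glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo);
	dst = (u8 *) glMapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB);
	if (!dst) return;
	memcpy(dst, plane, plane_size);
	glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB);
	/* with the PBO still bound, a NULL data pointer makes glTexImage2D source
	   its pixels from the buffer object (offset 0) instead of client memory */
	glBindTexture(texture_type, tex);
	glTexImage2D(texture_type, 0, 1, pw, ph, 0, pixel_format, memory_format, NULL);
	glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
}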
+ +GLDECL_STATIC(glActiveTexture); +GLDECL_STATIC(glClientActiveTexture); +GLDECL_STATIC(glCreateProgram); +GLDECL_STATIC(glDeleteProgram); +GLDECL_STATIC(glLinkProgram); +GLDECL_STATIC(glUseProgram); +GLDECL_STATIC(glCreateShader); +GLDECL_STATIC(glDeleteShader); +GLDECL_STATIC(glShaderSource); +GLDECL_STATIC(glCompileShader); +GLDECL_STATIC(glAttachShader); +GLDECL_STATIC(glDetachShader); +GLDECL_STATIC(glGetShaderiv); +GLDECL_STATIC(glGetInfoLogARB); +GLDECL_STATIC(glGetUniformLocation); +GLDECL_STATIC(glUniform1f); +GLDECL_STATIC(glUniform1i); +GLDECL_STATIC(glGenBuffers); +GLDECL_STATIC(glDeleteBuffers); +GLDECL_STATIC(glBindBuffer); +GLDECL_STATIC(glBufferData); +GLDECL_STATIC(glBufferSubData); +GLDECL_STATIC(glMapBuffer); +GLDECL_STATIC(glUnmapBuffer); + + +static char *glsl_yuv_shader = "\ + #version 140\n\ + #extension GL_ARB_texture_rectangle : enable\n\ + uniform sampler2DRect y_plane;\ + uniform sampler2DRect u_plane;\ + uniform sampler2DRect v_plane;\ + uniform float width;\ + uniform float height;\ + const vec3 offset = vec3(-0.0625, -0.5, -0.5);\ + const vec3 R_mul = vec3(1.164, 0.000, 1.596);\ + const vec3 G_mul = vec3(1.164, -0.391, -0.813);\ + const vec3 B_mul = vec3(1.164, 2.018, 0.000);\ + out vec4 FragColor;\ + void main(void) \ + {\ + vec2 texc;\ + vec3 yuv, rgb;\ + texc = gl_TexCoord[0].st;\ + texc.y = 1.0 - texc.y;\ + texc.x *= width;\ + texc.y *= height;\ + yuv.x = texture2DRect(y_plane, texc).r; \ + texc.x /= 2.0;\ + texc.y /= 2.0;\ + yuv.y = texture2DRect(u_plane, texc).r; \ + yuv.z = texture2DRect(v_plane, texc).r; \ + yuv += offset; \ + rgb.r = dot(yuv, R_mul); \ + rgb.g = dot(yuv, G_mul); \ + rgb.b = dot(yuv, B_mul); \ + FragColor = vec4(rgb, 1.0);\ + }"; + +static char *default_glsl_vertex = "\ + varying vec3 gfNormal;\ + varying vec3 gfView;\ + void main(void)\ + {\ + gfView = vec3(gl_ModelViewMatrix * gl_Vertex);\ + gfNormal = normalize(gl_NormalMatrix * gl_Normal);\ + gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;\ + gl_TexCoord[0] = gl_MultiTexCoord0;\ + }"; + + + +Bool sdl_compile_shader(u32 shader_id, const char *name, const char *source) +{ + GLint blen = 0; + GLsizei slen = 0; + u32 len; + if (!source || !shader_id) return 0; + len = (u32) strlen(source); + glShaderSource(shader_id, 1, &source, &len); + glCompileShader(shader_id); + + glGetShaderiv(shader_id, GL_INFO_LOG_LENGTH , &blen); + if (blen > 1) { + char* compiler_log = (char*) gf_malloc(blen); +#ifdef CONFIG_DARWIN_GL + glGetInfoLogARB((GLhandleARB) shader_id, blen, &slen, compiler_log); +#else + glGetInfoLogARB(shader_id, blen, &slen, compiler_log); +#endif + GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[GLSL] Failed to compile shader %s: %s\n", name, compiler_log)); + gf_free (compiler_log); + return 0; + } + return 1; +} + +void sdl_init(u32 _width, u32 _height, u32 _bpp, u32 stride, Bool use_pbo) +{ + u32 i, flags; + Float hw, hh; + GLint loc; + GF_Matrix mx; + width = _width; + height = _height; + bpp = _bpp; + + + SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1); + SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16); + SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 0); + SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8); + SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8); + SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8); + + flags = SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE | SDL_WINDOW_BORDERLESS | SDL_WINDOW_MAXIMIZED; + if (use_vsync) flags |= SDL_RENDERER_PRESENTVSYNC; + window = SDL_CreateWindow("", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, width, height, flags); + glctx = SDL_GL_CreateContext(window); + 
SDL_GL_MakeCurrent(window, glctx); + + render = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED); + + +#if (COPY_TYPE==5) + size = stride*height; +#else + size = width*height; +#endif + if (bpp>8) { + size *= 2; + Bpp = 2; + } + pY = gf_malloc(size*sizeof(u8)); + memset(pY, 0x80, size*sizeof(u8)); + pU = gf_malloc(size/4*sizeof(u8)); + memset(pU, 0, size/4*sizeof(u8)); + pV = gf_malloc(size/4*sizeof(u8)); + memset(pV, 0, size/4*sizeof(u8)); + + glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); + glViewport(0, 0, width, height); + + gf_mx_init(mx); + hw = ((Float)width)/2; + hh = ((Float)height)/2; + gf_mx_ortho(&mx, -hw, hw, -hh, hh, 50, -50); + glMatrixMode(GL_PROJECTION); + glLoadMatrixf(mx.m); + + + glMatrixMode(GL_TEXTURE); + glLoadIdentity(); + + glMatrixMode(GL_MODELVIEW); + glLoadIdentity(); + + glClear(GL_DEPTH_BUFFER_BIT); + glDisable(GL_NORMALIZE); + glDisable(GL_DEPTH_TEST); + glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST); + glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST); + glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); + glDisable(GL_LINE_SMOOTH); + glDisable(GL_LINE_SMOOTH); + glDisable(GL_LIGHTING); + glDisable(GL_BLEND); + glDisable(GL_TEXTURE_2D); + glDisable(GL_CULL_FACE); + + + GET_GLFUN(glActiveTexture); + GET_GLFUN(glClientActiveTexture); + GET_GLFUN(glCreateProgram); + GET_GLFUN(glDeleteProgram); + GET_GLFUN(glLinkProgram); + GET_GLFUN(glUseProgram); + GET_GLFUN(glCreateShader); + GET_GLFUN(glDeleteShader); + GET_GLFUN(glShaderSource); + GET_GLFUN(glCompileShader); + GET_GLFUN(glAttachShader); + GET_GLFUN(glDetachShader); + GET_GLFUN(glGetShaderiv); + GET_GLFUN(glGetInfoLogARB); + GET_GLFUN(glGetUniformLocation); + GET_GLFUN(glUniform1f); + GET_GLFUN(glUniform1i); + GET_GLFUN(glGenBuffers); + GET_GLFUN(glDeleteBuffers); + GET_GLFUN(glBindBuffer); + GET_GLFUN(glBufferData); + GET_GLFUN(glBufferSubData); + GET_GLFUN(glMapBuffer); + GET_GLFUN(glUnmapBuffer); + + glsl_program = glCreateProgram(); + vertex_shader = glCreateShader(GL_VERTEX_SHADER); + sdl_compile_shader(vertex_shader, "vertex", default_glsl_vertex); + + fragment_shader = glCreateShader(GL_FRAGMENT_SHADER); + sdl_compile_shader(fragment_shader, "fragment", glsl_yuv_shader); + + glAttachShader(glsl_program, vertex_shader); + glAttachShader(glsl_program, fragment_shader); + glLinkProgram(glsl_program); + + glGenTextures(3, txid); + for (i=0; i<3; i++) { + + glEnable(texture_type); + glBindTexture(texture_type, txid[i] ); + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + if (bpp>8) { + glPixelTransferi(GL_RED_SCALE, 64); + memory_format=GL_UNSIGNED_SHORT; + } + glTexParameteri(texture_type, GL_TEXTURE_WRAP_S, GL_CLAMP); + glTexParameteri(texture_type, GL_TEXTURE_WRAP_T, GL_CLAMP); + glTexParameteri(texture_type, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(texture_type, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + + if (bpp>8) { + glPixelStorei(GL_UNPACK_ALIGNMENT, 2); + } else { + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + } + glDisable(texture_type); + } + + //sets uniforms: y, u, v textures point to texture slots 0, 1 and 2 + glUseProgram(glsl_program); + for (i=0; i<3; i++) { + const char *txname = (i==0) ? "y_plane" : (i==1) ? 
"u_plane" : "v_plane"; + loc = glGetUniformLocation(glsl_program, txname); + if (loc == -1) { + GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to locate texture %s in YUV shader\n", txname)); + continue; + } + glUniform1i(loc, i); + } + loc = glGetUniformLocation(glsl_program, "width"); + if (loc>= 0) { + Float w = (Float) width; + glUniform1f(loc, w); + } + loc = glGetUniformLocation(glsl_program, "height"); + if (loc>= 0) { + Float h = (Float) height; + glUniform1f(loc, h); + } + + glUseProgram(0); + + + if (glMapBuffer==NULL) use_pbo = GF_FALSE; + + + pbo_mode = use_pbo; + first_tx_load = use_pbo ? GF_FALSE : GF_TRUE; + if (use_pbo) { + glGenBuffers(1, &pbo_Y); + glGenBuffers(1, &pbo_U); + glGenBuffers(1, &pbo_V); + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo_Y); + glBufferData(GL_PIXEL_UNPACK_BUFFER_ARB, size, NULL, GL_DYNAMIC_DRAW_ARB); + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo_U); + glBufferData(GL_PIXEL_UNPACK_BUFFER_ARB, size/4, NULL, GL_DYNAMIC_DRAW_ARB); + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo_V); + glBufferData(GL_PIXEL_UNPACK_BUFFER_ARB, size/4, NULL, GL_DYNAMIC_DRAW_ARB); + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0); + } +} + +void sdl_close() +{ + DEL_SHADER(vertex_shader); + DEL_SHADER(fragment_shader); + DEL_PROGRAM(glsl_program ); + + if (pbo_mode && pbo_Y) { + glDeleteBuffers(1, &pbo_Y); + glDeleteBuffers(1, &pbo_U); + glDeleteBuffers(1, &pbo_V); + } + + if (pY) gf_free(pY); + if (pU) gf_free(pU); + if (pV) gf_free(pV); + + if (glctx) SDL_GL_DeleteContext(glctx); + if (render) SDL_DestroyRenderer(render); + if (window) SDL_DestroyWindow(window); +} + +void sdl_draw_quad() +{ + Float w = ((Float)width)/2; + Float h = ((Float)height)/2; + + glBegin(GL_QUADS); + + glVertex3f(w, h, 0); + glTexCoord2f(1, 0); + + glVertex3f(w, -h, 0); + glTexCoord2f(0, 0); + + glVertex3f(-w, -h, 0); + glTexCoord2f(0, 1); + + glVertex3f(-w, h, 0); + glTexCoord2f(1, 1); + + glEnd(); +} + + +void sdl_draw_frame(u8 *pY, u8 *pU, u8 *pV, u32 w, u32 h, u32 bit_depth, u32 stride) +{ + u32 needs_stride = 0; + u32 now, end; + + if (stride != w) { + if (bit_depth==10) { + if (stride != 2*w) { + needs_stride = stride; + } + } else { + needs_stride = stride; + } + } + + glEnable(texture_type); + + now = gf_sys_clock(); + + + if (first_tx_load) { + glBindTexture(texture_type, txid[0] ); + if (needs_stride) glPixelStorei(GL_UNPACK_ROW_LENGTH, needs_stride); + glTexImage2D(texture_type, 0, 1, w, h, 0, pixel_format, memory_format, pY); + + glBindTexture(texture_type, txid[1] ); + if (needs_stride) glPixelStorei(GL_UNPACK_ROW_LENGTH, needs_stride/2); + glTexImage2D(texture_type, 0, 1, w/2, h/2, 0, pixel_format, memory_format, pU); + + glBindTexture(texture_type, txid[2] ); + if (needs_stride) glPixelStorei(GL_UNPACK_ROW_LENGTH, needs_stride/2); + glTexImage2D(texture_type, 0, 1, w/2, h/2, 0, pixel_format, memory_format, pV); + + if (needs_stride) glPixelStorei(GL_UNPACK_ROW_LENGTH, 0); + first_tx_load = GF_FALSE; + } else if (pbo_mode) { + u32 i, linesize, count, p_stride; + u8 *ptr; +#if (COPY_TYPE==2) + u32 *s, *d; + u32 j, c2; +#endif + + glPixelStorei(GL_UNPACK_ROW_LENGTH, 0); + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo_Y); + ptr =(u8 *)glMapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB); +#if (COPY_TYPE==5) + memcpy(ptr, pY, size); +#elif (COPY_TYPE==3) + memset(ptr, 0x80, size); +#elif (COPY_TYPE==4) +#else + linesize = width*Bpp; + p_stride = stride*Bpp; + count = h; +#if (COPY_TYPE==2) + c2 = linesize/4; + s = (u32 *)pY; + d = (u32 *)ptr; +#endif + for (i=0; 
i=GF_ISOM_HEVCTYPE_HEVC_ONLY) { + track = i+1; + break; + } + } + + if (!track) { + gf_isom_close(isom); + sdl_close(); + gf_sys_close(); + return 0; + } + + count = gf_isom_get_sample_count(isom, track); + start = gf_sys_clock(); + + esd = gf_isom_get_esd(isom, track, 1); + ohevc = libOpenHevcInit(nb_threads, mode); + if (esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { + libOpenHevcCopyExtraData(ohevc, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength+8); + } + libOpenHevcStartDecoder(ohevc); + gf_odf_desc_del((GF_Descriptor *)esd); + gf_isom_set_sample_padding(isom, track, 8); + + run=1; + check_prompt=0; + for (i=0; idata, sample->dataLength, sample->DTS+sample->CTS_Offset) ) { + OpenHevc_Frame_cpy HVCFrame; + + libOpenHevcGetPictureInfo(ohevc, &HVCFrame.frameInfo); + if (!sdl_is_init && !no_display) { + sdl_init(HVCFrame.frameInfo.nWidth, HVCFrame.frameInfo.nHeight, HVCFrame.frameInfo.nBitDepth, HVCFrame.frameInfo.nYPitch+32, use_pbo); + sdl_is_init=1; + start = gf_sys_clock(); + nb_frames_at_start = i+1; + } + + if (no_display) { + OpenHevc_Frame HVCFrame_ptr; + libOpenHevcGetOutput(ohevc, 1, &HVCFrame_ptr); + } else if (use_raw_memory) { + OpenHevc_Frame HVCFrame_ptr; + libOpenHevcGetOutput(ohevc, 1, &HVCFrame_ptr); + + sdl_draw_frame((u8 *) HVCFrame_ptr.pvY, (u8 *) HVCFrame_ptr.pvU, (u8 *) HVCFrame_ptr.pvV, HVCFrame.frameInfo.nWidth, HVCFrame.frameInfo.nHeight, HVCFrame.frameInfo.nBitDepth, HVCFrame.frameInfo.nYPitch+32); + } else { + memset(&HVCFrame, 0, sizeof(OpenHevc_Frame) ); + HVCFrame.pvY = (void*) pY; + HVCFrame.pvU = (void*) pU; + HVCFrame.pvV = (void*) pV; + libOpenHevcGetOutputCpy(ohevc, 1, &HVCFrame); + sdl_draw_frame(pY, pU, pV, HVCFrame.frameInfo.nWidth, HVCFrame.frameInfo.nHeight, HVCFrame.frameInfo.nBitDepth, HVCFrame.frameInfo.nYPitch); + } + } + + gf_isom_sample_del(&sample); + + now = gf_sys_clock(); + fprintf(stderr, "%d %% %d frames in %d ms - FPS %02.2g - push time %d ms - draw %d ms\r", 100*(i+1-nb_frames_at_start)/count, i+1-nb_frames_at_start, now-start, 1000.0 * (i+1-nb_frames_at_start) / (now-start), gl_upload_time / gl_nb_frames , (gl_draw_time - gl_upload_time) / gl_nb_frames ); + } else { + gf_sleep(10); + i--; + } + check_prompt++; + if (check_prompt==50) { + if (gf_prompt_has_input()) { + switch (gf_prompt_get_char()) { + case 'q': + run = 0; + break; + case 'm': + use_raw_memory = !use_raw_memory; + break; + case 'p': + if (paused) { + paused=0; + start += gf_sys_clock()-pause_time; + } else { + paused = 1; + pause_time=gf_sys_clock(); + } + break; + case 'r': + start = gf_sys_clock(); + nb_frames_at_start = i+1; + gl_upload_time = gl_draw_time = 0; + gl_nb_frames=1; + break; + + } + } + check_prompt=0; + } + } + now = gf_sys_clock(); + fprintf(stderr, "Decoded %d frames in %d ms - FPS %g\n", i+1, now-start, 1000.0 * (i+1) / (now-start) ); + + libOpenHevcClose(ohevc); + gf_isom_close(isom); + + if (!no_display) + sdl_close(); + + gf_sys_close(); + return 1; +} + diff --git a/configure b/configure index 311b689..fcd621e 100755 --- a/configure +++ b/configure @@ -369,6 +369,9 @@ case "$cpu" in SunOS) canon_arch=`isainfo -n` ;; + Darwin) + canon_arch="x86_64" + ;; *) canon_arch="`$cc -dumpmachine | sed -e 's,\([^-]*\)-.*,\1,'`" ;; @@ -378,6 +381,7 @@ case "$cpu" in if [ -z "`echo $CFLAGS | grep -- -m32`" ]; then cpu="x86_64" want_pic="yes" + is_64="yes" fi fi ;; @@ -549,7 +553,7 @@ EOF GPAC_SH_FLAGS="" strip="strip -x" if test "$is_64" = 
"yes" ; then - LDFLAGS="$LDFLAGS -read_only_relocs warning" + LDFLAGS="$LDFLAGS" fi darwin="yes" gcc_version=`$cc -v 2>&1 | grep version | cut -d ' ' -f3` @@ -660,6 +664,16 @@ cxx_naked=$cxx cc="$cc $CFLAGS" cxx="$cxx $CXXFLAGS" +#look for zlib +cat > $TMPC << EOF +#include +int main( void ) { } +EOF + +if $cc -o $TMPO $TMPC -msse2 $LDFLAGS 2> /dev/null ; then + CFLAGS="$CFLAGS -msse2" +fi + #look for zlib cat > $TMPC << EOF @@ -899,31 +913,26 @@ EOF else js_flags="-DUSE_FFDEV_14 $js_flags" fi - else -cat > $TMPC << EOF -#include -int main( void ) { JSContext *cx; JS_SetContextThread(cx); } -EOF - if $cc -o $TMPO $TMPC $js_flags $LDFLAGS $js_lib 2> /dev/null ; then -cat > $TMPC << EOF -#include -int main( void ) { JSContext *cx; JS_SetRuntimeThread(cx); } -EOF - if $cc -o $TMPO $TMPC $js_flags $LDFLAGS $js_lib 2> /dev/null ; then - js_flags="$js_flags" - else - js_flags="-DNO_JS_RUNTIMETHREAD $js_flags" - fi - else - js_flags="-DUSE_FFDEV_11 $js_flags" - fi fi fi fi fi fi -#fi + + +if test "$has_js" != "no" ; then +cat > $TMPC << EOF +cat > $TMPC << EOF +#include +int main( void ) { JSContext *cx; JS_SetRuntimeThread(cx); } +EOF + if $cc -o $TMPO $TMPC $js_flags $LDFLAGS $js_lib 2> /dev/null ; then + js_flags="$js_flags" + else + js_flags="-DNO_JS_RUNTIMETHREAD $js_flags" + fi +fi #end JS test @@ -2980,6 +2989,10 @@ if test "$has_ipv6" = "yes" ; then echo "#define GPAC_HAS_IPV6" >> $TMPH fi +if test "$is_64" = "yes" ; then + echo "#define GPAC_64_BITS" >> $TMPH +fi + if test "$win32" = "yes" ; then echo "CONFIG_DIRECTX=$has_mingw_directx" >> config.mak if test "$has_mingw_directx" = "yes" ; then @@ -3047,7 +3060,12 @@ if test "$has_x11_xv" = "yes"; then fi if test "$is_64" = "yes"; then +#not on OSX ... +if test "$darwin" = "yes"; then + echo "X11_LIB_PATH=$X11_PATH/lib" >> config.mak +else echo "X11_LIB_PATH=$X11_PATH/lib64" >> config.mak +fi else echo "X11_LIB_PATH=$X11_PATH/lib" >> config.mak fi diff --git a/doc/configuration.html b/doc/configuration.html index 20194b0..ab6423d 100644 --- a/doc/configuration.html +++ b/doc/configuration.html @@ -10,7 +10,7 @@
GPAC Configuration file documentation
Version 0.5.0

-Last Modified $LastChangedDate: 2014-02-20 18:47:55 +0000 (Do, 20. Feb 2014) $ +Last Modified $LastChangedDate: 2014-03-27 06:31:20 -0400 (Do, 27. Mär 2014) $



@@ -221,7 +221,7 @@ Specifies the user prefered language as expressed in ISO 639-1. This is used to

DrawLateFrames [value: "yes" "no"]

-If set, late frames will still be drawn. If not set, the late frames are droped (or executed for systems decoders) untill the decoder output is back in sync. This is by default off to keep better sync, but may be usefull when testing heavy content or slow renderers. +If set, late frames will still be drawn. If not set, late frames are dropped (or executed for systems decoders) until the decoder output is back in sync. This is on by default, which is useful when testing heavy content or slow renderers, but should be set to off when better sync is needed or when skipped frames must be monitored.

ForceSingleClock [value: "yes" "no"]

@@ -326,7 +326,7 @@ Allows video textures to be build directly from video decoder internal buffers. -ForceOpenGL [value: "yes", "no", "hybrid", "raster"] +ForceOpenGL [value: "always", "disable", "hybrid", "raster"]

Specifies that 2D rendering will be performed by OpenGL rather than raster 2D. This will involve polygon tesselation which may not be supported on all platforms, and 2D graphics will not loo as nice as 2D mode. The hybrid mode performs software drawing of 2D graphics with no textures (better quality) and uses OpenGL for all textures. The raster mode only uses OpenGL for pixel IO but does not perform polygin fill (no tesselation) (slow, mainly for test purposes).

DefaultNavigationMode [value: "Walk", "Fly", "Examine"] @@ -727,6 +727,15 @@ Specifies whether downloaded files should not be deleted.

AutoSwitchCount [value: positive integer]

For debug purposes, instructs the player to switch representation every N segments. If 0 (default), switching is disabled.

+BufferMode [value: segments, minBuffer, none] +

+Selects buffer mode: +

    +
  • segments: buffers complete segments as indicated in the MPD before handing them to the player.
  • +
  • minBuffer: asks the player to buffer media for the time indicated in the MPD (default mode), but segments are not pre-buffered.
  • +
  • none: uses the player settings for buffering.
  • +
+

DisableSwitching [value: yes, no]

Disables automatic adaptation logic. Default is no

@@ -754,8 +763,11 @@ If chunk is selected, media data is re-parsed at each HTTP 1.1 chunk end. If alw Enables aborts of HTTP transfer when rate gets too low. This may result in a packet drops. Default is no.

UseServerUTC [value: yes, no]

-Enables using Server-UTC HTTP header to compensate any drift between client and server. Default is no. - +Enables using the Server-UTC HTTP header to compensate for any drift between client and server. Default is yes.

+DebugAdaptationSet [value: integer] +

+Plays only the adaptation set indicated by its index in the MPD. If the index is negative, all sets are used (default mode). +
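For illustration only, the DASH options documented above are read from the GPAC configuration file; a minimal excerpt might look as follows (a sketch: the [DASH] section name is an assumption and is not part of this patch, and the values simply restate the defaults described above):

[DASH]
BufferMode=minBuffer
AutoSwitchCount=0
DisableSwitching=no
UseServerUTC=yes
DebugAdaptationSet=-1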



diff --git a/extra_lib/include/openHevcWrapper.h b/extra_lib/include/openHevcWrapper.h index cc1ee94..1d49ebf 100644 --- a/extra_lib/include/openHevcWrapper.h +++ b/extra_lib/include/openHevcWrapper.h @@ -65,6 +65,9 @@ void libOpenHevcClose(OpenHevc_Handle openHevcHandle); void libOpenHevcFlush(OpenHevc_Handle openHevcHandle); const char *libOpenHevcVersion(OpenHevc_Handle openHevcHandle); +void libOpenHevcSetActiveDecoders(OpenHevc_Handle openHevcHandle, int val); +void libOpenHevcSetViewLayers(OpenHevc_Handle openHevcHandle, int val); + #ifdef __cplusplus } #endif diff --git a/gui/gui.bt b/gui/gui.bt index 36487b2..50007f6 100644 --- a/gui/gui.bt +++ b/gui/gui.bt @@ -23,7 +23,9 @@ DEF GPAC_UI OrderedGroup { children [ Background2D {} - DEF MovieControl MediaControl {} + DEF MovieControl MediaControl { + loop FALSE + } DEF MovieSensor MediaSensor { } DEF Movie Transform2D { diff --git a/gui/gui.js b/gui/gui.js index d09946b..ebea4be 100644 --- a/gui/gui.js +++ b/gui/gui.js @@ -31,6 +31,17 @@ current_url = ''; current_duration = 0.0; current_time = 0.0; player_control = null; +icon_pause=1; +icon_play=0; +max_playercontrol_width=1024; +dynamic_scene=1; +screen_width = 0; +screen_height = 0; + +GF_STATE_PLAY=0; +GF_STATE_PAUSE=1; +GF_STATE_STOP=2; +GF_STATE_TRICK=3; all_extensions = []; @@ -60,8 +71,10 @@ function on_movie_active(value) function on_movie_time(value) { + var diff = current_time - value; + if (diff<0) diff = -diff; /*filter out every 1/2 seconds*/ - if (current_time+0.5 > value) return; + if (diff < 0.5) return; current_time = value; player_control.set_time(value); if (UPnP_Enabled) UPnP.MovieTime = value; @@ -229,7 +242,9 @@ function compute_movie_size(width, height) { var w, h, r_w, r_h; if (!width || !height) return; - + + if (width > gpac.screen_width) width = gpac.screen_width; + if (height > gpac.screen_height) height = gpac.screen_height; w = width; h = height; r_w = r_h = 1; @@ -268,11 +283,16 @@ function initialize() { root.children[0].backColor = gwskin.back_color; movie.children[0].on_size = function(evt) { + if (!movie_connected) { + movie_connected = true; + gpac.set_3d(evt.type3d ? 
1 : 0); + player_control.play.switch_icon(icon_pause); + dynamic_scene = evt.dynamic_scene; + } if (!gpac.fullscreen) { compute_movie_size(evt.width, evt.height); } } - movie.children[0].addEventListener('gpac_scene_attached', movie.children[0].on_size, 0); movie.children[0].on_media_progress = function(evt) { if (!current_duration) return; @@ -281,13 +301,22 @@ function initialize() { var percent_playback = 100.0 * current_time / current_duration; //alert('URL data ' + percent_dload + ' - ' + percent_playback + ' playback'); } - movie.children[0].addEventListener('progress', movie.children[0].on_media_progress, 0); movie.children[0].on_media_playing = function(evt) { - alert('URL is now paying'); + player_control.play.switch_icon(icon_pause); + } + movie.children[0].on_media_end = function(evt) { + if (player_control.duration && movie_ctrl.loop) { + movie_ctrl.mediaStartTime = 0; + current_time=0; + } } + + movie.children[0].addEventListener('gpac_scene_attached', movie.children[0].on_size, 0); + movie.children[0].addEventListener('progress', movie.children[0].on_media_progress, 0); movie.children[0].addEventListener('playing', movie.children[0].on_media_playing, 0); movie.children[0].addEventListener('canplay', movie.children[0].on_media_playing, 0); + movie.children[0].addEventListener('ended', movie.children[0].on_media_end, 0); movie.children[0].on_media_waiting = function(evt) { alert('URL is now buffering'); @@ -414,7 +443,6 @@ function initialize() { if (url.indexOf('://')<0) set_movie_url('gpac://'+url); else set_movie_url(url); } else { -// show_dock(true); player_control.show(); } } @@ -437,7 +465,7 @@ function set_movie_url(url) test_resource.on_attached = function(evt) { this.callback_done = true; - var current_url = this.url[0]; + current_url = this.url[0]; /*process the error or connect service*/ if (evt.error) { @@ -445,17 +473,13 @@ function set_movie_url(url) gpacui_show_window(notif); } else { movie.children[0].url[0] = current_url; + movie_ctrl.mediaSpeed = 1; + movie_ctrl.mediaStartTime = 0; movie_ctrl.url[0] = current_url; movie_sensor.url[0] = current_url; root.children[0].set_bind = FALSE; - if (!movie_connected) { - if (!gpac.fullscreen) { - compute_movie_size(evt.width, evt.height); - } - movie_connected = true; - gpac.set_3d(evt.type3d ? 
1 : 0); - } + movie.children[0].on_size(evt); } /*destroy the resource node*/ this.url.length = 0; @@ -464,6 +488,7 @@ function set_movie_url(url) this.on_attached = null; } + /*get notified when service loads or fails*/ test_resource.addEventListener('gpac_scene_attached', test_resource.on_attached, 0); @@ -495,8 +520,10 @@ function set_movie_url(url) //performs layout on all contents function layout() { - var i, list, start_x; - player_control.set_size(display_width, player_control.height); + var i, list, start_x, w; + w = display_width; + if (max_playercontrol_width && (w>max_playercontrol_width)) w=max_playercontrol_width; + player_control.set_size(w, player_control.height); dock.set_size(display_width, display_height); @@ -587,76 +614,106 @@ function new_player_control(container) wnd.snd_low = null; wnd.snd_ctrl = null; } - - if (0) { - wnd.rewind = gw_new_icon_button(wnd.infobar, 'icons/media-seek-backward.svg', 'Rewind', 'icon'); - wnd.rewind.set_size(small_control_icon_size, small_control_icon_size); - } else { - wnd.rewind = null; - } - if (1) { - wnd.stop = gw_new_icon_button(wnd.infobar, 'icons/media-playback-stop.svg', 'Stop', 'icon'); - wnd.stop.on_click = function() { - player_control.stoped_url = ''+current_url; + + wnd.set_state = function(state) { + if (!movie_connected && !controlled_renderer) return; + + if (state==this.state) return; + + if (state == GF_STATE_STOP) { + this.stoped_url = ''+current_url; if (controlled_renderer) controlled_renderer.Stop(); else { set_movie_url(''); /*override movie_connected to avoid auto-resizing*/ movie_connected = true; } - movie_ctrl.mediaStartTime = 0; - player_control.media_line.set_value(0); - player_control.play.switch_icon(1); - } - wnd.stop.set_size(small_control_icon_size, small_control_icon_size); - } else { - wnd.stop = null; - } - - wnd.play = gw_new_icon_button(wnd.infobar, 'icons/media-playback-start.svg', 'Play', 'icon'); - wnd.play.set_size(control_icon_size, control_icon_size); - wnd.play.state = 0; - wnd.play.add_icon('icons/media-playback-pause.svg'); - wnd.play.on_click = function() { - if (!movie_connected && !controlled_renderer) return; - if (player_control.stoped_url) { + this.media_line.set_value(0); + this.play.switch_icon(icon_play); + this.state = GF_STATE_STOP; + return; + } + if (state==GF_STATE_PAUSE) { + if (this.state==GF_STATE_STOP) return; + if (controlled_renderer) controlled_renderer.Pause(); + movie_ctrl.mediaSpeed = 0; + this.state=GF_STATE_PAUSE; + this.play.switch_icon(icon_play); + return; + } + //we are playing, resume from stop if needed + if (this.stoped_url) { if (controlled_renderer) { controlled_renderer.Play(); } else { - set_movie_url(player_control.stoped_url); + set_movie_url(this.stoped_url); } - player_control.stoped_url = null; - this.state = 0; - movie_ctrl.mediaStartTime = -1; - } else if (movie_ctrl.mediaSpeed != 1) { - this.state = 0; - } else { - this.state = this.state ? 
0 : 1; + this.stoped_url = null; + //not in trick mode, next pause/play will restart from current time + if (state != GF_STATE_TRICK) + movie_ctrl.mediaStartTime = -1; } - this.switch_icon(this.state); - if (this.state) { - if (controlled_renderer) controlled_renderer.Pause(); - movie_ctrl.mediaSpeed = 0; - } else { + + + if (state==GF_STATE_PLAY) { if (controlled_renderer) controlled_renderer.Play(); + this.state = state; movie_ctrl.mediaSpeed = 1; + this.play.switch_icon(icon_pause); + return; } + if (state==GF_STATE_TRICK) { + this.state = state; + this.play.switch_icon(icon_play); + movie_ctrl.mediaStartTime = -1; + return; + } + } + + wnd.stop = gw_new_icon_button(wnd.infobar, 'icons/media-playback-stop.svg', 'Stop', 'icon'); + wnd.stop.on_click = function() { + player_control.set_state(GF_STATE_STOP); + } + wnd.stop.set_size(small_control_icon_size, small_control_icon_size); + + + if (0) { + wnd.rewind = gw_new_icon_button(wnd.infobar, 'icons/media-seek-backward.svg', 'Rewind', 'icon'); + wnd.rewind.set_size(small_control_icon_size, small_control_icon_size); + } else { + wnd.rewind = null; + } + + wnd.play = gw_new_icon_button(wnd.infobar, 'icons/media-playback-start.svg', 'Play', 'icon'); + wnd.play.set_size(control_icon_size, control_icon_size); + wnd.state = GF_STATE_PLAY; + wnd.play.add_icon('icons/media-playback-pause.svg'); + wnd.play.on_click = function() { + player_control.set_state( (player_control.state==GF_STATE_PLAY) ? GF_STATE_PAUSE : GF_STATE_PLAY); } - wnd.play.on_long_click = function () { - var cur_url = current_url; - set_movie_url(''); - set_movie_url(cur_url); + + if (!browser_mode) { + wnd.forward = gw_new_icon_button(wnd.infobar, 'icons/media-seek-forward.svg', 'Forward', 'icon'); + wnd.forward.on_click = function() { + if (movie_ctrl.mediaSpeed) { + player_control.set_state(GF_STATE_TRICK); + movie_ctrl.mediaSpeed = 2*movie_ctrl.mediaSpeed; + } + } + wnd.forward.set_size(small_control_icon_size, small_control_icon_size); + } else { + wnd.forward = null; } wnd.media_line = gw_new_progress_bar(wnd.infobar, false, true); wnd.media_line.on_slide = function(value, type) { - if (!movie_connected && !controlled_renderer) { this.set_value(0); return; } + var duration = player_control.duration; if (!duration) return; var time = value*duration/100; @@ -667,15 +724,24 @@ function new_player_control(container) } root.children[0].set_bind = FALSE; switch (type) { + //sliding case 1: + player_control.set_state(GF_STATE_PAUSE); movie_ctrl.mediaStartTime = time; movie_ctrl.mediaSpeed = 0; break; + //done sliding case 2: + player_control.set_state(GF_STATE_PLAY); if (time!= movie_ctrl.mediaStartTime) movie_ctrl.mediaStartTime = time; movie_ctrl.mediaSpeed = 1; break; + //init slide, go in play mode default: + if (player_control.state==GF_STATE_STOP) + player_control.set_state(GF_STATE_PLAY); + + player_control.set_state(GF_STATE_PAUSE); movie_ctrl.mediaStartTime = time; break; } @@ -693,26 +759,22 @@ function new_player_control(container) wnd.time.set_size(control_icon_size, control_icon_size); wnd.time.set_width(4*wnd.time.font_size() ); - if (!browser_mode) { - wnd.forward = gw_new_icon_button(wnd.infobar, 'icons/media-seek-forward.svg', 'Forward', 'icon'); - wnd.forward.on_click = function() { - if (movie_ctrl.mediaSpeed) { - movie_ctrl.mediaSpeed = 2*movie_ctrl.mediaSpeed; - } + if (0) { + wnd.loop = gw_new_icon_button(wnd.infobar, 'vector/loop.svg', 'Loop', 'icon'); + wnd.loop.on_click = function () { + movie_ctrl.loop = movie_ctrl.loop ? 
FALSE : TRUE; } - wnd.forward.set_size(small_control_icon_size, small_control_icon_size); + wnd.loop.set_size(small_control_icon_size, small_control_icon_size); } else { - wnd.forward = null; + wnd.loop = null; } - if (1) { - wnd.view = gw_new_icon_button(wnd.infobar, 'icons/edit-find.svg', 'Navigation', 'icon'); - wnd.view.on_click = function() { - select_navigation_type(); - } - wnd.view.set_size(small_control_icon_size, small_control_icon_size); - } else { - wnd.view = null; + + wnd.view = gw_new_icon_button(wnd.infobar, 'icons/edit-find.svg', 'Navigation', 'icon'); + wnd.view.on_click = function() { + select_navigation_type(); } + wnd.view.set_size(small_control_icon_size, small_control_icon_size); + if (!browser_mode) { wnd.open = gw_new_icon_button(wnd.infobar, 'icons/folder.svg', 'Open', 'icon'); @@ -763,12 +825,12 @@ function new_player_control(container) var min_w, full_w, time_w; var control_icon_size = gwskin.default_icon_size; this.move(0, Math.floor( (height-display_height)/2) ); - + width -= control_icon_size/2; min_w = this.play.width + this.time.width; if (this.open) min_w += this.open.width; if (this.home) min_w += this.home.width; - if (this.exit) min_w += this.exit.width; + if (this.exit && gpac.fullscreen) min_w += this.exit.width; full_w = 0; if (this.snd_low) full_w += this.snd_low.width; if (this.snd_ctrl) full_w += this.snd_ctrl.width; @@ -776,21 +838,32 @@ function new_player_control(container) if (this.view) { this.view.hide(); - if (movie_connected && (gpac.navigation_type!= GF_NAVIGATE_TYPE_NONE) ) { + if (!dynamic_scene && movie_connected && (gpac.navigation_type!= GF_NAVIGATE_TYPE_NONE) ) { full_w+= this.view.width; } } if (this.duration) { + if (this.stop) full_w += this.stop.width; + if (this.play) full_w += this.play.width; if (this.rewind) full_w+= this.rewind.width; if (this.forward) full_w+= this.forward.width; + if (this.loop) full_w += this.loop.width; } - if (this.stop) full_w+= this.stop.width; if (this.remote && UPnP.MediaRenderersCount && (current_url!='')) { full_w += this.remote.width; } time_w = this.media_line.visible ? 
2*control_icon_size : 0; + + if (this.exit) { + if (gpac.fullscreen) { + this.exit.show(); + } else { + this.exit.hide(); + } + } + if (min_w + full_w + time_w < width) { if (this.media_line.visible) this.media_line.set_size(width - min_w - full_w - control_icon_size/3, control_icon_size/3); @@ -800,8 +873,9 @@ function new_player_control(container) if (this.duration) { if (this.rewind) this.rewind.show(); if (this.forward) this.forward.show(); + if (this.loop) this.loop.show(); + if (this.stop) this.stop.show(); } - if (this.stop) this.stop.show(); if (wnd.fullscreen) wnd.fullscreen.show(); if (this.remote) { @@ -812,7 +886,7 @@ function new_player_control(container) } } - if (this.view && movie_connected && (gpac.navigation_type!= GF_NAVIGATE_TYPE_NONE) ) { + if (this.view && !dynamic_scene && movie_connected && (gpac.navigation_type!= GF_NAVIGATE_TYPE_NONE) ) { this.view.show(); } } else { @@ -822,6 +896,7 @@ function new_player_control(container) if (this.rewind) this.rewind.hide(); if (this.stop) this.stop.hide(); if (this.forward) this.forward.hide(); + if (this.loop) this.loop.hide(); if (this.fullscreen) this.fullscreen.hide(); if (this.remote) this.remote.hide(); @@ -853,25 +928,29 @@ function new_player_control(container) wnd.duration = 0; wnd.set_duration = function(value) { this.duration = value; - wnd.time.show(); - wnd.media_line.show(); - if (wnd.rewind) wnd.rewind.show(); - if (wnd.stop) wnd.stop.show(); - if (wnd.forward) wnd.forward.show(); if (!value) { wnd.time.hide(); wnd.media_line.hide(); if (wnd.rewind) wnd.rewind.hide(); if (wnd.stop) wnd.stop.hide(); if (wnd.forward) wnd.forward.hide(); + if (wnd.loop) wnd.loop.hide(); wnd.time.set_size(0, control_icon_size); wnd.time.set_width(0); - } else if (value<3600) { - wnd.time.set_size(control_icon_size/2, control_icon_size); - wnd.time.set_width(3*wnd.time.font_size() ); } else { - wnd.time.set_size(control_icon_size, control_icon_size); - wnd.time.set_width(4*wnd.time.font_size() ); + wnd.time.show(); + wnd.media_line.show(); + if (wnd.rewind) wnd.rewind.show(); + if (wnd.stop) wnd.stop.show(); + if (wnd.forward) wnd.forward.show(); + if (wnd.loop) wnd.loop.show(); + if (value<3600) { + wnd.time.set_size(control_icon_size/2, control_icon_size); + wnd.time.set_width(3*wnd.time.font_size() ); + } else { + wnd.time.set_size(control_icon_size, control_icon_size); + wnd.time.set_width(4*wnd.time.font_size() ); + } } this.layout(this.width, this.height); } @@ -923,17 +1002,25 @@ function open_local_file() filebrowse.browse(gpac.last_working_directory); filebrowse.on_browse = function(value, directory) { - if (directory) gpac.last_working_directory = directory; - set_movie_url(value); - show_dock(false); + if (value==null) { + player_control.set_state(this.prev_state); + player_control.show(); + } else { + if (directory) gpac.last_working_directory = directory; + set_movie_url(value); + show_dock(false); + } } + var w = display_width/2; + if (w<200) w = display_width-20; + filebrowse.set_size(w, 3*display_height/4); + if (gpac.hardware_rgba) filebrowse.set_alpha(0.8); - filebrowse.set_size(display_width, display_height); + player_control.hide(); gpacui_show_window(filebrowse); - set_movie_url(''); -// filebrowse.set_alpha(0.8); - + filebrowse.prev_state = player_control.state; + player_control.set_state(GF_STATE_PAUSE); } urldlg = null; diff --git a/gui/gwlib.js b/gui/gwlib.js index 9c7ab05..4e4ab1d 100644 --- a/gui/gwlib.js +++ b/gui/gwlib.js @@ -420,7 +420,7 @@ gwskin.window.font_size = 14; gwskin.window.width = 320; 
gwskin.window.height = 240; gwskin.window.normal = gw_new_appearance(0.6, 0.6, 0.6); -gwskin.window.normal.texture = gw_make_gradient('vertical', [0, 0.85, 1], [0.6, 0.6, 0.6, 1, 1, 1, 0.6, 0.6, 0.6]); +//gwskin.window.normal.texture = gw_make_gradient('vertical', [0, 0.85, 1], [0.6, 0.6, 0.6, 1, 1, 1, 0.6, 0.6, 0.6]); gwskin.window.normal.skin = true; gwskin.window.text = gwskin.label.text; gwskin.window.font = gw_new_fontstyle(gwskin.window.font_size, 1); @@ -1822,8 +1822,11 @@ function gw_new_listbox(container) } start_y -= children[i].height; } - if (this.selected_idx < this.first_visible) this.selected_idx = this.first_visible; - else if (this.selected_idx > this.last_visible) this.selected_idx = this.last_visible; +// if (this.selected_idx < this.first_visible) this.selected_idx = this.first_visible; +// else if (this.selected_idx > this.last_visible) this.selected_idx = this.last_visible; + + if (this.selected_idx < this.first_visible) this.selected_idx = -1; + else if (this.selected_idx > this.last_visible) this.selected_idx = -1; } obj.add_child = function(child) { this.children[0].children[this.children[0].children.length] = child; @@ -1856,6 +1859,10 @@ function gw_new_listbox(container) return 1; case GF_EVENT_KEYDOWN: var children = this.get_children(); + + if (this.selected_idx < this.first_visible) this.selected_idx = this.first_visible; + else if (this.selected_idx > this.last_visible) this.selected_idx = this.last_visible; + if (evt.keycode=='Up') { // alert('sel '+ this.selected_idx + ' first '+this.first_visible+ ' last '+this.last_visible); if (children[this.selected_idx].translation.y + children[this.selected_idx].height/2 > this.height/2 ) { @@ -1920,6 +1927,11 @@ function gw_new_file_open(class_name) } } + dlg.on_close = function () { + if (this.on_browse) { + this.on_browse(null, false); + } + } dlg.go_up = dlg.add_tool(gwskin.images.previous, gwskin.labels.up); dlg.go_up.on_click = function() { diff --git a/include/gpac/avparse.h b/include/gpac/avparse.h index b3a816e..c5b2260 100644 --- a/include/gpac/avparse.h +++ b/include/gpac/avparse.h @@ -223,6 +223,7 @@ GF_Err gf_avc_get_sps_info(char *sps, u32 sps_size, u32 *sps_id, u32 *width, u32 GF_Err gf_avc_get_pps_info(char *pps, u32 pps_size, u32 *pps_id, u32 *sps_id); const char *gf_avc_get_profile_name(u8 video_prof); +//hevc_state is optionnal but shall be used for layer extensions since all size info is in VPS and not SPS GF_Err gf_hevc_get_sps_info(char *sps_data, u32 sps_size, u32 *sps_id, u32 *width, u32 *height, s32 *par_n, s32 *par_d); const char *gf_hevc_get_profile_name(u8 video_prof); #endif /*GPAC_DISABLE_AV_PARSERS*/ diff --git a/include/gpac/color.h b/include/gpac/color.h index 8ed7c59..5356ac7 100644 --- a/include/gpac/color.h +++ b/include/gpac/color.h @@ -227,13 +227,38 @@ typedef struct u8 high; } GF_ColorKey; -/*!\brief not done yet +/*!\brief stretches two video surfaces * + * Software stretch of source surface ont destination surface. + *\param dst destination surface + *\param src source surface + *\param dst_wnd destination rectangle. If null the entire destination surface is used + *\param src_wnd source rectangle. 
If null the entire source surface is used + *\param alpha blend factor of source over alpha + *\param flip flips the source + *\param colorKey makes source pixel matching the color key transparent + *\param cmat applies color matrix to the source + *\return error code if any */ GF_Err gf_stretch_bits(GF_VideoSurface *dst, GF_VideoSurface *src, GF_Window *dst_wnd, GF_Window *src_wnd, u8 alpha, Bool flip, GF_ColorKey *colorKey, GF_ColorMatrix * cmat); +/*!\brief copies YUV 420 10 bits to YUV destination (only YUV420 8 bits supported) + * + * Software stretch of source surface ont destination surface. + *\param vs_dst destination surface + *\param pY source Y plane + *\param pU source U plane. if NULL, the U plane is located after the Y plane + *\param pV source V plane. if NULL, the V plane is located after the U plane + *\param src_stride source stride in bytes + *\param src_width source width in pixels + *\param src_height source height in pixels + *\param src_wnd source rectangle. If null the entire source surface is used + *\return error code if any + */ +GF_Err gf_color_write_yv12_10_to_yuv(GF_VideoSurface *vs_dst, unsigned char *pY, unsigned char *pU, unsigned char*pV, u32 src_stride, u32 src_width, u32 src_height, const GF_Window *src_wnd); + /*! @} */ diff --git a/include/gpac/compositor.h b/include/gpac/compositor.h index e0ba680..9487a46 100644 --- a/include/gpac/compositor.h +++ b/include/gpac/compositor.h @@ -55,7 +55,7 @@ void gf_sc_set_fps(GF_Compositor *sr, Double fps); GF_Err gf_sc_set_scene(GF_Compositor *sr, GF_SceneGraph *scene_graph); /*if the compositor doesn't use its own thread for visual, this will perform a render pass*/ -Bool gf_sc_draw_frame(GF_Compositor *sr); +Bool gf_sc_draw_frame(GF_Compositor *sr, u32 *ms_till_next); /*inits rendering info for the node - shall be called for all nodes the parent system doesn't handle*/ void gf_sc_on_node_init(GF_Compositor *sr, GF_Node *node); @@ -68,6 +68,8 @@ void gf_sc_invalidate(GF_Compositor *sr, GF_Node *byObj); /*return the compositor time - this is the time every time line syncs on*/ u32 gf_sc_get_clock(GF_Compositor *sr); +//signals the node is about to be destroyed (called after the node destructor if any). If node is NULL, SG will be set to indicate the entire scene graph is about to be reset +void gf_sc_node_destroy(GF_Compositor *compositor, GF_Node *node, GF_SceneGraph *sg); /*locks/unlocks the visual scene rendering - modification of the scene tree shall only happen when scene compositor is locked*/ void gf_sc_lock(GF_Compositor *sr, Bool doLock); diff --git a/include/gpac/configuration.h b/include/gpac/configuration.h index 58d7fa8..d36dc4c 100644 --- a/include/gpac/configuration.h +++ b/include/gpac/configuration.h @@ -31,20 +31,17 @@ /*this file defines all common macros for libgpac compilation except for symbian32 which uses .mmp directives ... 
*/ -#if defined(WIN32) || defined(_WIN32_WCE) || defined(GPAC_CONFIG_DARWIN) /*visual studio and xcode*/ -/*enables GPAC fixed point*/ -//#define GPAC_FIXED_POINT + +/*visual studio and xcode*/ +#if defined(WIN32) || defined(_WIN32_WCE) || defined(GPAC_CONFIG_DARWIN) /*enables GPAC memory tracking in debug mode only*/ #if defined(DEBUG) || defined(_DEBUG) #define GPAC_MEMORY_TRACKING #endif -/*platform is big endian*/ -//#define GPAC_BIG_ENDIAN - -/*SSL enabled*/ +/*SSL enabled - no 64 bit support yet*/ #if defined(WIN32) && !defined(_WIN64) #define GPAC_HAS_SSL #endif @@ -53,47 +50,48 @@ #define GPAC_HAS_SPIDERMONKEY #ifdef GPAC_CONFIG_DARWIN #define MOZILLA_1_8_BRANCH +#define XP_UNIX #endif -/*zlib enabled*/ -//#define GPAC_DISABLE_ZLIB - /*libjpeg enabled*/ #define GPAC_HAS_JPEG - /*pnj enabled*/ #define GPAC_HAS_PNG /*IPv6 enabled - for win32, this is evaluated at compile time, !! do not uncomment !!*/ -//#define GPAC_HAS_IPV6 -/*3D compositor disabled*/ -#ifdef GPAC_CONFIG_DARWIN -//#define GPAC_DISABLE_3D -#endif -/*use TinyGL instead of OpenGL*/ -//#define GPAC_USE_TINYGL +//iOS compilation +#if defined(GPAC_CONFIG_DARWIN) && defined(GPAC_IPHONE) -/*use OpenGL ES instead of OpenGL*/ -#ifdef GPAC_CONFIG_DARWIN #define GPAC_USE_OGL_ES #define GPAC_FIXED_POINT -#ifdef GPAC_IPHONE #define GPAC_HAS_GLU -#endif -#endif - /*lazy definition of extra libs for iOS*/ -#if defined(GPAC_IPHONE) #define GPAC_HAS_FAAD //#define GPAC_HAS_MAD #define GPAC_HAS_SDL #define GPAC_HAS_FREETYPE + +#endif //end iOS flags + + +//OSX compilation +#if defined(GPAC_CONFIG_DARWIN) && !defined(GPAC_IPHONE) + +#define GPAC_HAS_IPV6 +#define GPAC_HAS_SSL + +#ifdef __LP64__ +#define GPAC_64_BITS #endif +#endif //end OSX flags + + +//WinCE flags #if defined(_WIN32_WCE) #ifndef GPAC_FIXED_POINT @@ -113,11 +111,11 @@ #define GPAC_USE_OGL_ES #endif -#endif /*_WIN32_WCE*/ +#endif //WinCE flags -#endif /*defined(WIN32) || defined(_WIN32_WCE)*/ +#endif /*defined(WIN32) || defined(_WIN32_WCE) || defined(GPAC_CONFIG_DARWIN)*/ #if defined(__SYMBIAN32__) @@ -235,5 +233,8 @@ /*disables dashclient */ //#define GPAC_DISABLE_DASH_CLIENT +/*disables Timed Text support */ +//#define GPAC_DISABLE_TTXT + #endif /*_GF_CONFIG_H_*/ diff --git a/include/gpac/constants.h b/include/gpac/constants.h index 1a98020..8a648f0 100644 --- a/include/gpac/constants.h +++ b/include/gpac/constants.h @@ -276,7 +276,7 @@ enum GPAC_OTI_VIDEO_HEVC = 0x23, /*!OTI for H264-SVC streams*/ GPAC_OTI_VIDEO_SVC = 0x24, - /*!OTI for H264-SVC streams*/ + /*!OTI for HEVC layered streams*/ GPAC_OTI_VIDEO_SHVC = 0x25, /*!OTI for MPEG-4 AAC streams*/ GPAC_OTI_AUDIO_AAC_MPEG4 = 0x40, diff --git a/include/gpac/dash.h b/include/gpac/dash.h index c82c1c2..7a6f555 100644 --- a/include/gpac/dash.h +++ b/include/gpac/dash.h @@ -174,6 +174,9 @@ void *gf_dash_get_group_udta(GF_DashClient *dash, u32 group_index); /*indicates whether a group is selected for playback or not. Currently groups cannot be selected during playback*/ Bool gf_dash_is_group_selected(GF_DashClient *dash, u32 group_index); +/*indicates whether a group can be selected for playback or not. Some groups may have been disabled because of non supported features*/ +Bool gf_dash_is_group_selectable(GF_DashClient *dash, u32 idx); + /*selects a group for playback. If other groups are alternate to this group (through the @group attribute), they are automatically deselected. 
*/ void gf_dash_group_select(GF_DashClient *dash, u32 idx, Bool select); @@ -270,8 +273,8 @@ u32 gf_dash_get_min_buffer_time(GF_DashClient *dash); //shifts UTC clock of server by shift_utc_ms so that new UTC in MPD is old + shift_utc_ms void gf_dash_set_utc_shift(GF_DashClient *dash, s32 shift_utc_ms); -//sets max resolution for all video -GF_Err gf_dash_set_max_resolution(GF_DashClient *dash, u32 width, u32 height); +//sets max resolution@bpp for all video +GF_Err gf_dash_set_max_resolution(GF_DashClient *dash, u32 width, u32 height, u8 max_display_bpp); //sets min time in ms between a 404 and the next request on the same group. The default value is 500 ms. GF_Err gf_dash_set_min_timeout_between_404(GF_DashClient *dash, u32 min_timeout_between_404); @@ -279,6 +282,13 @@ GF_Err gf_dash_set_min_timeout_between_404(GF_DashClient *dash, u32 min_timeout_ //sets time in ms after which 404 request for a segment will indicate segment lost. The default value is 100 ms. GF_Err gf_dash_set_segment_expiration_threshold(GF_DashClient *dash, u32 expire_after_ms); + +//only enables the given group - this shall be set before calling @gf_dash_open. If group_index is <0 (default) no groups will be disabled. +void gf_dash_debug_group(GF_DashClient *dash, s32 group_index); + +//indicates typical buffering used by the user app . This allows fetching data earlier in live mode, if the timeshiftbuffer allows for it +void gf_dash_set_user_buffer(GF_DashClient *dash, u32 buffer_time_ms); + #endif //GPAC_DISABLE_DASH_CLIENT diff --git a/include/gpac/events.h b/include/gpac/events.h index 789e8cd..a8e17aa 100644 --- a/include/gpac/events.h +++ b/include/gpac/events.h @@ -223,6 +223,15 @@ typedef struct Bool is_connected; } GF_EventConnect; +/*event proc return value: 1 to indicate the terminal should attempt a default layout for this addon, 0: nothing will be done*/ +typedef struct +{ + /*GF_EVENT_ADDON_DETECTED*/ + u8 type; + const char *addon_url; + const char *mime_type; +} GF_EventAddonConnect; + /*event proc return value: 1 if info has been completed, 0 otherwise (and operation this request was for will then fail)*/ typedef struct @@ -304,6 +313,7 @@ typedef union GF_EventMutation mutation; GF_EventForwarded forwarded_event; GF_EventOpenFile open_file; + GF_EventAddonConnect addon_connect; } GF_Event; diff --git a/include/gpac/events_constants.h b/include/gpac/events_constants.h index c512d19..ec644a9 100644 --- a/include/gpac/events_constants.h +++ b/include/gpac/events_constants.h @@ -228,6 +228,8 @@ typedef enum { /* Events for Keyboad */ GF_EVENT_TEXT_EDITING_START, GF_EVENT_TEXT_EDITING_END, + + GF_EVENT_ADDON_DETECTED, } GF_EventType; /*GPAC/DOM3 key codes*/ diff --git a/include/gpac/html5_media.h b/include/gpac/html5_media.h index 9d6ab0a..7ed55fe 100644 --- a/include/gpac/html5_media.h +++ b/include/gpac/html5_media.h @@ -138,6 +138,7 @@ typedef struct _timerange JSObject *_this; GF_List *times; + u32 timescale; } GF_HTML_MediaTimeRanges; typedef enum { @@ -160,11 +161,13 @@ typedef enum { u32 packet_index; /* index of MSE Packets*/\ GF_Mutex *buffer_mutex;\ Bool last_dts_set; \ - double last_dts; /* MSE last decode timestamp */ \ + u64 last_dts; /* MSE last decode timestamp (in timescale units)*/ \ + u32 last_dur; /* MSE last frame duration (in timescale units)*/ \ Bool highest_pts_set; \ - double highest_pts; /* MSE highest presentation timestamp */ \ + u64 highest_pts; /* MSE highest presentation timestamp (in timescale units)*/ \ Bool needs_rap; /* MSE need random access point flag */ \ u32 
timescale; /* used by time stamps in MSE Packets */ \ + s64 timestampOffset; /* MSE SourceBuffer value (in timescale units) */ \ /* standard HTML properties */ \ GF_HTML_TrackType type;\ char *id;\ @@ -219,9 +222,9 @@ typedef struct /* JavaScript counterpart */ JSObject *_this; - GF_HTML_MediaTimeRanges buffered; - GF_HTML_MediaTimeRanges seekable; - GF_HTML_MediaTimeRanges played; + GF_HTML_MediaTimeRanges *buffered; + GF_HTML_MediaTimeRanges *seekable; + GF_HTML_MediaTimeRanges *played; Bool paused; GF_HTML_MediaControllerPlaybackState playbackState; double defaultPlaybackRate; @@ -250,7 +253,7 @@ typedef struct /* crossOrigin: "must reflect the content of the attribute of the same name", use the node */ /* networkState: retrieved dynamically from GPAC Service */ /* preload: "must reflect the content of the attribute of the same name", use the node */ - GF_HTML_MediaTimeRanges buffered; + GF_HTML_MediaTimeRanges *buffered; /* ready state */ /* readyState: retrieved from GPAC Media Object dynamically */ Bool seeking; @@ -261,8 +264,8 @@ typedef struct char *startDate; Bool paused; double defaultPlaybackRate; - GF_HTML_MediaTimeRanges played; - GF_HTML_MediaTimeRanges seekable; + GF_HTML_MediaTimeRanges *played; + GF_HTML_MediaTimeRanges *seekable; /* ended: retrieved from the state of GPAC Media Object */ /* autoplay: "must reflect the content of the attribute of the same name", use the node */ /* loop: "must reflect the content of the attribute of the same name", use the node */ @@ -305,9 +308,13 @@ typedef struct /* * TimeRanges */ -GF_Err gf_media_time_ranges_add(GF_HTML_MediaTimeRanges *timeranges, double start, double end); +GF_HTML_MediaTimeRanges *gf_html_timeranges_new(u32 timescale); +GF_Err gf_html_timeranges_add_start(GF_HTML_MediaTimeRanges *timeranges, u64 start); +GF_Err gf_html_timeranges_add_end(GF_HTML_MediaTimeRanges *timeranges, u64 end); void gf_html_timeranges_reset(GF_HTML_MediaTimeRanges *range); void gf_html_timeranges_del(GF_HTML_MediaTimeRanges *range); +GF_HTML_MediaTimeRanges *gf_html_timeranges_intersection(GF_HTML_MediaTimeRanges *a, GF_HTML_MediaTimeRanges *b); +GF_HTML_MediaTimeRanges *gf_html_timeranges_union(GF_HTML_MediaTimeRanges *a, GF_HTML_MediaTimeRanges *b); /* * HTML5 TrackList @@ -331,7 +338,6 @@ void gf_html_track_del(GF_HTML_Track *track); */ GF_HTML_MediaElement *gf_html_media_element_new(GF_Node *media_node, GF_HTML_MediaController *mc); void gf_html_media_element_del(GF_HTML_MediaElement *me); -GF_DOMEventTarget *gf_html_media_get_event_target_from_node(GF_Node *n); void html_media_element_js_init(JSContext *c, JSObject *new_obj, GF_Node *n); diff --git a/include/gpac/html5_mse.h b/include/gpac/html5_mse.h index ce43312..c739882 100644 --- a/include/gpac/html5_mse.h +++ b/include/gpac/html5_mse.h @@ -66,29 +66,37 @@ typedef struct /* MSE defined properties */ Bool updating; - GF_HTML_MediaTimeRanges buffered; - double timestampOffset; + GF_HTML_MediaTimeRanges *buffered; + s64 timestampOffset; double appendWindowStart; double appendWindowEnd; u32 timescale; GF_HTML_MediaSource_AppendState append_state; Bool buffer_full_flag; + /* Mode used to append media data: + - "segments" uses the timestamps in the media, + - "sequence" ignores them and appends just after the previous data */ GF_HTML_MediaSource_AppendMode append_mode; - double group_start_timestamp; + + /* time (in timescale units) of the first frame in the group */ + u64 group_start_timestamp; Bool group_start_timestamp_flag; - double highest_end_timestamp; - Bool 
highest_end_timestamp_set; + /* time (in timescale units) of the frame end time (start + duration) in the group */ + u64 group_end_timestamp; + Bool group_end_timestamp_set; + Bool first_init_segment; - double remove_start; - double remove_end; + /* times (in timescale units) of the frames to be removed */ + u64 remove_start; + u64 remove_end; /* * GPAC internal objects */ - /* Media tracks associated to this source buffer */ + /* Media tracks (GF_HTML_Track) associated to this source buffer */ GF_List *tracks; /* Buffers to parse */ GF_List *input_buffer; @@ -107,6 +115,7 @@ typedef struct Bool parser_connected; /* Threads used to asynchronously parse the buffer and remove media data */ + GF_List *threads; GF_Thread *parser_thread; GF_Thread *remove_thread; @@ -178,16 +187,19 @@ void gf_mse_mediasource_del(GF_HTML_MediaSource *ms, Bool del_js); void gf_mse_mediasource_open(GF_HTML_MediaSource *ms, struct _mediaobj *mo); void gf_mse_mediasource_close(GF_HTML_MediaSource *ms); void gf_mse_mediasource_end(GF_HTML_MediaSource *ms); +void gf_mse_mediasource_add_source_buffer(GF_HTML_MediaSource *ms, GF_HTML_SourceBuffer *sb); GF_HTML_SourceBuffer *gf_mse_source_buffer_new(GF_HTML_MediaSource *mediasource); +void gf_mse_source_buffer_set_timestampOffset(GF_HTML_SourceBuffer *sb, double d); +void gf_mse_source_buffer_set_timescale(GF_HTML_SourceBuffer *sb, u32 timescale); GF_Err gf_mse_source_buffer_load_parser(GF_HTML_SourceBuffer *sourcebuffer, const char *mime); -void gf_mse_add_source_buffer(GF_HTML_MediaSource *ms, GF_HTML_SourceBuffer *sb); +GF_Err gf_mse_remove_source_buffer(GF_HTML_MediaSource *ms, GF_HTML_SourceBuffer *sb); void gf_mse_source_buffer_del(GF_HTML_SourceBuffer *sb); GF_Err gf_mse_source_buffer_abort(GF_HTML_SourceBuffer *sb); void gf_mse_source_buffer_append_arraybuffer(GF_HTML_SourceBuffer *sb, GF_HTML_ArrayBuffer *buffer); void gf_mse_source_buffer_update_buffered(GF_HTML_SourceBuffer *sb); -u32 gf_mse_source_buffer_remove(void *par); - +void gf_mse_remove(GF_HTML_SourceBuffer *sb, double start, double end); + typedef struct { char *data; diff --git a/include/gpac/ietf.h b/include/gpac/ietf.h index e33f1c7..67afcf7 100644 --- a/include/gpac/ietf.h +++ b/include/gpac/ietf.h @@ -1076,6 +1076,7 @@ enum GF_RTP_PAYT_H264_SVC, /*use HEVC/H265 transport - no RFC yet, only draft*/ GF_RTP_PAYT_HEVC, + GF_RTP_PAYT_SHVC }; diff --git a/include/gpac/internal/compositor_dev.h b/include/gpac/internal/compositor_dev.h index e35b5d3..eae690d 100644 --- a/include/gpac/internal/compositor_dev.h +++ b/include/gpac/internal/compositor_dev.h @@ -99,9 +99,10 @@ typedef struct Bool npot_texture; Bool rect_texture; Bool point_sprite; - Bool vbo; + Bool vbo, pbo; u32 yuv_texture; Bool has_shaders; + s32 max_texture_size; } GLCaps; #endif @@ -166,6 +167,8 @@ struct __tag_compositor /*0: not init, 1: running, 2: exit requested, 3: done*/ u32 video_th_state; + u32 video_th_id; + /*compositor exclusive access to the scene and display*/ GF_Mutex *mx; @@ -185,11 +188,12 @@ struct __tag_compositor /*all textures (texture handlers)*/ GF_List *textures; -#ifdef GF_SR_EVENT_QUEUE + /*all textures to be destroyed (needed for openGL context ...)*/ + GF_List *textures_gc; + /*event queue*/ - GF_List *events; - GF_Mutex *ev_mx; -#endif + GF_List *event_queue, *event_queue_back; + GF_Mutex *evq_mx; Bool video_setup_failed; @@ -207,10 +211,11 @@ struct __tag_compositor Bool show_caret; Bool text_edit_changed; u32 scene_sampled_clock; - u32 last_click_time; u32 next_frame_delay; s32 frame_delay; + Bool 
video_frame_pending; + Bool fullscreen_postponed; /*display size*/ u32 display_width, display_height; @@ -256,7 +261,7 @@ struct __tag_compositor /*options*/ u32 aspect_ratio, antiAlias, texture_text_mode; Bool high_speed, stress_mode; - Bool was_opengl; + Bool is_opengl; Bool autoconfig_opengl; u32 force_opengl_2d; #ifdef OPENGL_RASTER @@ -475,6 +480,8 @@ struct __tag_compositor Bool disable_gl_cull; /*YUV textures in OpenGL are disabled (soft YUV->RGB )*/ Bool disable_yuvgl; + //use PBO to start pushing textures at the begining of the render pass + Bool enable_pbo; u32 default_navigation_mode; @@ -532,6 +539,17 @@ struct __tag_compositor #endif }; +typedef struct +{ + GF_Event evt; + GF_DOM_Event dom_evt; + GF_Node *node; + GF_DOMEventTarget *target; + GF_SceneGraph *sg; +} GF_QueuedEvent; + +void gf_sc_queue_dom_event(GF_Compositor *compositor, GF_Node *node, GF_DOM_Event *evt); +void gf_sc_queue_dom_event_on_target(GF_Compositor *compositor, GF_DOM_Event *evt, GF_DOMEventTarget *target, GF_SceneGraph *sg); /*base stack for timed nodes (nodes that activate themselves at given times) @UpdateTimeNode: shall be setup by the node handler and is called once per simulation frame @@ -639,6 +657,8 @@ GF_TextureHandler *gf_sc_texture_get_handler(GF_Node *n); /*returns 1 if url changed from current one*/ Bool gf_sc_texture_check_url_change(GF_TextureHandler *txh, MFURL *url); +/* opens associated object */ +GF_Err gf_sc_texture_open(GF_TextureHandler *txh, MFURL *url, Bool lock_scene_timeline); /*starts associated object*/ GF_Err gf_sc_texture_play(GF_TextureHandler *txh, MFURL *url); GF_Err gf_sc_texture_play_from_to(GF_TextureHandler *txh, MFURL *url, Double start_offset, Double end_offset, Bool can_loop, Bool lock_scene_timeline); @@ -651,6 +671,7 @@ void gf_sc_texture_update_frame(GF_TextureHandler *txh, Bool disable_resync); /*release video memory if needed*/ void gf_sc_texture_release_stream(GF_TextureHandler *txh); +void gf_sc_texture_cleanup_hw(GF_Compositor *compositor); /*sensor node handler - this is not defined as a stack because Anchor is both a grouping node and a @@ -1385,11 +1406,16 @@ GF_Err gf_sc_set_scene_size(GF_Compositor *compositor, u32 Width, u32 Height, Bo Bool gf_sc_use_raw_texture(GF_Compositor *compositor); -void gf_sc_get_av_caps(GF_Compositor *compositor, u32 *width, u32 *height, u32 *bpp, u32 *channels, u32 *sample_rate); +void gf_sc_get_av_caps(GF_Compositor *compositor, u32 *width, u32 *height, u32 *display_bit_depth, u32 *audio_bpp, u32 *channels, u32 *sample_rate); //signals the compositor a system frame is pending on a future frame void gf_sc_set_system_pending_frame(GF_Compositor *compositor, Bool frame_pending); +//indicates a video frame is pending - this is used fo decoders dispatching their internal memory in order to wake up the compositor asap +void gf_sc_set_video_pending_frame(GF_Compositor *compositor); + +Bool gf_sc_is_over(GF_Compositor *compositor, GF_SceneGraph *scene_graph); + #ifdef __cplusplus } #endif diff --git a/include/gpac/internal/isomedia_dev.h b/include/gpac/internal/isomedia_dev.h index 015f8a0..06a2b0c 100644 --- a/include/gpac/internal/isomedia_dev.h +++ b/include/gpac/internal/isomedia_dev.h @@ -164,6 +164,15 @@ enum GF_ISOM_BOX_TYPE_PSSH = GF_4CC( 'p', 's', 's', 'h' ), GF_ISOM_BOX_TYPE_TENC = GF_4CC( 't', 'e', 'n', 'c' ), + /*Adobe's protection boxes*/ + GF_ISOM_BOX_TYPE_ADKM = GF_4CC( 'a', 'd', 'k', 'm' ), + GF_ISOM_BOX_TYPE_AHDR = GF_4CC( 'a', 'h', 'd', 'r' ), + GF_ISOM_BOX_TYPE_ADAF = GF_4CC( 'a', 'd', 'a', 'f' ), + 
GF_ISOM_BOX_TYPE_APRM = GF_4CC( 'a', 'p', 'r', 'm' ), + GF_ISOM_BOX_TYPE_AEIB = GF_4CC( 'a', 'e', 'i', 'b' ), + GF_ISOM_BOX_TYPE_AKEY = GF_4CC( 'a', 'k', 'e', 'y' ), + GF_ISOM_BOX_TYPE_FLXS = GF_4CC( 'f', 'l', 'x', 's' ), + #ifndef GPAC_DISABLE_ISOM_FRAGMENTS /*Movie Fragments*/ GF_ISOM_BOX_TYPE_MVEX = GF_4CC( 'm', 'v', 'e', 'x' ), @@ -1549,6 +1558,7 @@ typedef struct struct __oma_kms_box *okms; struct __cenc_tenc_box *tenc; struct __piff_tenc_box *piff_tenc; + struct __adobe_drm_key_management_system_box *adkm; } GF_SchemeInformationBox; typedef struct __tag_protect_box @@ -2301,6 +2311,58 @@ GF_Err gf_isom_get_sample_cenc_info_ex(GF_TrackBox *trak, GF_TrackFragmentBox *t GF_Err senc_Parse(GF_BitStream *bs, GF_TrackBox *trak, GF_TrackFragmentBox *traf, GF_SampleEncryptionBox *ptr); +/* + Boxes for Adobe's protection scheme +*/ +typedef struct __adobe_enc_info_box +{ + GF_ISOM_FULL_BOX + char *enc_algo; /*spec: The encryption algorithm shall be 'AES-CBC'*/ + u8 key_length; +} GF_AdobeEncryptionInfoBox; + +typedef struct __adobe_flash_access_params_box +{ + GF_ISOM_BOX + char *metadata; /*base-64 encoded metadata used by the DRM client to retrieve decrypted key*/ +} GF_AdobeFlashAccessParamsBox; + +typedef struct __adobe_key_info_box +{ + GF_ISOM_FULL_BOX + GF_AdobeFlashAccessParamsBox * params; /*spec: APSParamsBox will no longer be produced by conformaing applications*/ +} GF_AdobeKeyInfoBox; + +typedef struct __adobe_std_enc_params_box +{ + GF_ISOM_FULL_BOX + GF_AdobeEncryptionInfoBox *enc_info; + GF_AdobeKeyInfoBox *key_info; +} GF_AdobeStdEncryptionParamsBox; + +typedef struct __adobe_drm_header_box +{ + GF_ISOM_FULL_BOX + GF_AdobeStdEncryptionParamsBox *std_enc_params; + //AdobeSignatureBox *signature; /*AdobeSignatureBox is not described*/ +} GF_AdobeDRMHeaderBox; + + +typedef struct __adobe_drm_au_format_box +{ + GF_ISOM_FULL_BOX + u8 selective_enc; + u8 IV_length; +} GF_AdobeDRMAUFormatBox; + +typedef struct __adobe_drm_key_management_system_box +{ + GF_ISOM_FULL_BOX + GF_AdobeDRMHeaderBox *header; + GF_AdobeDRMAUFormatBox *au_format; +} GF_AdobeDRMKeyManagementSystemBox; + + typedef struct { GF_ISOM_FULL_BOX @@ -2640,8 +2702,7 @@ GF_Err stbl_RemovePaddingBits(GF_SampleTableBox *stbl, u32 SampleNumber); GF_Err stbl_RemoveSampleFragments(GF_SampleTableBox *stbl, u32 sampleNumber); GF_Err stbl_RemoveRedundant(GF_SampleTableBox *stbl, u32 SampleNumber); -/*expands sampleGroup table for the given grouping type and sample_number. 
If sample_number is 0, just appends an entry at the end of the table*/ -GF_Err gf_isom_add_sample_group_entry(GF_List *sampleGroups, u32 sample_number, u32 grouping_type, u32 sampleGroupDescriptionIndex); +GF_Err gf_isom_copy_sample_group_entry_to_traf(GF_TrackFragmentBox *traf, GF_SampleTableBox *stbl, u32 grouping_type, u32 sampleGroupDescriptionIndex, Bool sgpd_in_traf); #ifndef GPAC_DISABLE_ISOM_FRAGMENTS GF_Err gf_isom_close_fragments(GF_ISOFile *movie); @@ -2679,7 +2740,9 @@ GF_Err traf_AddBox(GF_Box *s, GF_Box *a); /*rewrites avcC based on the given esd - this destroys the esd*/ GF_Err AVC_HEVC_UpdateESD(GF_MPEGVisualSampleEntryBox *avc, GF_ESD *esd); +void AVC_RewriteESDescriptorEx(GF_MPEGVisualSampleEntryBox *avc, GF_MediaBox *mdia); void AVC_RewriteESDescriptor(GF_MPEGVisualSampleEntryBox *avc); +void HEVC_RewriteESDescriptorEx(GF_MPEGVisualSampleEntryBox *avc, GF_MediaBox *mdia); void HEVC_RewriteESDescriptor(GF_MPEGVisualSampleEntryBox *avc); GF_Err reftype_AddRefTrack(GF_TrackReferenceTypeBox *ref, u32 trackID, u16 *outRefIndex); @@ -4061,6 +4124,67 @@ GF_Box *prft_New(); GF_Err prft_Read(GF_Box *s,GF_BitStream *bs); GF_Err prft_dump(GF_Box *a, FILE * trace); +//exported for sgpd comparison in traf merge +void sgpd_write_entry(u32 grouping_type, void *entry, GF_BitStream *bs); + +/* + Adobe's protection boxes +*/ + +GF_Box *adkm_New(); +void adkm_del(GF_Box *s); +GF_Err adkm_AddBox(GF_Box *s, GF_Box *a); +GF_Err adkm_Read(GF_Box *s, GF_BitStream *bs); +GF_Err adkm_Write(GF_Box *s, GF_BitStream *bs); +GF_Err adkm_Size(GF_Box *s); +GF_Err adkm_dump(GF_Box *a, FILE * trace); + +GF_Box *ahdr_New(); +void ahdr_del(GF_Box *s); +GF_Err ahdr_AddBox(GF_Box *s, GF_Box *a); +GF_Err ahdr_Read(GF_Box *s, GF_BitStream *bs); +GF_Err ahdr_Write(GF_Box *s, GF_BitStream *bs); +GF_Err ahdr_Size(GF_Box *s); +GF_Err ahdr_dump(GF_Box *a, FILE * trace); + +GF_Box *aprm_New(); +void aprm_del(GF_Box *s); +GF_Err aprm_AddBox(GF_Box *s, GF_Box *a); +GF_Err aprm_Read(GF_Box *s, GF_BitStream *bs); +GF_Err aprm_Write(GF_Box *s, GF_BitStream *bs); +GF_Err aprm_Size(GF_Box *s); +GF_Err aprm_dump(GF_Box *a, FILE * trace); + +GF_Box *aeib_New(); +void aeib_del(GF_Box *s); +GF_Err aeib_Read(GF_Box *s, GF_BitStream *bs); +GF_Err aeib_Write(GF_Box *s, GF_BitStream *bs); +GF_Err aeib_Size(GF_Box *s); +GF_Err aeib_dump(GF_Box *a, FILE * trace); + +GF_Box *akey_New(); +void akey_del(GF_Box *s); +GF_Err akey_AddBox(GF_Box *s, GF_Box *a); +GF_Err akey_Read(GF_Box *s, GF_BitStream *bs); +GF_Err akey_Write(GF_Box *s, GF_BitStream *bs); +GF_Err akey_Size(GF_Box *s); +GF_Err akey_dump(GF_Box *a, FILE * trace); + +GF_Box *flxs_New(); +void flxs_del(GF_Box *s); +GF_Err flxs_Read(GF_Box *s, GF_BitStream *bs); +GF_Err flxs_Write(GF_Box *s, GF_BitStream *bs); +GF_Err flxs_Size(GF_Box *s); +GF_Err flxs_dump(GF_Box *a, FILE * trace); + + +GF_Box *adaf_New(); +void adaf_del(GF_Box *s); +GF_Err adaf_Read(GF_Box *s, GF_BitStream *bs); +GF_Err adaf_Write(GF_Box *s, GF_BitStream *bs); +GF_Err adaf_Size(GF_Box *s); +GF_Err adaf_dump(GF_Box *a, FILE * trace); + #endif /*GPAC_DISABLE_ISOM*/ #ifdef __cplusplus diff --git a/include/gpac/internal/media_dev.h b/include/gpac/internal/media_dev.h index 710a3c8..a472411 100644 --- a/include/gpac/internal/media_dev.h +++ b/include/gpac/internal/media_dev.h @@ -300,6 +300,8 @@ typedef struct u32 num_units_in_tick, time_scale; Bool poc_proportional_to_timing_flag; u32 num_ticks_poc_diff_one_minus1; + + u32 rep_format_idx; } HEVC_SPS; typedef struct @@ -320,25 +322,50 @@ typedef struct u32 
column_width[22], row_height[20]; } HEVC_PPS; +typedef struct RepFormat +{ + u32 chroma_format_idc; + u32 pic_width_luma_samples; + u32 pic_height_luma_samples; + u32 bit_depth_luma; + u32 bit_depth_chroma; + u8 separate_colour_plane_flag; +} HEVC_RepFormat; + typedef struct { u16 avg_bit_rate, max_bit_rate, avg_pic_rate; u8 constand_pic_rate_idc; } HEVC_RateInfo; + +#define MAX_SHVC_LAYERS 4 typedef struct { s32 id; /*used to discard repeated SPSs - 0: not parsed, 1 parsed, 2 stored*/ u32 state; u32 crc; - u8 max_sub_layer; + u32 max_layers, max_sub_layers, max_layer_id, num_layer_sets; Bool temporal_id_nesting; HEVC_ProfileTierLevel ptl; HEVC_SublayerPTL sub_ptl[8]; HEVC_RateInfo rates[8]; + + u32 scalability_mask[16]; + u32 dimension_id[MAX_SHVC_LAYERS][16]; + u32 layer_id_in_nuh[MAX_SHVC_LAYERS]; + u32 layer_id_in_vps[MAX_SHVC_LAYERS]; + + + u32 profile_level_tier_idx[MAX_SHVC_LAYERS]; + HEVC_ProfileTierLevel ext_ptl[MAX_SHVC_LAYERS]; + + u32 num_rep_formats; + HEVC_RepFormat rep_formats[16]; + u32 rep_format_idx[16]; } HEVC_VPS; typedef struct @@ -369,7 +396,7 @@ typedef struct HEVC_PPS *pps; } HEVCSliceInfo; -typedef struct +typedef struct _hevc_state { HEVC_SPS sps[16]; /* range allowed in the spec is 0..15 */ s8 sps_active_idx; /*currently active sps; must be initalized to -1 in order to discard not yet decodable SEIs*/ @@ -397,6 +424,7 @@ s32 gf_media_hevc_parse_nalu(GF_BitStream *bs, HEVCState *hevc, u8 *nal_unit_typ Bool gf_media_hevc_slice_is_intra(HEVCState *hevc); Bool gf_media_hevc_slice_is_IDR(HEVCState *hevc); +GF_Err gf_hevc_get_sps_info_with_state(HEVCState *hevc_state, char *sps_data, u32 sps_size, u32 *sps_id, u32 *width, u32 *height, s32 *par_n, s32 *par_d); #endif /*GPAC_DISABLE_AV_PARSERS*/ diff --git a/include/gpac/internal/mpd.h b/include/gpac/internal/mpd.h index 616e2c4..3eb9848 100644 --- a/include/gpac/internal/mpd.h +++ b/include/gpac/internal/mpd.h @@ -304,6 +304,7 @@ typedef struct { GF_MPD_Type type; u64 availabilityStartTime; /* expressed in milliseconds */ /*MANDATORY if type=dynamic*/ u64 availabilityEndTime;/* expressed in milliseconds */ + u64 publishTime;/* expressed in milliseconds */ u32 media_presentation_duration; /* expressed in milliseconds */ /*MANDATORY if type=static*/ u32 minimum_update_period; /* expressed in milliseconds */ u32 min_buffer_time; /* expressed in milliseconds */ /*MANDATORY*/ diff --git a/include/gpac/internal/reedsolomon.h b/include/gpac/internal/reedsolomon.h index 1923f47..891450c 100644 --- a/include/gpac/internal/reedsolomon.h +++ b/include/gpac/internal/reedsolomon.h @@ -47,7 +47,7 @@ extern int pBytes[MAXDEG]; extern int synBytes[MAXDEG]; /* print debugging info */ -extern int DEBUG; +extern int RS_DEBUG; /* Reed Solomon encode/decode routines */ void initialize_ecc (void); diff --git a/include/gpac/internal/scenegraph_dev.h b/include/gpac/internal/scenegraph_dev.h index da4abc0..ae9f6d5 100644 --- a/include/gpac/internal/scenegraph_dev.h +++ b/include/gpac/internal/scenegraph_dev.h @@ -178,8 +178,6 @@ struct __tag_scene_graph /*real scene time callback*/ Double (*GetSceneTime)(void *userpriv); - - /*parent scene if any*/ struct __tag_scene_graph *parent_scene; @@ -1127,7 +1125,7 @@ void gf_dom_event_target_del(GF_DOMEventTarget *target); GF_Err gf_dom_event_remove_listener_from_parent(GF_DOMEventTarget *event_target, GF_Node *listener); /* returns associated DOMEventtarget for an HTML/SVG media element, or NULL otherwise*/ -GF_DOMEventTarget *gf_html_media_get_event_target_from_node(GF_Node *n); +GF_DOMEventTarget 
*gf_dom_event_get_target_from_node(GF_Node *n); #ifdef __cplusplus } diff --git a/include/gpac/internal/smjs_api.h b/include/gpac/internal/smjs_api.h index 5bf5ca7..9e29f28 100644 --- a/include/gpac/internal/smjs_api.h +++ b/include/gpac/internal/smjs_api.h @@ -34,6 +34,10 @@ #endif #endif +#if defined(DEBUG) && defined(GPAC_CONFIG_DARWIN) +#undef DEBUG +#endif + #include #ifndef JS_VERSION diff --git a/include/gpac/internal/terminal_dev.h b/include/gpac/internal/terminal_dev.h index 855a3b3..34f080d 100644 --- a/include/gpac/internal/terminal_dev.h +++ b/include/gpac/internal/terminal_dev.h @@ -47,6 +47,7 @@ typedef struct _object_clock GF_Clock; typedef struct _es_channel GF_Channel; typedef struct _generic_codec GF_Codec; typedef struct _composition_memory GF_CompositionMemory; +typedef struct _gf_addon_media GF_AddonMedia; struct _net_service @@ -58,6 +59,8 @@ struct _net_service struct _tag_terminal *term; /*service url*/ char *url; + /*service mime type*/ + char *mime; /*od_manager owning service, NULL for services created for remote channels*/ struct _od_manager *owner; /*number of attached remote channels ODM (ESD URLs)*/ @@ -189,6 +192,7 @@ struct _scene /*URLs of current video, audio and subs (we can't store objects since they may be destroyed when seeking)*/ SFURL visual_url, audio_url, text_url, dims_url; + Bool end_of_scene; #ifndef GPAC_DISABLE_VRML /*list of externproto libraries*/ GF_List *extern_protos; @@ -208,6 +212,10 @@ struct _scene /*list of M_KeyNavigator nodes*/ GF_List *keynavigators; #endif + + + GF_AddonMedia *active_addon; + GF_List *declared_addons; }; GF_Scene *gf_scene_new(GF_Scene *parentScene); @@ -245,7 +253,7 @@ void gf_scene_force_size_to_video(GF_Scene *scene, GF_MediaObject *mo); Bool gf_scene_check_clocks(GF_ClientService *ns, GF_Scene *scene); -void gf_scene_notify_event(GF_Scene *scene, u32 event_type, GF_Node *n, void *dom_evt, GF_Err code); +void gf_scene_notify_event(GF_Scene *scene, u32 event_type, GF_Node *n, void *dom_evt, GF_Err code, Bool no_queueing); void gf_scene_mpeg4_inline_restart(GF_Scene *scene); @@ -622,7 +630,7 @@ struct _es_channel /*signal that next AU is an AU start*/ Bool NextIsAUStart; /*if codec resilient, packet drops are not considered as fatal for AU reconstruction (eg no wait for RAP)*/ - Bool codec_resilient; + u32 codec_resilient; /*when starting a channel, the first AU is ALWAYS fetched when buffering - this forces BIFS and OD to be decoded and first frame render, in order to detect media objects that would also need buffering - note this doesn't affect the clock, it is still paused if buffering*/ @@ -682,7 +690,6 @@ struct _es_channel Bool last_au_was_seek; Bool no_timestamps; - u32 prev_aggregated_dts; Bool pull_forced_buffer; @@ -804,6 +811,8 @@ struct _generic_codec u8 last_unit_signature[20]; /*in case the codec performs temporal re-ordering itself*/ Bool is_reordering; + /*codec will properly handle CTS adjustments*/ + Bool trusted_cts; u32 prev_au_size; u32 bytes_per_sec; Double fps; @@ -813,7 +822,8 @@ struct _generic_codec /*statistics*/ u32 last_stat_start, cur_bit_size, tot_bit_size, stat_start; u32 avg_bit_rate, max_bit_rate; - u32 total_dec_time, nb_dec_frames, max_dec_time; + u32 nb_dec_frames; + u64 total_dec_time, max_dec_time; u32 first_frame_time, last_frame_time; /*number of frames dropped at the presentation*/ u32 nb_droped; @@ -955,6 +965,14 @@ struct _od_manager /*the media sensor(s) attached to this object*/ GF_List *ms_stack; #endif + + //only set on root OD of addon subscene, which gather all 
the hybrid resources + GF_AddonMedia *addon; + //set to true if this is a scalable addon for an existing object + Bool scalable_addon; + + //for a regular ODM, this indicates that the current scalable_odm associated + struct _od_manager *scalable_odm; }; @@ -1037,7 +1055,7 @@ struct _mediaobj /*frame presentation time*/ u32 timestamp; /*time in ms until next frame shall be presented*/ - u32 ms_until_next; + s32 ms_until_next; s32 ms_until_pres; /*data frame size*/ u32 framesize; @@ -1083,8 +1101,39 @@ u32 gf_mo_get_od_id(MFURL *url); void gf_scene_generate_views(GF_Scene *scene, char *url, char *parent_url); +void gf_scene_register_associated_media(GF_Scene *scene, GF_AssociatedContentLocation *addon_info); +void gf_scene_notify_associated_media_timeline(GF_Scene *scene, GF_AssociatedContentTiming *addon_time); +u32 gf_scene_adjust_time_for_addon(GF_Scene *scene, u32 clock_time, GF_AddonMedia *addon); +u64 gf_scene_adjust_timestamp_for_addon(GF_Scene *scene, u64 orig_ts, GF_AddonMedia *addon); +void gf_scene_select_scalable_addon(GF_Scene *scene, GF_ObjectManager *odm); + +struct _gf_addon_media +{ + char *url; + GF_ObjectManager *root_od; + s32 timeline_id; + u32 is_splicing; + //in scene time + Double activation_time; + + Bool enabled; + Bool timeline_ready; + + u32 media_timescale; + u64 media_timestamp; + u64 media_pts; + + //0: not scalable + //1: layered coding scalable enhancement (reassembly before the decoder) + //2: view enhancement (reassembly after the decoder) + u32 scalable_type; +}; + GF_Err gf_codec_process_private_media(GF_Codec *codec, u32 TimeAvailable); + +Bool gf_codec_is_scene_or_image(GF_Codec *codec); + #ifdef __cplusplus } #endif diff --git a/include/gpac/ismacryp.h b/include/gpac/ismacryp.h index 78fb2df..4140d2c 100644 --- a/include/gpac/ismacryp.h +++ b/include/gpac/ismacryp.h @@ -59,7 +59,7 @@ enum typedef struct { - /*0: ISMACryp - 1: OMA DRM - 2: CENC CTR - 3: CENC CBC*/ + /*0: ISMACryp - 1: OMA DRM - 2: CENC CTR - 3: CENC CBC - 4: ADOBE*/ u32 enc_type; u32 trackID; unsigned char key[16]; @@ -99,6 +99,9 @@ typedef struct u32 defaultKeyIdx; u32 keyRoll; + char metadata[5000]; + u32 metadata_len; + } GF_TrackCryptInfo; #if !defined(GPAC_DISABLE_MCRYPT) && !defined(GPAC_DISABLE_ISOM_WRITE) @@ -117,6 +120,10 @@ GF_Err gf_cenc_decrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*pro GF_Err gf_cbc_encrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*progress)(void *cbk, u64 done, u64 total), void *cbk); GF_Err gf_cbc_decrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*progress)(void *cbk, u64 done, u64 total), void *cbk); +/*ADOBE*/ +GF_Err gf_adobe_encrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*progress)(void *cbk, u64 done, u64 total), void *cbk); +GF_Err gf_adobe_decrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*progress)(void *cbk, u64 done, u64 total), void *cbk); + GF_Err (*gf_encrypt_track)(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*progress)(void *cbk, u64 done, u64 total), void *cbk); GF_Err (*gf_decrypt_track)(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*progress)(void *cbk, u64 done, u64 total), void *cbk); diff --git a/include/gpac/isomedia.h b/include/gpac/isomedia.h index 6faee67..33925af 100644 --- a/include/gpac/isomedia.h +++ b/include/gpac/isomedia.h @@ -167,7 +167,10 @@ enum GF_ISOM_CENC_SCHEME = GF_4CC('c','e','n','c'), /* Encryption Scheme Type in the SchemeTypeInfoBox */ - GF_ISOM_CBC_SCHEME = GF_4CC('c','b','c','1') + GF_ISOM_CBC_SCHEME = GF_4CC('c','b','c','1'), + + /* Encryption 
Scheme Type in the SchemeTypeInfoBox */ + GF_ISOM_ADOBE_SCHEME = GF_4CC('a','d','k','m'), }; @@ -312,6 +315,7 @@ typedef struct 0: not random access 1: regular RAP, 2: sample is a redundant RAP. If set when adding the sample, this will create a sample dependency entry + 3: specific RAP (CRA/BLA in HEVC) */ u8 IsRAP; } GF_ISOSample; @@ -1970,6 +1974,10 @@ GF_Err gf_isom_remove_samp_enc_box(GF_ISOFile *the_file, u32 trackNumber); GF_Err gf_isom_remove_samp_group_box(GF_ISOFile *the_file, u32 trackNumber); GF_Err gf_isom_remove_pssh_box(GF_ISOFile *the_file); +Bool gf_isom_is_adobe_protection_media(GF_ISOFile *the_file, u32 trackNumber, u32 sampleDescriptionIndex); +GF_Err gf_isom_get_adobe_protection_info(GF_ISOFile *the_file, u32 trackNumber, u32 sampleDescriptionIndex, u32 *outOriginalFormat, u32 *outSchemeType, u32 *outSchemeVersion); +GF_Err gf_isom_set_adobe_protection(GF_ISOFile *the_file, u32 trackNumber, u32 desc_index, u32 scheme_type, u32 scheme_version, Bool is_selective_enc, char *metadata, u32 len); + void gf_isom_ipmpx_remove_tool_list(GF_ISOFile *the_file); #endif /*GPAC_DISABLE_ISOM_WRITE*/ @@ -2262,7 +2270,7 @@ GF_Err gf_isom_add_subsample(GF_ISOFile *movie, u32 track, u32 sampleNumber, u32 GF_Err gf_isom_fragment_add_subsample(GF_ISOFile *movie, u32 TrackID, u32 subSampleSize, u8 priority, u32 reserved, Bool discardable); /*copy over the subsample and sampleToGroup information of the given sample from the source track/file to the last sample added to the current track fragment of the destination file*/ -GF_Err gf_isom_fragment_copy_subsample(GF_ISOFile *dest, u32 TrackID, GF_ISOFile *orig, u32 track, u32 sampleNumber); +GF_Err gf_isom_fragment_copy_subsample(GF_ISOFile *dest, u32 TrackID, GF_ISOFile *orig, u32 track, u32 sampleNumber, Bool sgpd_in_traf); /*gets the number of the next moof to be produced*/ u32 gf_isom_get_next_moof_number(GF_ISOFile *movie); diff --git a/include/gpac/math.h b/include/gpac/math.h index 480448e..43e0a7d 100644 --- a/include/gpac/math.h +++ b/include/gpac/math.h @@ -37,10 +37,7 @@ extern "C" { #include -/*NOTE: there is a conflict on Win32 VC6 with C++ and gpac headers when including */ -#if !defined(__cplusplus) || defined(__SYMBIAN32__) #include -#endif /*! diff --git a/include/gpac/media_tools.h b/include/gpac/media_tools.h index 7cb9df8..6ba223d 100644 --- a/include/gpac/media_tools.h +++ b/include/gpac/media_tools.h @@ -47,6 +47,7 @@ GF_ESD *gf_media_map_esd(GF_ISOFile *mp4, u32 track); #ifndef GPAC_DISABLE_ISOM_WRITE /*changes pixel aspect ratio for visual tracks if supported. 
Negative values remove any PAR info*/ GF_Err gf_media_change_par(GF_ISOFile *file, u32 track, s32 ar_num, s32 ar_den); +GF_Err gf_media_remove_non_rap(GF_ISOFile *file, u32 track); #endif @@ -223,6 +224,7 @@ typedef struct __track_import u32 nb_tracks; /*track info after probing (GF_IMPORT_PROBE_ONLY set).*/ struct __track_import_info tk_info[GF_IMPORT_MAX_TRACKS]; + u64 probe_duration; /*for MPEG-TS and similar: program names*/ u32 nb_progs; @@ -312,7 +314,7 @@ GF_Err gf_dasher_segment_files(const char *mpd_name, GF_DashSegmenterInput *inpu Bool segments_start_with_rap, Double dash_duration_sec, char *seg_rad_name, char *seg_ext, u32 segment_marker_4cc, Double frag_duration_sec, s32 subsegs_per_sidx, Bool daisy_chain_sidx, Bool fragments_start_with_rap, const char *tmp_dir, GF_Config *dash_ctx, u32 dash_dynamic, u32 mpd_update_time, u32 time_shift_depth, Double subduration, Double min_buffer, - u32 ast_shift_sec, u32 dash_scale, Bool fragments_in_memory, u32 initial_moof_sn, u64 initial_tfdt, Bool no_fragments_defaults, Bool pssh_moof); + u32 ast_shift_sec, u32 dash_scale, Bool fragments_in_memory, u32 initial_moof_sn, u64 initial_tfdt, Bool no_fragments_defaults, Bool pssh_moof, Bool samplegroups_in_traf); /*returns time to wait until end of currently generated segments*/ u32 gf_dasher_next_update_time(GF_Config *dash_ctx, u32 mpd_update_time); diff --git a/include/gpac/mediaobject.h b/include/gpac/mediaobject.h index 7d888a2..a6b8e27 100644 --- a/include/gpac/mediaobject.h +++ b/include/gpac/mediaobject.h @@ -97,13 +97,15 @@ GF_Err gf_mo_get_raw_image_planes(GF_MediaObject *mo, u8 **pY_or_RGB, u8 **pU, u /*fetch media data */ -char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestamp, u32 *size, s32 *ms_until_pres, u32 *ms_until_next); +char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestamp, u32 *size, s32 *ms_until_pres, s32 *ms_until_next); -/*release given amount of media data - nb_bytes is used for audio - if forceDrop is set, the unlocked frame will be -droped if all bytes are consumed, otherwise it will be droped based on object time - typically, video fetches with the resync -flag set and release without forceDrop, while audio fetches without resync but forces buffer drop. 
If forceDrop is set to 2, -the frame will be stated as a discraded frame*/ -void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 forceDrop); +/*release given amount of media data - nb_bytes is used for audio - drop_mode can take the following values: +-1: do not drop +0: do not force drop: the unlocked frame it will be droped based on object time (typically video) +1: force drop : the unlocked frame will be droped if all bytes are consumed (typically audio) +2: the frame will be stated as a discraded frame +*/ +void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 drop_mode); /*get media time*/ void gf_mo_get_media_time(GF_MediaObject *mo, u32 *media_time, u32 *media_dur); /*get object clock*/ diff --git a/include/gpac/modules/codec.h b/include/gpac/modules/codec.h index 6fe441b..6edf8b2 100644 --- a/include/gpac/modules/codec.h +++ b/include/gpac/modules/codec.h @@ -53,6 +53,16 @@ enum GF_CODEC_LEVEL_SEEK }; + +/*codec resilience type*/ +enum +{ + GF_CODEC_NOT_RESILIENT=0, + GF_CODEC_RESILIENT_ALWAYS=1, + GF_CODEC_RESILIENT_AFTER_FIRST_RAP=2 +}; + + /*the structure for capabilities*/ typedef struct { @@ -75,7 +85,11 @@ enum /*size of a single composition unit */ GF_CODEC_OUTPUT_SIZE = 0x01, /*resilency: if packets are lost within an AU, resilience means the AU won't be discarded and the codec - will try to decode */ + will try to decode + 0: not resilient + 1: resilient + 2: resilient after first rap + */ GF_CODEC_RESILIENT, /*critical level of composition memory - if below, media management for the object */ GF_CODEC_BUFFER_MIN, @@ -100,9 +114,15 @@ enum GF_CODEC_PAR, /*video color mode - color modes are defined in constants.h*/ GF_CODEC_PIXEL_FORMAT, - /*isgnal decoder performs frame re-ordering in temporal scalability*/ + /*signal decoder performs frame re-ordering in temporal scalability*/ GF_CODEC_REORDER, - + /*signal decoder can safely handle CTS when outputing a picture. If not supported by the + decoder, the terminal will automatically handle CTS adjustments*/ + GF_CODEC_TRUSTED_CTS, + + /*set cap only, indicate smax bpp of display*/ + GF_CODEC_DISPLAY_BPP, + /*Audio sample rate*/ GF_CODEC_SAMPLERATE, /*Audio num channels*/ @@ -127,7 +147,7 @@ enum after the last AU). Otherwise the decoder will be stopped and ask to remove any extra scene being displayed*/ GF_CODEC_MEDIA_NOT_OVER, - /*switches up or down media quality for scalable coding*/ + /*switches up (1), max (2), down (0) or min (-1) media quality for scalable coding. */ GF_CODEC_MEDIA_SWITCH_QUALITY, /*special cap indicating the codec should abort processing as soon as possible because it is about to be destroyed*/ @@ -188,7 +208,7 @@ typedef struct _basedecoder } GF_BaseDecoder; /*interface name and version for media decoder */ -#define GF_MEDIA_DECODER_INTERFACE GF_4CC('G', 'M', 'D', '2') +#define GF_MEDIA_DECODER_INTERFACE GF_4CC('G', 'M', 'D', '3') /*the media module interface. 
A media module MUST be implemented in synchronous mode as time and resources management is done by the terminal*/ @@ -206,7 +226,7 @@ typedef struct _mediadecoder @mmlevel: speed indicator for the decoding - cf above for values*/ GF_Err (*ProcessData)(struct _mediadecoder *, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel); diff --git a/include/gpac/modules/service.h b/include/gpac/modules/service.h index 0e4d5c0..5bacc8e 100644 --- a/include/gpac/modules/service.h +++ b/include/gpac/modules/service.h @@ -118,14 +118,21 @@ typedef enum GF_NET_SERVICE_QUERY_INIT_RANGE, /* When using proxy between an input module and the terminal, exchange status using this command: input -> proxy */ GF_NET_SERVICE_STATUS_PROXY, - /*When using DASH or playlists, indicates that DATA (chunk) has been received by the playlist proxy*/ - GF_NET_SERVICE_PROXY_CHUNK_RECEIVE, - /*When using DASH or playlists, indicates that complete segment has been received by the playlist proxy*/ - GF_NET_SERVICE_PROXY_SEGMENT_RECEIVE, + + /*When using DASH or playlists, indicates that DATA (chunk or complete file ) has been received by the playlist proxy*/ + GF_NET_SERVICE_PROXY_DATA_RECEIVE, /*query screen capabilities*/ GF_NET_SERVICE_MEDIA_CAP_QUERY, + /*signal an associated content is announced (from service to term). This does not have to be filtered by the service + the terminal will handle this.*/ + GF_NET_ASSOCIATED_CONTENT_LOCATION, + /*signal associated content timeline (from service to term)*/ + GF_NET_ASSOCIATED_CONTENT_TIMING, + + //sets nalu mode + GF_NET_CHAN_NALU_MODE, } GF_NET_CHAN_CMD; /*channel command for all commands that don't need params: @@ -217,6 +224,16 @@ typedef struct u32 padding_bytes; } GF_NetComPadding; + +/*GF_NET_SERVICE_PROXY_DATA_RECEIVE*/ +typedef struct +{ + u32 command_type; + LPNETCHANNEL on_channel; + Bool is_chunk; + Bool is_live; +} GF_NetComProxyData; + /*GF_NET_CHAN_MAP_TIME*/ typedef struct { @@ -411,7 +428,9 @@ typedef struct LPNETCHANNEL channel; u32 width; u32 height; - u32 bpp; + //max bits per color channel + u32 display_bit_depth; + u32 audio_bpp; u32 channels; u32 sample_rate; const char *mime_query; @@ -420,6 +439,47 @@ typedef struct Bool mime_supported; } GF_MediaCapQuery; + +/*GF_NET_ASSOCIATED_CONTENT_LOCATION*/ +typedef struct +{ + u32 command_type; + LPNETCHANNEL channel; + + s32 timeline_id; + const char *external_URL; + Bool is_announce, is_splicing; + Bool reload_external; + Double activation_countdown; +} GF_AssociatedContentLocation; + +/*GF_NET_ASSOCIATED_CONTENT_TIMING*/ +typedef struct +{ + u32 command_type; + LPNETCHANNEL channel; + + u32 timeline_id; + u32 media_timescale; + u64 media_timestamp; + //for now only used in MPEG-2, so media_pts is in 90khz scale + u64 media_pts; + Bool force_reload; + Bool is_paused; + Bool is_discontinuity; +} GF_AssociatedContentTiming; + +/*GF_NET_CHAN_NALU_MODE*/ +typedef struct +{ + u32 command_type; + LPNETCHANNEL channel; + + //mode 0: extract in ISOBMF format (nalu size field + nalu) + //mode 1: extract in Annex B format (start code + nalu) + u32 extract_mode; +} GF_NALUExtractMode; + typedef union __netcommand { GF_NET_CHAN_CMD command_type; @@ -442,6 +502,10 @@ typedef union __netcommand GF_NetQualitySwitch switch_quality; GF_NetServiceStatus status; GF_MediaCapQuery mcaps; + GF_NetComProxyData proxy_data; + GF_AssociatedContentLocation addon_info; + GF_AssociatedContentTiming addon_time; + GF_NALUExtractMode nalu_mode; } 
GF_NetworkCommand; /* diff --git a/include/gpac/modules/video_out.h b/include/gpac/modules/video_out.h index 3376517..96eefc9 100644 --- a/include/gpac/modules/video_out.h +++ b/include/gpac/modules/video_out.h @@ -212,6 +212,8 @@ typedef struct _video_out u32 max_screen_width, max_screen_height; /* dpi of the screen*/ u32 dpi_x, dpi_y; + /* max bits per color channel*/ + u32 max_screen_bpp; /*overlay color key used by the hardware bliter - if not set, only top-level overlay can be used*/ u32 overlay_color_key; diff --git a/include/gpac/mpeg4_odf.h b/include/gpac/mpeg4_odf.h index 89bcc76..245cc80 100644 --- a/include/gpac/mpeg4_odf.h +++ b/include/gpac/mpeg4_odf.h @@ -922,7 +922,7 @@ typedef struct GF_List *param_array; - //set by libisomedia at impport/export time + //set by libisomedia at import/export time Bool is_shvc; //used in SHVC config diff --git a/include/gpac/mpegts.h b/include/gpac/mpegts.h index 067e2e3..b97c7cc 100644 --- a/include/gpac/mpegts.h +++ b/include/gpac/mpegts.h @@ -60,6 +60,9 @@ enum GF_M2TS_MPEG4_SL_DESCRIPTOR = 0x1E, GF_M2TS_MPEG4_FMC_DESCRIPTOR = 0x1F, /* ... */ + GF_M2TS_METADATA_POINTER_DESCRIPTOR = 0x25, + GF_M2TS_METADATA_DESCRIPTOR = 0x26, + /* ... */ GF_M2TS_AVC_VIDEO_DESCRIPTOR = 0x28, /* ... */ GF_M2TS_AVC_TIMING_HRD_DESCRIPTOR = 0x2A, @@ -207,6 +210,8 @@ enum GF_M2TS_SYSTEMS_MPEG4_PES = 0x12, GF_M2TS_SYSTEMS_MPEG4_SECTIONS = 0x13, + GF_M2TS_METADATA_PES = 0x15, + GF_M2TS_VIDEO_H264 = 0x1B, GF_M2TS_VIDEO_SVC = 0x1F, GF_M2TS_VIDEO_HEVC = 0x24, @@ -225,6 +230,7 @@ enum GF_M2TS_DVB_TELETEXT = 0x152, GF_M2TS_DVB_VBI = 0x153, GF_M2TS_DVB_SUBTITLE = 0x154, + GF_M2TS_METADATA_ID3_HLS = 0x155, }; @@ -285,7 +291,9 @@ enum /*skip pes processing: all transport packets related to this stream are discarded*/ GF_M2TS_PES_FRAMING_SKIP, /*same as GF_M2TS_PES_FRAMING_SKIP but keeps internal PES buffer alive*/ - GF_M2TS_PES_FRAMING_SKIP_NO_RESET + GF_M2TS_PES_FRAMING_SKIP_NO_RESET, + /*same as defualt PES framing but forces nal-per-nal dispatch for AVC/HEVC (default mode may dispatch complete frames)*/ + GF_M2TS_PES_FRAMING_DEFAULT_NAL, }; /*PES packet flags*/ @@ -375,8 +383,13 @@ enum GF_M2TS_EVT_AIT_FOUND, /*DSCM-CC has been found (carousel) */ GF_M2TS_EVT_DSMCC_FOUND, + + /*a TEMI locator has been found or repeated*/ + GF_M2TS_EVT_TEMI_LOCATION, + /*a TEMI timecode has been found*/ + GF_M2TS_EVT_TEMI_TIMECODE, + GF_M2TS_EVT_EOS, - }; enum @@ -448,6 +461,50 @@ typedef struct GF_M2TS_SectionFilter gf_m2ts_section_callback process_section; } GF_M2TS_SectionFilter; +enum metadata_carriage { + METADATA_CARRIAGE_SAME_TS = 0, + METADATA_CARRIAGE_DIFFERENT_TS = 1, + METADATA_CARRIAGE_PS = 2, + METADATA_CARRIAGE_OTHER = 3 +}; + +typedef struct tag_m2ts_metadata_pointer_descriptor { + u16 application_format; + u32 application_format_identifier; + u8 format; + u32 format_identifier; + u8 service_id; + Bool locator_record_flag; + u32 locator_length; + char *locator_data; + enum metadata_carriage carriage_flag; + u16 program_number; + u16 ts_location; + u16 ts_id; + char *data; + u32 data_size; +} GF_M2TS_MetadataPointerDescriptor; + +typedef struct +{ + u32 timeline_id; + //for now we only support one URL announcement + const char *external_URL; + Bool is_announce, is_splicing; + Bool reload_external; + Double activation_countdown; +} GF_M2TS_TemiLocationDescriptor; + +typedef struct +{ + u32 timeline_id; + u32 media_timescale; + u64 media_timestamp; + u64 pes_pts; + Bool force_reload; + Bool is_paused; + Bool is_discontinuity; +} GF_M2TS_TemiTimecodeDescriptor; /*MPEG-2 TS 
program object*/ @@ -481,6 +538,8 @@ typedef struct u32 pid_playing; Bool is_scalable; + + GF_M2TS_MetadataPointerDescriptor *metadata_pointer_descriptor; } GF_M2TS_Program; /*ES flags*/ @@ -565,6 +624,23 @@ typedef struct tag_m2ts_dvb_teletext u8 page_number; } GF_M2TS_DVB_Teletext_Descriptor; +typedef struct tag_m2ts_metadata_descriptor { + u16 application_format; + u32 application_format_identifier; + u8 format; + u32 format_identifier; + u8 service_id; + u8 decoder_config_flags; + Bool dsmcc_flag; + u8 service_id_record_length; + char *service_id_record; + u8 decoder_config_length; + char *decoder_config; + u8 decoder_config_id_length; + char *decoder_config_id; + u8 decoder_config_service_id; +} GF_M2TS_MetadataDescriptor; + /*MPEG-2 TS ES object*/ typedef struct tag_m2ts_pes { @@ -583,10 +659,12 @@ typedef struct tag_m2ts_pes /*mpegts lib private - do not touch :)*/ /*PES re-assembler*/ - unsigned char *data; + unsigned char *pck_data; + /*amount of bytes allocated for data */ + u32 pck_alloc_len; /*amount of bytes received in the current PES packet (NOT INCLUDING ANY PENDING BYTES)*/ - u32 data_len; - /*size of the PES packet being recevied*/ + u32 pck_data_len; + /*size of the PES packet being received, as indicated in pes header length field - can be 0 if unknown*/ u32 pes_len; Bool rap; u64 PTS, DTS; @@ -609,8 +687,9 @@ typedef struct tag_m2ts_pes /*PES reframer - if NULL, pes processing is skiped*/ /*returns the number of bytes NOT consummed from the input data buffer - these bytes are kept when reassembling the next PES packet*/ - u32 (*reframe)(struct tag_m2ts_demux *ts, struct tag_m2ts_pes *pes, Bool same_pts, unsigned char *data, u32 data_len); + u32 (*reframe)(struct tag_m2ts_demux *ts, struct tag_m2ts_pes *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *hdr); + Bool single_nal_mode; /*used by several reframers to store their parsing state*/ u32 frame_state; /*LATM stuff - should be moved out of mpegts*/ @@ -619,6 +698,11 @@ typedef struct tag_m2ts_pes u64 prev_PTS; GF_M2TS_DVB_Subtitling_Descriptor sub; + GF_M2TS_MetadataDescriptor *metadata_descriptor; + + + char *temi_tc_desc; + u32 temi_tc_desc_len, temi_tc_desc_alloc_size; } GF_M2TS_PES; /*SDT information object*/ @@ -971,6 +1055,7 @@ typedef struct __m2ts_mux_pck u32 data_len; u32 flags; u64 cts, dts; + u32 duration; char *mpeg2_af_descriptors; u32 mpeg2_af_descriptors_size; @@ -1087,6 +1172,7 @@ struct __m2ts_mux_program { u32 last_sys_clock; u64 initial_ts; Bool initial_ts_set; + Bool pcr_init_time_set; u32 pcr_offset; GF_Descriptor *iod; diff --git a/include/gpac/scenegraph.h b/include/gpac/scenegraph.h index ca5547d..05322e8 100644 --- a/include/gpac/scenegraph.h +++ b/include/gpac/scenegraph.h @@ -386,6 +386,8 @@ enum /*function called when the a "set dirty" propagates to root node of the graph ctxdata is not used*/ GF_SG_CALLBACK_GRAPH_DIRTY, + //node is being destroyed + GF_SG_CALLBACK_NODE_DESTROY, }; /*set node callback: function called upon node creation. 
diff --git a/include/gpac/scenegraph_svg.h b/include/gpac/scenegraph_svg.h index c46fa62..0e295d3 100644 --- a/include/gpac/scenegraph_svg.h +++ b/include/gpac/scenegraph_svg.h @@ -258,7 +258,6 @@ typedef enum { GF_DOM_EVENT_TARGET_NODE, GF_DOM_EVENT_TARGET_DOCUMENT, - GF_DOM_EVENT_TARGET_HTML_MEDIA, GF_DOM_EVENT_TARGET_MSE_MEDIASOURCE, GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFERLIST, GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFER, @@ -354,7 +353,7 @@ typedef struct BE CAREFULL: event execution may very well destroy ANY node, especially the event target node !! */ Bool gf_dom_event_fire(GF_Node *node, GF_DOM_Event *event); -Bool sg_fire_dom_event(GF_DOMEventTarget *et, GF_DOM_Event *event, GF_SceneGraph *sg, GF_Node *n); +Bool gf_sg_fire_dom_event(GF_DOMEventTarget *et, GF_DOM_Event *event, GF_SceneGraph *sg, GF_Node *n); /*fires event on the specified node BE CAREFULL: event execution may very well destroy ANY node, especially the event target node !! diff --git a/include/gpac/setup.h b/include/gpac/setup.h index 6b06855..aef139b 100644 --- a/include/gpac/setup.h +++ b/include/gpac/setup.h @@ -46,6 +46,9 @@ This is only needed when building libgpac and modules when libgpac is not instal #include #include +#if defined(_WIN64) && !defined(GPAC_64_BITS) +#define GPAC_64_BITS +#endif typedef unsigned __int64 u64; typedef unsigned int u32; diff --git a/include/gpac/sync_layer.h b/include/gpac/sync_layer.h index 4dc2e9b..3035666 100644 --- a/include/gpac/sync_layer.h +++ b/include/gpac/sync_layer.h @@ -117,6 +117,8 @@ typedef struct /*version_number are pushed from m2ts sections to the mpeg4sl layer so as to handle mpeg4 stream dependencies*/ u8 m2ts_version_number_plus_one; u8 m2ts_pcr; + /* HTML5 MSE Packet info */ + s64 timeStampOffset; } GF_SLHeader; diff --git a/include/gpac/term_info.h b/include/gpac/term_info.h index f38cd33..689c632 100644 --- a/include/gpac/term_info.h +++ b/include/gpac/term_info.h @@ -92,6 +92,8 @@ typedef struct u32 db_unit_count; /*number of CUs in composition memory (if any) and CM capacity*/ u16 cb_unit_count, cb_max_count; + /*inidciate that thye composition memory is bypassed for this decoder (video only) */ + Bool direct_video_memory; /*clock drift in ms of object clock: this is the delay set by the audio renderer to keep AV in sync*/ s32 clock_drift; /*codec name*/ @@ -105,8 +107,9 @@ typedef struct /*average birate over last second and max bitrate over one second at decoder input - expressed in bits per sec*/ u32 avg_bitrate, instant_bitrate, max_bitrate; - u32 total_dec_time, max_dec_time, nb_dec_frames, nb_droped; + u32 nb_dec_frames, nb_droped; u32 first_frame_time, last_frame_time; + u64 max_dec_time, total_dec_time; /*set if ISMACryp present on the object - will need refinement for IPMPX... 0: not protected - 1: protected and OK - 2: protected and DRM failed*/ diff --git a/include/gpac/tools.h b/include/gpac/tools.h index 856a205..d0a39e1 100644 --- a/include/gpac/tools.h +++ b/include/gpac/tools.h @@ -667,10 +667,18 @@ void gf_sys_close(); * \brief System clock query * * Gets the system clock time. - * \return System clock value since initialization in milliseconds. + * \return System clock value since GPAC initialization in milliseconds. */ u32 gf_sys_clock(); +/*! + * \brief High precision system clock query + * + * Gets the hight precision system clock time. + * \return System clock value since GPAC initialization in microseconds. + */ +u64 gf_sys_clock_high_res(); + /*! 
* \brief Sleeps thread/process * diff --git a/include/gpac/version.h b/include/gpac/version.h index e54ab18..fcd5dfb 100644 --- a/include/gpac/version.h +++ b/include/gpac/version.h @@ -41,7 +41,7 @@ #define GPAC_VERSION_MICRO 0 #include -#define GPAC_FULL_VERSION GPAC_VERSION"-rev"GPAC_SVN_REVISION +#define GPAC_FULL_VERSION GPAC_VERSION "-rev" GPAC_SVN_REVISION #endif //_GF_VERSION_H diff --git a/modules/Makefile b/modules/Makefile index 4bed765..0386349 100644 --- a/modules/Makefile +++ b/modules/Makefile @@ -20,7 +20,11 @@ PLUGDIRS+=ctx_load svg_in endif ifeq ($(DISABLE_SVG), no) -PLUGDIRS+=laser_dec svg_in vtt_in +PLUGDIRS+=laser_dec svg_in +ifeq ($(DISABLE_TTXT), no) +PLUGDIRS+=vtt_in +endif + ifneq ($(CONFIG_ZLIB), no) PLUGDIRS+=widgetman ifeq ($(DISABLE_LOADER_BT),no) diff --git a/modules/aac_in/faad_dec.c b/modules/aac_in/faad_dec.c index e7d6fe6..b37ff1e 100644 --- a/modules/aac_in/faad_dec.c +++ b/modules/aac_in/faad_dec.c @@ -235,7 +235,7 @@ static s8 FAAD_GetChannelPos(FAADDec *ffd, u32 ch_cfg) static GF_Err FAAD_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/ac3_in/ac3_in.c b/modules/ac3_in/ac3_in.c index 86d70f0..1fc81af 100644 --- a/modules/ac3_in/ac3_in.c +++ b/modules/ac3_in/ac3_in.c @@ -179,7 +179,7 @@ static void AC3_OnLiveData(AC3Reader *read, const char *data, u32 data_size) if (read->needs_connection) { read->needs_connection = 0; - bs = gf_bs_new(read->data, read->data_size, GF_BITSTREAM_READ); + bs = gf_bs_new((char *) read->data, read->data_size, GF_BITSTREAM_READ); sync = gf_ac3_parser_bs(bs, &hdr, 1); gf_bs_del(bs); if (!sync) return; @@ -195,7 +195,7 @@ static void AC3_OnLiveData(AC3Reader *read, const char *data, u32 data_size) /*need a full ac3 header*/ if (read->data_size<=7) return; - bs = gf_bs_new(read->data, read->data_size, GF_BITSTREAM_READ); + bs = gf_bs_new((char *) read->data, read->data_size, GF_BITSTREAM_READ); hdr.framesize = 0; pos = 0; while (gf_ac3_parser_bs(bs, &hdr, 0)) { @@ -205,7 +205,7 @@ static void AC3_OnLiveData(AC3Reader *read, const char *data, u32 data_size) read->sl_hdr.AU_sequenceNumber++; read->sl_hdr.compositionTimeStampFlag = 1; read->sl_hdr.compositionTimeStamp += 1536; - gf_term_on_sl_packet(read->service, read->ch, read->data + pos, hdr.framesize, &read->sl_hdr, GF_OK); + gf_term_on_sl_packet(read->service, read->ch, (char *) read->data + pos, hdr.framesize, &read->sl_hdr, GF_OK); gf_bs_skip_bytes(bs, hdr.framesize); } @@ -213,7 +213,7 @@ static void AC3_OnLiveData(AC3Reader *read, const char *data, u32 data_size) gf_bs_del(bs); if (pos) { - char *d; + u8 *d; read->data_size -= (u32) pos; d = gf_malloc(sizeof(char) * read->data_size); memcpy(d, read->data + pos, sizeof(char) * read->data_size); @@ -589,12 +589,12 @@ fetch_next: read->sl_hdr.compositionTimeStamp = read->current_time; read->data = gf_malloc(sizeof(char) * (read->data_size+read->pad_bytes)); - gf_bs_read_data(bs, read->data, read->data_size); + gf_bs_read_data(bs, (char *) read->data, read->data_size); if (read->pad_bytes) memset(read->data + read->data_size, 0, sizeof(char) * read->pad_bytes); gf_bs_del(bs); } *out_sl_hdr = read->sl_hdr; - *out_data_ptr = read->data; + *out_data_ptr =(char *) read->data; *out_data_size = read->data_size; return GF_OK; } diff --git a/modules/ac3_in/liba52_dec.c b/modules/ac3_in/liba52_dec.c index a908951..11db2f2 100644 --- a/modules/ac3_in/liba52_dec.c +++ 
b/modules/ac3_in/liba52_dec.c @@ -233,7 +233,7 @@ static const int ac3_channels[8] = { static GF_Err AC3_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/amr_dec/amr_dec.c b/modules/amr_dec/amr_dec.c index f483175..7da0c59 100644 --- a/modules/amr_dec/amr_dec.c +++ b/modules/amr_dec/amr_dec.c @@ -152,7 +152,7 @@ static GF_Err AMR_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capab static GF_Err AMR_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/amr_float_dec/amr_float_dec.c b/modules/amr_float_dec/amr_float_dec.c index f44036c..176cafe 100644 --- a/modules/amr_float_dec/amr_float_dec.c +++ b/modules/amr_float_dec/amr_float_dec.c @@ -195,7 +195,7 @@ static GF_Err AMR_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capab static GF_Err AMR_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/ctx_load/ctx_load.c b/modules/ctx_load/ctx_load.c index f194fa4..5a13ff7 100644 --- a/modules/ctx_load/ctx_load.c +++ b/modules/ctx_load/ctx_load.c @@ -617,8 +617,10 @@ static GF_Err CTXLoad_ProcessData(GF_SceneDecoder *plug, const char *inBuffer, u } fclose(t); } - /*remap to remote URL*/ - remote = gf_strdup(mux->file_name); + /*remap to remote URL - warning, the URL has already been resolved according to the parent path*/ + remote = gf_malloc(sizeof(char) * (strlen("gpac://")+strlen(mux->file_name)+1) ); + strcpy(remote, "gpac://"); + strcat(remote, mux->file_name); k = od->objectDescriptorID; /*if files were created we'll have to clean up (swf import)*/ if (mux->delete_file) gf_list_add(priv->files_to_delete, gf_strdup(remote)); diff --git a/modules/dx_hw/copy_pixels.c b/modules/dx_hw/copy_pixels.c index c7bc141..a0ec2d2 100644 --- a/modules/dx_hw/copy_pixels.c +++ b/modules/dx_hw/copy_pixels.c @@ -214,58 +214,6 @@ static void write_yv12_to_yuv(GF_VideoSurface *vs, unsigned char *pY, u32 src_s } } - -static void write_yv12_10_to_yuv(GF_VideoSurface *vs, unsigned char *pY, u32 src_stride, u32 src_pf, - u32 src_width, u32 src_height, const GF_Window *src_wnd, unsigned char *pU, unsigned char*pV) -{ - u32 i, j; - if (!pU) { - pU = pY + src_stride * src_height; - pV = pY + 5*src_stride * src_height/4; - } - - pY = pY + src_stride * src_wnd->y + src_wnd->x; - /*because of U and V downsampling by 2x2, working with odd Y offset will lead to a half-line shift between Y and UV components. 
We - therefore force an even Y offset for U and V planes.*/ - pU = pU + (src_stride * (src_wnd->y / 2) + src_wnd->x) / 2; - pV = pV + (src_stride * (src_wnd->y / 2) + src_wnd->x) / 2; - - if (vs->pixel_format == GF_PIXEL_YV12) { - for (i=0; ih; i++) { - u16 *src = (u16 *) (pY + i*src_stride); - u8 *dst = vs->video_buffer + i*vs->pitch_y; - - for (j=0; jw;j++) { - *dst = (*src) >> 2; - dst++; - src++; - } - } - - for (i=0; ih/2; i++) { - u16 *src = (u16 *) (pV + i*src_stride/2); - u8 *dst = vs->video_buffer + vs->pitch_y * vs->height + i*vs->pitch_y/2; - - for (j=0; jw/2;j++) { - *dst = (*src) >> 2; - dst++; - src++; - } - } - - for (i=0; ih/2; i++) { - u16 *src = (u16 *) (pU + i*src_stride/2); - u8 *dst = vs->video_buffer + 5*vs->pitch_y * vs->height/4 + i*vs->pitch_y/2; - - for (j=0; jw/2;j++) { - *dst = (*src) >> 2; - dst++; - src++; - } - } - } -} - static void write_yvyu_to_yuv(GF_VideoSurface *vs, unsigned char *src, u32 src_stride, u32 src_pf, u32 src_width, u32 src_height, const GF_Window *src_wnd) { @@ -612,7 +560,7 @@ void dx_copy_pixels(GF_VideoSurface *dst_s, const GF_VideoSurface *src_s, const } else if (get_yuv_base(src_s->pixel_format)==GF_PIXEL_YV12_10) { if (format_is_yuv(dst_s->pixel_format)) { /*generic YV planar to YUV (planar or not) */ - write_yv12_10_to_yuv(dst_s, src_s->video_buffer, src_s->pitch_y, src_s->pixel_format, src_s->width, src_s->height, src_wnd, src_s->u_ptr, src_s->v_ptr); + gf_color_write_yv12_10_to_yuv(dst_s, src_s->video_buffer, src_s->u_ptr, src_s->v_ptr, src_s->pitch_y, src_s->width, src_s->height, src_wnd); return; } } else if (format_is_yuv(src_s->pixel_format)) { diff --git a/modules/dx_hw/dx_2d.c b/modules/dx_hw/dx_2d.c index 32e0ace..1081508 100644 --- a/modules/dx_hw/dx_2d.c +++ b/modules/dx_hw/dx_2d.c @@ -206,7 +206,6 @@ GF_Err InitDirectDraw(GF_VideoOutput *dr, u32 Width, u32 Height) DDSURFDESC ddsd; DDPIXELFORMAT pixelFmt; LPDIRECTDRAWCLIPPER pcClipper; - const char *opt; DDCONTEXT; if (!dd->cur_hwnd || !Width || !Height || !dd->DirectDrawCreate) return GF_BAD_PARAM; DestroyObjects(dd); @@ -305,13 +304,9 @@ GF_Err InitDirectDraw(GF_VideoOutput *dr, u32 Width, u32 Height) pcClipper->lpVtbl->Release(pcClipper); - opt = gf_modules_get_option((GF_BaseInterface *)dr, "Video", "DisableVSync"); - if (opt && !strcmp(opt, "yes")) dd->disable_vsync = GF_TRUE; - dd->ddraw_init = 1; /*if YUV not initialize, init using HW video memory to setup HW caps*/ return GF_OK; - //CreateBackBuffer(dr, Width, Height, dd->yuv_init); } static GF_Err DD_LockSurface(DDContext *dd, GF_VideoSurface *vi, LPDDRAWSURFACE surface) diff --git a/modules/dx_hw/dx_video.c b/modules/dx_hw/dx_video.c index 64253a1..8636088 100644 --- a/modules/dx_hw/dx_video.c +++ b/modules/dx_hw/dx_video.c @@ -86,6 +86,9 @@ static GETPBUFFERDCARB wglGetPbufferDCARB = NULL; typedef HDC (APIENTRY *RELEASEPBUFFERDCARB)(void *pb, HDC dc); static RELEASEPBUFFERDCARB wglReleasePbufferDCARB = NULL; +typedef BOOL (APIENTRY *PFNWGLSWAPINTERVALFARPROC)( int ); +PFNWGLSWAPINTERVALFARPROC wglSwapIntervalEXT = NULL; + static void dd_init_gl_offscreen(GF_VideoOutput *driv) { const char *opt; @@ -405,6 +408,8 @@ GF_Err DD_SetupOpenGL(GF_VideoOutput *dr, u32 offscreen_width, u32 offscreen_hei return DD_SetupOpenGL(dr, offscreen_width, offscreen_height); } + dr->max_screen_bpp = dd->bpp; + if (wglGetPixelFormatAttribivARB) { int rb, gb, bb, att; rb = gb = bb = 0; @@ -451,6 +456,14 @@ GF_Err DD_SetupOpenGL(GF_VideoOutput *dr, u32 offscreen_width, u32 offscreen_hei dd_init_gl_offscreen(dr); } + if 
(dd->disable_vsync) { + if (!wglSwapIntervalEXT) { + wglSwapIntervalEXT = (PFNWGLSWAPINTERVALFARPROC)wglGetProcAddress( "wglSwapIntervalEXT" ); + } + if (wglSwapIntervalEXT) { + wglSwapIntervalEXT(0); + } + } if ((dd->output_3d_type!=2) || dd->gl_hwnd) { if (!wglMakeCurrent(dd->gl_HDC, dd->gl_HRC)) return GF_IO_ERR; @@ -514,6 +527,7 @@ GF_Err DD_Setup(GF_VideoOutput *dr, void *os_handle, void *os_display, u32 init_ { RECT rc; DDCONTEXT + const char *opt; dd->os_hwnd = (HWND) os_handle; DD_SetupWindow(dr, init_flags); @@ -533,7 +547,10 @@ GF_Err DD_Setup(GF_VideoOutput *dr, void *os_handle, void *os_display, u32 init_ dd->output_3d_type = 0; #endif GetWindowRect(dd->cur_hwnd, &rc); -// return InitDirectDraw(dr, rc.right - rc.left, rc.bottom - rc.top); + + opt = gf_modules_get_option((GF_BaseInterface *)dr, "Video", "DisableVSync"); + if (opt && !strcmp(opt, "yes")) dd->disable_vsync = GF_TRUE; + return GF_OK; } @@ -557,10 +574,12 @@ static GF_Err DD_SetFullScreen(GF_VideoOutput *dr, Bool bOn, u32 *outWidth, u32 u32 MaxWidth, MaxHeight; DDCONTEXT; - if (!dd->width ||!dd->height) return GF_BAD_PARAM; if (bOn == dd->fullscreen) return GF_OK; if (!dd->fs_hwnd) return GF_NOT_SUPPORTED; + dd->fullscreen = bOn; + + if (!dd->width ||!dd->height) return GF_OK; /*whenever changing card display mode relocate fastest YUV format for blit (since it depends on the dest pixel format)*/ @@ -623,32 +642,12 @@ static GF_Err DD_SetFullScreen(GF_VideoOutput *dr, Bool bOn, u32 *outWidth, u32 dd->fs_width = MaxWidth; dd->fs_height = MaxHeight; } - SetWindowPos(dd->cur_hwnd, NULL, X, Y, dd->fs_width, dd->fs_height, SWP_NOZORDER | SWP_SHOWWINDOW | SWP_ASYNCWINDOWPOS); - -//#ifndef _WIN32_WCE - /*commented out since it causes problem on multiple monitors*/ -#if 0 - { - DEVMODE settings; - - memset(&settings, 0, sizeof(DEVMODE)); - settings.dmSize = sizeof(DEVMODE); - settings.dmPelsWidth = dd->fs_width; - settings.dmPelsHeight = dd->fs_height; - settings.dmFields = DM_PELSWIDTH | DM_PELSHEIGHT; - - if ( ChangeDisplaySettings(&settings, CDS_FULLSCREEN) != DISP_CHANGE_SUCCESSFUL ) { - GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[DirectDraw] cannot change display settings\n")); - e = GF_IO_ERR; - } - } - dd->NeedRestore = 1; -#endif + SetWindowPos(dd->cur_hwnd, NULL, X, Y, dd->fs_width, dd->fs_height, SWP_SHOWWINDOW | SWP_NOZORDER /*| SWP_ASYNCWINDOWPOS*/); dd->fs_store_width = dd->fs_width; dd->fs_store_height = dd->fs_height; } else if (dd->os_hwnd==dd->fs_hwnd) { - SetWindowPos(dd->os_hwnd, NULL, 0, 0, dd->store_width+dd->off_w, dd->store_height+dd->off_h, SWP_NOZORDER | SWP_NOMOVE | SWP_ASYNCWINDOWPOS); + SetWindowPos(dd->os_hwnd, NULL, 0, 0, dd->store_width+dd->off_w, dd->store_height+dd->off_h, SWP_NOMOVE | SWP_NOZORDER /*| SWP_ASYNCWINDOWPOS*/); } if (!e) e = DD_SetupOpenGL(dr, 0, 0); @@ -815,6 +814,7 @@ static void *NewDXVideoOutput() driv->max_screen_width = GetSystemMetrics(SM_CXSCREEN); driv->max_screen_height = GetSystemMetrics(SM_CYSCREEN); + driv->max_screen_bpp = 8; driv->hw_caps = GF_VIDEO_HW_OPENGL | GF_VIDEO_HW_OPENGL_OFFSCREEN | GF_VIDEO_HW_OPENGL_OFFSCREEN_ALPHA | GF_VIDEO_HW_HAS_HWND_HDC; DD_SetupDDraw(driv); diff --git a/modules/dx_hw/dx_window.c b/modules/dx_hw/dx_window.c index 9bbc368..e788034 100644 --- a/modules/dx_hw/dx_window.c +++ b/modules/dx_hw/dx_window.c @@ -886,7 +886,8 @@ void DD_SetupWindow(GF_VideoOutput *dr, u32 flags) /*create event thread*/ ctx->th = gf_th_new("DirectX Video"); gf_th_run(ctx->th, DD_WindowThread, dr); - while (!ctx->th_state) gf_sleep(2); + while (!ctx->th_state) + 
gf_sleep(1); } if (!the_video_output) the_video_output = dr; } diff --git a/modules/epoc_hw/epoc_codec.cpp b/modules/epoc_hw/epoc_codec.cpp index 4171ad4..d2d6bb3 100644 --- a/modules/epoc_hw/epoc_codec.cpp +++ b/modules/epoc_hw/epoc_codec.cpp @@ -300,7 +300,7 @@ static GF_Err EDEC_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capa static GF_Err EDEC_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/ffmpeg_in/ffmpeg_decode.c b/modules/ffmpeg_in/ffmpeg_decode.c index d36c493..e1ca2e1 100644 --- a/modules/ffmpeg_in/ffmpeg_decode.c +++ b/modules/ffmpeg_in/ffmpeg_decode.c @@ -44,8 +44,8 @@ * \param size Size to allocate (will use extra padding for real size) * \return The newly allocated buffer */ -static char * ffmpeg_realloc_buffer(char * oldBuffer, u32 size){ - char * buffer; +static uint8_t * ffmpeg_realloc_buffer(uint8_t * oldBuffer, u32 size){ + uint8_t * buffer; /* Size of buffer must be larger, see avcodec_decode_video2 documentation */ u32 allocatedSz = sizeof( char ) * (FF_INPUT_BUFFER_PADDING_SIZE + size); if (oldBuffer) @@ -96,7 +96,7 @@ static void FFDEC_LoadDSI(FFDec *ffd, GF_BitStream *bs, AVCodec *codec, AVCodecC gf_free(ctx->extradata); ctx->extradata_size = dsi_size; ctx->extradata = ffmpeg_realloc_buffer(ctx->extradata, ctx->extradata_size); - gf_bs_read_data(bs, ctx->extradata, ctx->extradata_size); + gf_bs_read_data(bs, (char *) ctx->extradata, ctx->extradata_size); return; } @@ -112,8 +112,8 @@ static void FFDEC_LoadDSI(FFDec *ffd, GF_BitStream *bs, AVCodec *codec, AVCodecC gf_free(ctx->extradata); ctx->extradata_size = 0x5a + size; ctx->extradata = ffmpeg_realloc_buffer(ctx->extradata, ctx->extradata_size); - strcpy(ctx->extradata, "SVQ3"); - gf_bs_read_data(bs, (unsigned char *)ctx->extradata + 0x5a, size); + strcpy((char *) ctx->extradata, "SVQ3"); + gf_bs_read_data(bs, (char *)ctx->extradata + 0x5a, size); } } break; @@ -122,7 +122,7 @@ static void FFDEC_LoadDSI(FFDec *ffd, GF_BitStream *bs, AVCodec *codec, AVCodecC gf_free(ctx->extradata); ctx->extradata_size = dsi_size; ctx->extradata = ffmpeg_realloc_buffer(ctx->extradata, ctx->extradata_size); - gf_bs_read_data(bs, ctx->extradata, ctx->extradata_size); + gf_bs_read_data(bs, (char *)ctx->extradata, ctx->extradata_size); break; } } @@ -135,6 +135,7 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd) AVCodecContext **ctx; AVCodec **codec; AVFrame **frame; + const char *sOpt; #ifndef GPAC_DISABLE_AV_PARSERS GF_M4VDecSpecInfo dsi; @@ -190,14 +191,16 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd) /*ffmpeg specific*/ (*ctx)->block_align = gf_bs_read_u16(bs); + (*ctx)->bit_rate = gf_bs_read_u32(bs); + (*ctx)->codec_tag = gf_bs_read_u32(bs); } else if (ffd->st==GF_STREAM_VISUAL) { (*ctx)->codec_type = AVMEDIA_TYPE_VIDEO; (*ctx)->width = gf_bs_read_u16(bs); (*ctx)->height = gf_bs_read_u16(bs); + (*ctx)->bit_rate = gf_bs_read_u32(bs); + (*ctx)->codec_tag = gf_bs_read_u32(bs); + ffd->raw_pix_fmt = gf_bs_read_u32(bs); } - (*ctx)->bit_rate = gf_bs_read_u32(bs); - (*ctx)->codec_tag = gf_bs_read_u32(bs); - ffd->raw_pix_fmt = gf_bs_read_u32(bs); *codec = avcodec_find_decoder(codec_id); FFDEC_LoadDSI(ffd, bs, *codec, *ctx, 1); @@ -323,7 +326,7 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd) if (ffd->oti == GPAC_OTI_VIDEO_HEVC) { GF_SystemRTInfo rti; u32 nb_threads, detected_nb_threads = 1; - const char *sOpt = 
gf_modules_get_option((GF_BaseInterface *)plug, "OpenHEVC", "ThreadingType"); + sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "OpenHEVC", "ThreadingType"); if (sOpt && !strcmp(sOpt, "wpp")) av_opt_set(*ctx, "thread_type", "slice", 0); else if (sOpt && !strcmp(sOpt, "frame+wpp")) av_opt_set(*ctx, "thread_type", "frameslice", 0); else { @@ -336,8 +339,6 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd) sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "OpenHEVC", "NumThreads"); if (!sOpt) { char szO[100]; - //checkme I have perf using too many threads - if (detected_nb_threads > 6) detected_nb_threads = 6; sprintf(szO, "%d", detected_nb_threads); gf_modules_set_option((GF_BaseInterface *)plug, "OpenHEVC", "NumThreads", szO); nb_threads = detected_nb_threads; @@ -345,22 +346,27 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd) nb_threads = atoi(sOpt); } if (nb_threads > detected_nb_threads) { - GF_LOG(GF_LOG_CODEC, GF_LOG_WARNING, ("[OpenHEVC] Initializing with %d threads but only %d available cores detected on the system\n", nb_threads, rti.nb_cores)); + GF_LOG(GF_LOG_CODEC, GF_LOG_WARNING, ("[HEVC@ffmpeg] Initializing with %d threads but only %d available cores detected on the system\n", nb_threads, rti.nb_cores)); } else { - GF_LOG(GF_LOG_CODEC, GF_LOG_INFO, ("[OpenHEVC] Initializing with %d threads\n", nb_threads)); + GF_LOG(GF_LOG_CODEC, GF_LOG_INFO, ("[HEVC@ffmpeg] Initializing with %d threads\n", nb_threads)); } - fprintf(stderr, "[OpenHEVC] Initializing with %d threads\n", nb_threads); + fprintf(stderr, "[HEVC@ffmpeg] Initializing with %d threads\n", nb_threads); av_opt_set_int(*ctx, "threads", nb_threads, 0); /* Set the decoder id */ //av_opt_set_int(openHevcContext->c->priv_data, "decoder-id", i, 0); + sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "OpenHEVC", "CBUnits"); + if (!sOpt) gf_modules_set_option((GF_BaseInterface *)plug, "OpenHEVC", "CBUnits", "4"); + if (sOpt) ffd->output_cb_size = atoi(sOpt); } #endif //HAS_HEVC + if (!ffd->output_cb_size) ffd->output_cb_size = 4; + if (codec_id == CODEC_ID_RAWVIDEO) { (*ctx)->codec_id = CODEC_ID_RAWVIDEO; (*ctx)->pix_fmt = ffd->raw_pix_fmt; - if ((*ctx)->extradata && strstr((*ctx)->extradata, "BottomUp")) ffd->flipped = 1; + if ((*ctx)->extradata && strstr((char *) (*ctx)->extradata, "BottomUp")) ffd->flipped = 1; } else { #ifdef USE_AVCTX3 if (avcodec_open2((*ctx), (*codec), NULL )<0) return GF_NON_COMPLIANT_BITSTREAM; @@ -400,7 +406,7 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd) { AVPacket pkt; av_init_packet(&pkt); - pkt.data = esd->decoderConfig->decoderSpecificInfo->data; + pkt.data = (uint8_t *) esd->decoderConfig->decoderSpecificInfo->data; pkt.size = esd->decoderConfig->decoderSpecificInfo->dataLength; avcodec_decode_video2((*ctx), *frame, &gotpic, &pkt); } @@ -431,6 +437,16 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd) } + sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "Systems", "Output8bit"); + if (!sOpt) gf_modules_set_option((GF_BaseInterface *)plug, "Systems", "Output8bit", (ffd->display_bpp>8) ? 
"no" : "yes"); + if (sOpt && !strcmp(sOpt, "yes")) ffd->output_as_8bit = 1; + + if (ffd->output_as_8bit && (ffd->stride > (u32) (*ctx)->width)) { + ffd->stride /=2; + ffd->out_size /= 2; + ffd->conv_to_8bit = 1; + } + return GF_OK; } @@ -481,6 +497,10 @@ static GF_Err FFDEC_DetachStream(GF_BaseDecoder *plug, u16 ES_ID) *sws = NULL; } #endif + if (ffd->conv_buffer) { + gf_free(ffd->conv_buffer); + ffd->conv_buffer = NULL; + } return GF_OK; } @@ -501,8 +521,11 @@ static GF_Err FFDEC_GetCapabilities(GF_BaseDecoder *plug, GF_CodecCapability *ca capability->cap.valueInt = 1; return GF_OK; case GF_CODEC_DIRECT_OUTPUT: - capability->cap.valueBool = /*GF_TRUE*/GF_FALSE; + capability->cap.valueBool = GF_TRUE; return GF_OK; + case GF_CODEC_WANTS_THREAD: + capability->cap.valueBool= GF_TRUE; + break; } if (!ffd->base_ctx) { @@ -529,7 +552,7 @@ static GF_Err FFDEC_GetCapabilities(GF_BaseDecoder *plug, GF_CodecCapability *ca break; case GF_CODEC_BUFFER_MAX: /*for audio let the systems engine decide since we may have very large block size (1 sec with some QT movies)*/ - capability->cap.valueInt = (ffd->st==GF_STREAM_AUDIO) ? 0 : (ffd->is_image ? 1 : 4); + capability->cap.valueInt = (ffd->st==GF_STREAM_AUDIO) ? 0 : (ffd->is_image ? 1 : ffd->output_cb_size); break; /*by default AAC access unit lasts num_samples (timescale being sampleRate)*/ case GF_CODEC_CU_DURATION: @@ -542,8 +565,12 @@ static GF_Err FFDEC_GetCapabilities(GF_BaseDecoder *plug, GF_CodecCapability *ca capability->cap.valueInt = ffd->base_ctx->height; break; case GF_CODEC_STRIDE: - capability->cap.valueInt = ffd->stride; - if (ffd->out_pix_fmt==GF_PIXEL_RGB_24) capability->cap.valueInt *= 3; + if (ffd->out_pix_fmt==GF_PIXEL_RGB_24) + capability->cap.valueInt = ffd->stride*3; + else if (ffd->conv_buffer) + capability->cap.valueInt = ffd->base_ctx->width; + else + capability->cap.valueInt = ffd->stride; break; case GF_CODEC_FPS: capability->cap.valueFloat = 30.0f; @@ -553,6 +580,7 @@ static GF_Err FFDEC_GetCapabilities(GF_BaseDecoder *plug, GF_CodecCapability *ca break; case GF_CODEC_PIXEL_FORMAT: if (ffd->base_ctx->width) capability->cap.valueInt = ffd->out_pix_fmt; + if (ffd->conv_buffer) capability->cap.valueInt = GF_PIXEL_YV12; break; /*ffmpeg performs frame reordering internally*/ case GF_CODEC_REORDER: @@ -586,6 +614,9 @@ static GF_Err FFDEC_SetCapabilities(GF_BaseDecoder *plug, GF_CodecCapability cap assert(plug); assert( ffd ); switch (capability.CapCode) { + case GF_CODEC_DISPLAY_BPP: + ffd->display_bpp = capability.cap.valueInt; + return GF_OK; case GF_CODEC_WAIT_RAP: ffd->frame_start = 0; if (ffd->st==GF_STREAM_VISUAL) { @@ -604,7 +635,7 @@ static GF_Err FFDEC_SetCapabilities(GF_BaseDecoder *plug, GF_CodecCapability cap static GF_Err FFDEC_ProcessData(GF_MediaDecoder *plug, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { @@ -613,7 +644,7 @@ static GF_Err FFDEC_ProcessData(GF_MediaDecoder *plug, #endif AVPicture pict; u32 pix_out; - s32 w, h, gotpic; + s32 w, h, gotpic, stride; u32 outsize; AVCodecContext *ctx; AVCodec **codec; @@ -662,7 +693,7 @@ static GF_Err FFDEC_ProcessData(GF_MediaDecoder *plug, #ifdef USE_AVCODEC2 av_init_packet(&pkt); - pkt.data = inBuffer; + pkt.data = (uint8_t *)inBuffer; pkt.size = inBufferLength; #endif /*audio stream*/ @@ -718,14 +749,14 @@ redecode: if (ffd->audio_frame->format==AV_SAMPLE_FMT_FLTP) { s32 i, j; s16 *output = (s16 *) outBuffer; - for (i=0 ; iaudio_frame->nb_samples ; i++) { - for (j=0; 
j<ctx->channels; j++) { - Float* inputChannel = (Float*)ffd->audio_frame->extended_data[j]; + for (j=0; j<ctx->channels; j++) { + Float* inputChannel = (Float*)ffd->audio_frame->extended_data[j]; + for (i=0 ; i<ffd->audio_frame->nb_samples ; i++) { Float sample = inputChannel[i]; if (sample<-1.0f) sample=-1.0f; else if (sample>1.0f) sample=1.0f; - output[i*ctx->channels + j] = (int16_t) (sample * GF_SHORT_MAX); + output[i*ctx->channels + j] = (int16_t) (sample * GF_SHORT_MAX ); } } } else { @@ -764,10 +795,10 @@ redecode: if (ffd->raw_pix_fmt==PIX_FMT_BGR24) { s32 i, j; for (j=0; j<ctx->height; j++) { - u8 *src = inBuffer + j*3*ctx->width; - u8 *dst = outBuffer + j*3*ctx->width; + u8 *src = (u8 *) inBuffer + j*3*ctx->width; + u8 *dst = (u8 *)outBuffer + j*3*ctx->width; if (ffd->flipped) { - dst = outBuffer + (ctx->height-j-1) * 3*ctx->width; + dst = (u8 *)outBuffer + (ctx->height-j-1) * 3*ctx->width; } for (i=0; i<ctx->width; i++) { dst[0] = src[2]; @@ -898,21 +929,30 @@ redecode: return GF_BUFFER_TOO_SMALL; } - + stride = frame->linesize[0]; +#ifndef NO_10bit + if ((ctx->pix_fmt == PIX_FMT_YUV420P10LE) && ffd->output_as_8bit && (frame->linesize[0] >= 2*w) ) { + ffd->conv_to_8bit = 1; + stride=w; + } +#endif + /*recompute outsize in case on-the-fly change*/ if ((w != ctx->width) || (h != ctx->height) - || (ffd->direct_output && (frame->linesize[0] != ffd->stride)) - || (ffd->out_pix_fmt==GF_PIXEL_YV12 && (ctx->pix_fmt != PIX_FMT_YUV420P)) - ) { + || (ffd->direct_output && (stride != ffd->stride)) + || ((ffd->out_pix_fmt==GF_PIXEL_YV12) && (ctx->pix_fmt != PIX_FMT_YUV420P) && !ffd->output_as_8bit ) + //need to realloc the conversion buffer + || (ffd->conv_to_8bit && !ffd->conv_buffer && ffd->direct_output) + ) { - ffd->stride = ffd->direct_output ? frame->linesize[0] : ctx->width; + ffd->stride = (!ffd->conv_to_8bit && ffd->direct_output) ? frame->linesize[0] : ctx->width; if (ffd->out_pix_fmt == GF_PIXEL_RGB_24) { outsize = ctx->width * ctx->height * 3; } #ifndef NO_10bit //this YUV format is handled natively in GPAC - else if (ctx->pix_fmt == PIX_FMT_YUV420P10LE) { - ffd->stride = 2* ctx->width; + else if ((ctx->pix_fmt == PIX_FMT_YUV420P10LE) && !ffd->output_as_8bit) { + ffd->stride = ffd->direct_output ? frame->linesize[0] : ctx->width*2; outsize = ffd->stride * ctx->height * 3 / 2; ffd->out_pix_fmt = GF_PIXEL_YV12_10; } @@ -951,6 +991,11 @@ redecode: } #endif ffd->had_pic = 1; + + if (ffd->conv_to_8bit && ffd->direct_output) { + ffd->conv_buffer = gf_realloc(ffd->conv_buffer, sizeof(char)*ffd->out_size); + } + return GF_BUFFER_TOO_SMALL; } /*check PAR in case on-the-fly change*/ @@ -981,17 +1026,32 @@ redecode: } #endif - if (ffd->direct_output) { + if (ffd->direct_output && !ffd->conv_to_8bit) { *outBufferLength = ffd->out_size; return GF_OK; } + if (ffd->conv_to_8bit) { + GF_VideoSurface dst; + memset(&dst, 0, sizeof(GF_VideoSurface)); + dst.width = ctx->width; + dst.height = ctx->height; + dst.pitch_y = ctx->width; + dst.video_buffer = ffd->direct_output ?
ffd->conv_buffer : outBuffer; + dst.pixel_format = GF_PIXEL_YV12; + + gf_color_write_yv12_10_to_yuv(&dst, (u8 *) frame->data[0], frame->data[1], frame->data[2], frame->linesize[0], ctx->width, ctx->height, NULL); + *outBufferLength = ffd->out_size; + return GF_OK; + } + + if (ES_ID == ffd->depth_ES_ID) { s32 i; u8 *pYO, *pYD; pYO = frame->data[0]; - pYD = outBuffer+ffd->yuv_size; + pYD = (u8 *) outBuffer+ffd->yuv_size; for (i=0; iheight; i++) { memcpy(pYD, pYO, sizeof(char) * ctx->width); pYD += ctx->width; @@ -1035,13 +1095,13 @@ redecode: memset(&pict, 0, sizeof(pict)); if (ffd->out_pix_fmt==GF_PIXEL_RGB_24) { - pict.data[0] = outBuffer; + pict.data[0] = (uint8_t *)outBuffer; pict.linesize[0] = 3*ctx->width; pix_out = PIX_FMT_RGB24; } else { - pict.data[0] = outBuffer; - pict.data[1] = outBuffer + ffd->stride * ctx->height; - pict.data[2] = outBuffer + 5 * ffd->stride * ctx->height / 4; + pict.data[0] = (uint8_t *)outBuffer; + pict.data[1] = (uint8_t *)outBuffer + ffd->stride * ctx->height; + pict.data[2] = (uint8_t *)outBuffer + 5 * ffd->stride * ctx->height / 4; pict.linesize[0] = ffd->stride; pict.linesize[1] = pict.linesize[2] = ffd->stride/2; pix_out = PIX_FMT_YUV420P; @@ -1085,6 +1145,14 @@ static GF_Err FFDEC_GetOutputBuffer(GF_MediaDecoder *ifcg, u16 ES_ID, u8 **pY_or FFDec *ffd = ifcg->privateStack; AVFrame *frame; + + if (ffd->conv_buffer) { + *pY_or_RGB = (u8 *) ffd->conv_buffer; + *pU = (u8 *) ffd->conv_buffer + ffd->stride * ffd->base_ctx->height; + *pV = (u8 *) ffd->conv_buffer + 5*ffd->stride * ffd->base_ctx->height/4; + return GF_OK; + } + if (ES_ID && (ffd->depth_ES_ID==ES_ID)) { frame = ffd->depth_frame; *pY_or_RGB = frame->data[0]; diff --git a/modules/ffmpeg_in/ffmpeg_demux.c b/modules/ffmpeg_in/ffmpeg_demux.c index 09aaa0a..ed818a1 100644 --- a/modules/ffmpeg_in/ffmpeg_demux.c +++ b/modules/ffmpeg_in/ffmpeg_demux.c @@ -69,8 +69,6 @@ static u32 FFDemux_Run(void *par) { AVPacket pkt; s64 seek_to; - u64 seek_audio, seek_video; - Bool video_init, do_seek, map_audio_time, map_video_time; GF_NetworkCommand com; GF_NetworkCommand map; GF_SLHeader slh; @@ -80,28 +78,23 @@ static u32 FFDemux_Run(void *par) map.command_type = GF_NET_CHAN_MAP_TIME; memset(&com, 0, sizeof(GF_NetworkCommand)); - com.command_type = GF_NET_CHAN_BUFFER_QUERY; + com.command_type = GF_NET_BUFFER_QUERY; memset(&slh, 0, sizeof(GF_SLHeader)); - + slh.compositionTimeStampFlag = slh.decodingTimeStampFlag = 1; - seek_to = (s64) (AV_TIME_BASE*ffd->seek_time); - map_video_time = !ffd->seekable; - - video_init = (seek_to && ffd->video_ch) ? GF_FALSE : GF_TRUE; - seek_audio = seek_video = 0; - if (ffd->seekable && (ffd->audio_st>=0)) seek_audio = (u64) (s64) (ffd->seek_time*ffd->audio_tscale.den); - if (ffd->seekable && (ffd->video_st>=0)) seek_video = (u64) (s64) (ffd->seek_time*ffd->video_tscale.den); - - /*it appears that ffmpeg has trouble resyncing on some mpeg files - we trick it by restarting to 0 to get the - first video frame, and only then seek*/ - if (ffd->seekable) av_seek_frame(ffd->ctx, -1, video_init ? seek_to : 0, AVSEEK_FLAG_BACKWARD); - do_seek = !video_init; - map_audio_time = video_init ? 
ffd->unreliable_audio_timing : 0; - gf_sleep(1000); while (ffd->is_running) { + if ((!ffd->video_ch && (ffd->video_st>=0)) || (!ffd->audio_ch && (ffd->audio_st>=0))) { + gf_sleep(100); + continue; + } + if ((ffd->seek_time>=0) && ffd->seekable) { + seek_to = (s64) (AV_TIME_BASE*ffd->seek_time); + av_seek_frame(ffd->ctx, -1, seek_to, AVSEEK_FLAG_BACKWARD); + ffd->seek_time = -1; + } pkt.stream_index = -1; /*EOF*/ if (av_read_frame(ffd->ctx, &pkt) <0) break; @@ -113,73 +106,41 @@ static u32 FFDemux_Run(void *par) gf_mx_p(ffd->mx); /*blindly send audio as soon as video is init*/ - if (ffd->audio_ch && (pkt.stream_index == ffd->audio_st) && !do_seek) { + if (ffd->audio_ch && (pkt.stream_index == ffd->audio_st) ) { +// u64 seek_audio = ffd->seek_time ? (u64) (s64) (ffd->seek_time*ffd->audio_tscale.den) : 0; slh.compositionTimeStamp *= ffd->audio_tscale.num; slh.decodingTimeStamp *= ffd->audio_tscale.num; - if (map_audio_time) { - map.base.on_channel = ffd->audio_ch; - map.map_time.media_time = ffd->seek_time; - /*mapwith TS=0 since we don't use SL*/ - map.map_time.timestamp = 0; - map.map_time.reset_buffers = 1; - map_audio_time = 0; - gf_term_on_command(ffd->service, &map, GF_OK); - } - else if (slh.compositionTimeStamp < seek_audio) { +#if 0 + if (slh.compositionTimeStamp < seek_audio) { slh.decodingTimeStamp = slh.compositionTimeStamp = seek_audio; } - gf_term_on_sl_packet(ffd->service, ffd->audio_ch, pkt.data, pkt.size, &slh, GF_OK); +#endif + gf_term_on_sl_packet(ffd->service, ffd->audio_ch, (char *) pkt.data, pkt.size, &slh, GF_OK); } else if (ffd->video_ch && (pkt.stream_index == ffd->video_st)) { +// u64 seek_video = ffd->seek_time ? (u64) (s64) (ffd->seek_time*ffd->video_tscale.den) : 0; slh.compositionTimeStamp *= ffd->video_tscale.num; slh.decodingTimeStamp *= ffd->video_tscale.num; - /*if we get pts = 0 after a seek the demuxer is reseting PTSs, so force map time*/ - if ((!do_seek && seek_to && !slh.compositionTimeStamp) || (map_video_time) ) { - seek_to = 0; - map_video_time = 0; - - map.base.on_channel = ffd->video_ch; - map.map_time.timestamp = (u64) pkt.pts; -// map.map_time.media_time = ffd->seek_time; - map.map_time.media_time = 0; - map.map_time.reset_buffers = 0; - gf_term_on_command(ffd->service, &map, GF_OK); - } - else if (slh.compositionTimeStamp < seek_video) { +#if 0 + if (slh.compositionTimeStamp < seek_video) { slh.decodingTimeStamp = slh.compositionTimeStamp = seek_video; } - gf_term_on_sl_packet(ffd->service, ffd->video_ch, pkt.data, pkt.size, &slh, GF_OK); - video_init = 1; +#endif + gf_term_on_sl_packet(ffd->service, ffd->video_ch, (char *) pkt.data, pkt.size, &slh, GF_OK); } gf_mx_v(ffd->mx); av_free_packet(&pkt); - /*here's the trick - only seek after sending the first packets of each stream - this allows ffmpeg video decoders - to resync properly*/ - if (do_seek && video_init && ffd->seekable) { - av_seek_frame(ffd->ctx, -1, seek_to, AVSEEK_FLAG_BACKWARD); - do_seek = 0; - map_audio_time = ffd->unreliable_audio_timing; - } /*sleep untill the buffer occupancy is too low - note that this work because all streams in this demuxer are synchronized*/ - while (1) { - if (ffd->audio_ch) { - com.base.on_channel = ffd->audio_ch; - gf_term_on_command(ffd->service, &com, GF_OK); - if (com.buffer.occupancy < ffd->data_buffer_ms) break; - } - if (ffd->video_ch) { - com.base.on_channel = ffd->video_ch; - gf_term_on_command(ffd->service, &com, GF_OK); - if (com.buffer.occupancy < ffd->data_buffer_ms) break; - } + while (ffd->audio_run || ffd->video_run) { + 
gf_term_on_command(ffd->service, &com, GF_OK); + if (com.buffer.occupancy < com.buffer.max) + break; gf_sleep(10); - /*escape if disconnect*/ - if (!ffd->audio_run && !ffd->video_run) break; } if (!ffd->audio_run && !ffd->video_run) break; } @@ -402,7 +363,7 @@ opaque_audio: gf_bs_write_u32(bs, dec->bit_rate); gf_bs_write_u32(bs, dec->codec_tag); if (dec->extradata_size) { - gf_bs_write_data(bs, dec->extradata, dec->extradata_size); + gf_bs_write_data(bs, (char *) dec->extradata, dec->extradata_size); } gf_bs_get_content(bs, (char **) &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength); gf_bs_del(bs); @@ -447,7 +408,7 @@ opaque_video: gf_bs_write_u32(bs, dec->pix_fmt); if (dec->extradata_size) { - gf_bs_write_data(bs, dec->extradata, dec->extradata_size); + gf_bs_write_data(bs, (char *) dec->extradata, dec->extradata_size); } gf_bs_get_content(bs, (char **) &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength); gf_bs_del(bs); @@ -621,7 +582,7 @@ static GF_Err FFD_ConnectService(GF_InputService *plug, GF_ClientService *serv, } else { pd.filename = szName; pd.buf_size = ffd->buffer_used; - pd.buf = ffd->buffer; + pd.buf = (u8 *) ffd->buffer; av_in = av_probe_input_format(&pd, 1); if (!av_in) { GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] error probing file %s - probe start with %c %c %c %c\n", url, ffd->buffer[0], ffd->buffer[1], ffd->buffer[2], ffd->buffer[3])); @@ -882,7 +843,6 @@ static GF_Err FFD_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) case GF_NET_CHAN_INTERACTIVE: return ffd->seekable ? GF_OK : GF_NOT_SUPPORTED; case GF_NET_CHAN_BUFFER: - com->buffer.max = com->buffer.min = 0; return GF_OK; case GF_NET_CHAN_DURATION: if (ffd->ctx->duration == AV_NOPTS_VALUE) diff --git a/modules/ffmpeg_in/ffmpeg_in.h b/modules/ffmpeg_in/ffmpeg_in.h index 6a6f726..8845c6d 100644 --- a/modules/ffmpeg_in/ffmpeg_in.h +++ b/modules/ffmpeg_in/ffmpeg_in.h @@ -137,6 +137,7 @@ typedef struct Bool direct_output; u32 stride; + u32 output_cb_size; /*for audio packed frames*/ u32 frame_start; char audio_buf[192000]; @@ -160,9 +161,15 @@ typedef struct #endif #ifdef USE_AVCTX3 - AVFrame *audio_frame; + AVFrame *audio_frame; #endif + + + Bool output_as_8bit; + u32 display_bpp; + Bool conv_to_8bit; + char *conv_buffer; } FFDec; void *FFDEC_Load(); diff --git a/modules/ft_font/ft_font.c b/modules/ft_font/ft_font.c index 3ed1c55..54777a1 100644 --- a/modules/ft_font/ft_font.c +++ b/modules/ft_font/ft_font.c @@ -199,7 +199,7 @@ static Bool ft_enum_fonts_dir(void *cbck, char *file_name, char *file_path) { GF_LOG(GF_LOG_DEBUG, GF_LOG_PARSER, ("[FreeType] Scanning directory %s (%s)\n", file_name, file_path)); gf_enum_directory(file_path, 0, ft_enum_fonts, cbck, "ttf;ttc"); - return gf_enum_directory(file_path, 1, ft_enum_fonts_dir, cbck, NULL); + return (gf_enum_directory(file_path, 1, ft_enum_fonts_dir, cbck, NULL)==GF_OK) ? 
GF_FALSE : GF_TRUE; } diff --git a/modules/gapi/gapi.cpp b/modules/gapi/gapi.cpp index 5019495..03ac8e8 100644 --- a/modules/gapi/gapi.cpp +++ b/modules/gapi/gapi.cpp @@ -1184,6 +1184,7 @@ static Bool check_resolution_switch(GF_VideoOutput *dr, u32 width, u32 height) GF_Event evt; dr->max_screen_width = gctx->screen_w = width; dr->max_screen_height = gctx->screen_h = height; + dr->max_screen_bpp = 8;//we don't filter for bpp less than 8 evt.type = GF_EVENT_RESOLUTION; evt.size.width = dr->max_screen_width; diff --git a/modules/gpac_js/gpac_js.c b/modules/gpac_js/gpac_js.c index ee68412..965c456 100644 --- a/modules/gpac_js/gpac_js.c +++ b/modules/gpac_js/gpac_js.c @@ -183,11 +183,21 @@ static SMJS_FUNC_PROP_GET( gpac_getProperty) *vp = INT_TO_JSVAL( (term->compositor->video_out->hw_caps & GF_VIDEO_HW_HAS_RGB) ? 1 : 0 ); } else if (!strcmp(prop_name, "hardware_rgba")) { - *vp = INT_TO_JSVAL( (term->compositor->video_out->hw_caps & GF_VIDEO_HW_HAS_RGBA) ? 1 : 0 ); + u32 has_rgba = (term->compositor->video_out->hw_caps & GF_VIDEO_HW_HAS_RGBA) ? 1 : 0; +#ifndef GPAC_DISABLE_3D + if (term->compositor->hybrid_opengl || term->compositor->is_opengl) has_rgba = 1; +#endif + *vp = INT_TO_JSVAL( has_rgba ); } else if (!strcmp(prop_name, "hardware_stretch")) { *vp = INT_TO_JSVAL( (term->compositor->video_out->hw_caps & GF_VIDEO_HW_HAS_STRETCH) ? 1 : 0 ); } + else if (!strcmp(prop_name, "screen_width")) { + *vp = INT_TO_JSVAL( term->compositor->video_out->max_screen_width); + } + else if (!strcmp(prop_name, "screen_height")) { + *vp = INT_TO_JSVAL( term->compositor->video_out->max_screen_height); + } else if (!strcmp(prop_name, "http_bitrate")) { *vp = INT_TO_JSVAL( gf_dm_get_data_rate(term->downloader)*8/1024); } diff --git a/modules/img_in/bmp_dec.c b/modules/img_in/bmp_dec.c index 9bfc3c1..1665103 100644 --- a/modules/img_in/bmp_dec.c +++ b/modules/img_in/bmp_dec.c @@ -106,7 +106,7 @@ static GF_Err BMP_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capab static GF_Err BMP_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/img_in/jp2_dec.c b/modules/img_in/jp2_dec.c index f4d5209..46b87ec 100644 --- a/modules/img_in/jp2_dec.c +++ b/modules/img_in/jp2_dec.c @@ -195,7 +195,7 @@ static int int_ceildivpow2(int a, int b) { static GF_Err JP2_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/img_in/jpeg_dec.c b/modules/img_in/jpeg_dec.c index e91b54d..d5542c3 100644 --- a/modules/img_in/jpeg_dec.c +++ b/modules/img_in/jpeg_dec.c @@ -101,7 +101,7 @@ static GF_Err JPEG_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capa static GF_Err JPEG_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/img_in/png_dec.c b/modules/img_in/png_dec.c index 83decd4..f42ce62 100644 --- a/modules/img_in/png_dec.c +++ b/modules/img_in/png_dec.c @@ -115,7 +115,7 @@ static GF_Err PNG_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capab static GF_Err PNG_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/isom_in/isom_in.h 
b/modules/isom_in/isom_in.h index cb02be1..75fd3fa 100644 --- a/modules/isom_in/isom_in.h +++ b/modules/isom_in/isom_in.h @@ -73,6 +73,7 @@ typedef struct s32 has_pending_segments; Bool clock_discontinuity; + Bool disconnected; } ISOMReader; @@ -116,6 +117,9 @@ typedef struct Bool buffering; u32 buffer_min, buffer_max; + + + u32 nalu_extract_mode; } ISOMChannel; void isor_reset_reader(ISOMChannel *ch); void isor_reader_get_sample(ISOMChannel *ch); diff --git a/modules/isom_in/load.c b/modules/isom_in/load.c index da5ec7c..0d437ed 100644 --- a/modules/isom_in/load.c +++ b/modules/isom_in/load.c @@ -131,70 +131,6 @@ void isor_declare_objects(ISOMReader *read) if (esd) { gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_BASE, 1, &base_track); esd->has_ref_base = base_track ? GF_TRUE : GF_FALSE; - /*FIXME: if we declare only SPS/PPS of the highest layer, we have a problem in decoding even though we have all SPS/PPS inband (OpenSVC bug ?)*/ - /*so we add by default the SPS/PPS of the lower layers to this esd*/ - if (esd->has_ref_base && add_ps_lower) { - u32 count, refIndex, ref_track, num_sps, num_pps, t; - GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); - GF_AVCConfig *avccfg, *svccfg; - - count = gf_isom_get_reference_count(read->mov, i+1, GF_ISOM_REF_SCAL); - for (refIndex = count; refIndex != 0; refIndex--) { - gf_isom_get_reference(read->mov, i+1, GF_ISOM_REF_SCAL, refIndex, &ref_track); - avccfg = gf_isom_avc_config_get(read->mov, ref_track, 1); - svccfg = gf_isom_svc_config_get(read->mov, ref_track, 1); - if (avccfg) { - num_sps = gf_list_count(avccfg->sequenceParameterSets); - for (t = 0; t < num_sps; t++) { - GF_AVCConfigSlot *slc = gf_list_get(avccfg->sequenceParameterSets, t); - GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot)); - sl->id = slc->id; - sl->size = slc->size; - sl->data = (char*)gf_malloc(sizeof(char)*sl->size); - memcpy(sl->data, slc->data, sizeof(char)*sl->size); - gf_list_insert(cfg->sequenceParameterSets, sl, 0); - } - num_pps = gf_list_count(avccfg->pictureParameterSets); - for (t = 0; t < num_sps; t++) { - GF_AVCConfigSlot *slc = gf_list_get(avccfg->pictureParameterSets, t); - GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot)); - sl->id = slc->id; - sl->size = slc->size; - sl->data = (char*)gf_malloc(sizeof(char)*sl->size); - memcpy(sl->data, slc->data, sizeof(char)*sl->size); - gf_list_insert(cfg->pictureParameterSets, sl, 0); - } - gf_odf_avc_cfg_del(avccfg); - } - if (svccfg) { - num_sps = gf_list_count(svccfg->sequenceParameterSets); - for (t = 0; t < num_sps; t++) { - GF_AVCConfigSlot *slc = gf_list_get(svccfg->sequenceParameterSets, t); - GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot)); - sl->id = slc->id; - sl->size = slc->size; - sl->data = (char*)gf_malloc(sizeof(char)*sl->size); - memcpy(sl->data, slc->data, sizeof(char)*sl->size); - gf_list_insert(cfg->sequenceParameterSets, sl, 0); - } - num_pps = gf_list_count(svccfg->pictureParameterSets); - for (t = 0; t < num_pps; t++) { - GF_AVCConfigSlot *slc = gf_list_get(svccfg->pictureParameterSets, t); - GF_AVCConfigSlot *sl = (GF_AVCConfigSlot*)gf_malloc(sizeof(GF_AVCConfigSlot)); - sl->id = slc->id; - sl->size = slc->size; - sl->data = (char*)gf_malloc(sizeof(char)*sl->size); - memcpy(sl->data, slc->data, sizeof(char)*sl->size); - gf_list_insert(cfg->pictureParameterSets, sl, 0); - } - gf_odf_avc_cfg_del(svccfg); - } - } - - if 
(esd->decoderConfig->decoderSpecificInfo->data) gf_free(esd->decoderConfig->decoderSpecificInfo->data); - gf_odf_avc_cfg_write(cfg, &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength); - gf_odf_avc_cfg_del(cfg); - } od = (GF_ObjectDescriptor *) gf_odf_desc_new(GF_ODF_OD_TAG); od->service_ifce = read->input; diff --git a/modules/isom_in/read.c b/modules/isom_in/read.c index b3bb7f5..ede0144 100644 --- a/modules/isom_in/read.c +++ b/modules/isom_in/read.c @@ -417,6 +417,7 @@ GF_Err ISOR_CloseService(GF_InputService *plug) read = (ISOMReader *) plug->priv; reply = GF_OK; + read->disconnected = GF_TRUE; if (read->mov) gf_isom_close(read->mov); read->mov = NULL; @@ -763,7 +764,8 @@ GF_Err ISOR_ConnectChannel(GF_InputService *plug, LPNETCHANNEL channel, const ch gf_isom_get_reference(ch->owner->mov, ch->track, GF_ISOM_REF_BASE, 1, &ch->base_track); ch->next_track = 0; /*in scalable mode add SPS/PPS in-band*/ - gf_isom_set_nalu_extract_mode(ch->owner->mov, ch->track, GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG); + ch->nalu_extract_mode = GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG /*| GF_ISOM_NALU_EXTRACT_ANNEXB_FLAG*/; + gf_isom_set_nalu_extract_mode(ch->owner->mov, ch->track, ch->nalu_extract_mode); break; } @@ -968,7 +970,7 @@ u32 gf_channel_switch_quality(ISOMChannel *ch, GF_ISOFile *the_file, Bool switch } /*in scalable mode add SPS/PPS in-band*/ - gf_isom_set_nalu_extract_mode(the_file, next_track, GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG); + gf_isom_set_nalu_extract_mode(the_file, next_track, ch->nalu_extract_mode); return next_track; } @@ -983,6 +985,7 @@ GF_Err ISOR_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) if (!plug || !plug->priv || !com) return GF_SERVICE_ERROR; read = (ISOMReader *) plug->priv; + if (read->disconnected) return GF_OK; if (com->command_type==GF_NET_SERVICE_INFO) { u32 tag_len; @@ -1026,12 +1029,8 @@ GF_Err ISOR_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) } return GF_OK; } - if (com->command_type == GF_NET_SERVICE_PROXY_CHUNK_RECEIVE) { - isor_flush_data(read, 1, 1); - return GF_OK; - } - if (com->command_type == GF_NET_SERVICE_PROXY_SEGMENT_RECEIVE) { - isor_flush_data(read, 1, 0); + if (com->command_type == GF_NET_SERVICE_PROXY_DATA_RECEIVE) { + isor_flush_data(read, 1, com->proxy_data.is_chunk); return GF_OK; } if (com->command_type == GF_NET_SERVICE_FLUSH_DATA) { @@ -1139,10 +1138,17 @@ GF_Err ISOR_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) gf_odf_desc_del((GF_Descriptor *) dcd); } return GF_OK; + } + case GF_NET_CHAN_NALU_MODE: + ch->nalu_extract_mode = GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG; + //when this is set, we work in real scalable (eg N streams reassembled by the player) so only extract the layer. This wll need refinements if we plan to support + //several scalable layers ... 
+ if (com->nalu_mode.extract_mode==1) ch->nalu_extract_mode |= GF_ISOM_NALU_EXTRACT_ANNEXB_FLAG | GF_ISOM_NALU_EXTRACT_VDRD_FLAG | GF_ISOM_NALU_EXTRACT_LAYER_ONLY; + gf_isom_set_nalu_extract_mode(ch->owner->mov, ch->track, ch->nalu_extract_mode); + break; default: break; } - } return GF_NOT_SUPPORTED; } diff --git a/modules/isom_in/read_ch.c b/modules/isom_in/read_ch.c index 360c0a8..f325e71 100644 --- a/modules/isom_in/read_ch.c +++ b/modules/isom_in/read_ch.c @@ -268,7 +268,7 @@ next_segment: } } /*rewrite all upcoming SPS/PPS into the samples*/ - gf_isom_set_nalu_extract_mode(read->mov, ch->track, GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG); + gf_isom_set_nalu_extract_mode(read->mov, ch->track, ch->nalu_extract_mode); ch->last_state = GF_OK; @@ -464,7 +464,6 @@ void isor_reader_get_sample(ISOMChannel *ch) ch->sample_num++; fetch_next: ch->sample = gf_isom_get_sample(ch->owner->mov, ch->track, ch->sample_num, &ivar); - /*if sync shadow / carousel RAP skip*/ if (ch->sample && (ch->sample->IsRAP==2)) { gf_isom_sample_del(&ch->sample); @@ -530,6 +529,7 @@ fetch_next: ch->last_state = GF_OK; ch->current_slh.accessUnitEndFlag = ch->current_slh.accessUnitStartFlag = 1; ch->current_slh.accessUnitLength = ch->sample->dataLength; + ch->current_slh.au_duration = gf_isom_get_sample_duration(ch->owner->mov, ch->track, ch->sample_num); /*still seeking or not ?*/ if (ch->start <= ch->sample->DTS + ch->sample->CTS_Offset) { ch->current_slh.decodingTimeStamp = ch->sample->DTS; @@ -661,8 +661,8 @@ void isor_flush_data(ISOMReader *read, Bool check_buffer_level, Bool is_chunk_fl } } if (buffer_full) { - read->in_data_flush = 0; read->has_pending_segments++; + read->in_data_flush = 0; gf_mx_v(read->segment_mutex); if (count) { GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[IsoMedia] Buffer level %d ms higher than max allowed %d ms - skipping dispatch\n", com.buffer.occupancy, com.buffer.max)); diff --git a/modules/mp3_in/mad_dec.c b/modules/mp3_in/mad_dec.c index 7f3f7ca..1efe787 100644 --- a/modules/mp3_in/mad_dec.c +++ b/modules/mp3_in/mad_dec.c @@ -202,7 +202,7 @@ static GF_Err MAD_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capab static GF_Err MAD_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/mpd_in/mpd_in.c b/modules/mpd_in/mpd_in.c index 7b5abbe..d4ecb24 100644 --- a/modules/mpd_in/mpd_in.c +++ b/modules/mpd_in/mpd_in.c @@ -30,6 +30,13 @@ #include #include +typedef enum +{ + MPDIN_BUFFER_NONE=0, + MPDIN_BUFFER_MIN=1, + MPDIN_BUFFER_SEGMENTS=2 +} MpdInBuffer; + typedef struct __mpd_module { /* GPAC Service object (i.e. how this module is seen by the terminal)*/ @@ -44,8 +51,9 @@ typedef struct __mpd_module Bool connection_ack_sent; Bool in_seek; Bool memory_storage; - Bool use_max_res, immediate_switch, allow_http_abort, enable_buffering; + Bool use_max_res, immediate_switch, allow_http_abort; u32 use_low_latency; + MpdInBuffer buffer_mode; Double previous_start_range; /*max width & height in all active representations*/ u32 width, height; @@ -103,7 +111,9 @@ static void MPD_NotifyData(GF_MPDGroup *group, Bool chunk_flush) { GF_NetworkCommand com; memset(&com, 0, sizeof(GF_NetworkCommand)); - com.base.command_type = chunk_flush ? 
GF_NET_SERVICE_PROXY_CHUNK_RECEIVE : GF_NET_SERVICE_PROXY_SEGMENT_RECEIVE; + com.proxy_data.command_type = GF_NET_SERVICE_PROXY_DATA_RECEIVE; + com.proxy_data.is_chunk = chunk_flush; + com.proxy_data.is_live = gf_dash_is_dynamic_mpd(group->mpdin->dash); group->segment_ifce->ServiceCommand(group->segment_ifce, &com); } @@ -124,10 +134,11 @@ static GF_Err MPD_ClientQuery(GF_InputService *ifce, GF_NetworkCommand *param) for (i=0; i<gf_dash_get_group_count(mpdin->dash); i++) { GF_MPDGroup *group; - if (!gf_dash_is_group_selected(mpdin->dash, i)) continue; + if (!gf_dash_is_group_selectable(mpdin->dash, i)) continue; group = gf_dash_get_group_udta(mpdin->dash, i); if (group->segment_ifce == ifce) { gf_dash_group_get_segment_init_url(mpdin->dash, i, &param->url_query.start_range, &param->url_query.end_range); + param->url_query.current_download = 0; return GF_OK; } } @@ -314,9 +325,12 @@ GF_InputService *MPD_GetInputServiceForChannel(GF_MPD_In *mpdin, LPNETCHANNEL ch { GF_Channel *ch; if (!channel) { - if (gf_dash_is_group_selected(mpdin->dash, 0)) { - GF_MPDGroup *mudta = gf_dash_get_group_udta(mpdin->dash, 0); - return mudta ? mudta->segment_ifce : NULL; + u32 i; + for (i=0; i<gf_dash_get_group_count(mpdin->dash); i++) { + if (gf_dash_is_group_selectable(mpdin->dash, i)) { + GF_MPDGroup *mudta = gf_dash_get_group_udta(mpdin->dash, i); + if (mudta && mudta->segment_ifce) return mudta->segment_ifce; + } } return NULL; } @@ -391,7 +405,6 @@ static void mpdin_dash_segment_netio(void *cbk, GF_NETIO_Parameter *param) if (param->msg_type == GF_NETIO_DATA_TRANSFERED) { u32 bytes_per_sec; const char *url; - u64 start_time = gf_dm_sess_get_utc_start(group->sess); gf_dm_sess_get_stats(group->sess, NULL, &url, NULL, NULL, &bytes_per_sec, NULL); GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MPD_IN] End of file %s download at UTC "LLU" ms - estimated bandwidth %d kbps - started file or last chunk at UTC "LLU"\n", url, gf_net_get_utc(), 8*bytes_per_sec/1000, gf_dm_sess_get_utc_start(group->sess))); } @@ -520,15 +533,24 @@ GF_Err mpdin_dash_io_on_dash_event(GF_DASHFileIO *dashio, GF_DASHEventType dash_ } if (dash_evt==GF_DASH_EVENT_SELECT_GROUPS) { - const char *opt; - for (i=0; i<gf_dash_get_group_count(mpdin->dash); i++) { - /*todo: select groups based on user criteria*/ - gf_dash_group_select(mpdin->dash, i, 1); - } - opt = gf_modules_get_option((GF_BaseInterface *)mpdin->plug, "Systems", "Language3CC"); - if (opt && strcmp(opt, "und")) - gf_dash_groups_set_language(mpdin->dash, opt); + //configure buffer in dynamic mode without low latency: we indicate how much the player will buffer + if (gf_dash_is_dynamic_mpd(mpdin->dash) && !mpdin->use_low_latency) { + u32 buffer_ms = 0; + const char *opt = gf_modules_get_option((GF_BaseInterface *)mpdin->plug, "Network", "BufferLength"); + if (opt) buffer_ms = atoi(opt); + + //use min buffer from MPD + if (mpdin->buffer_mode>=MPDIN_BUFFER_MIN) { + u32 mpd_buffer_ms = gf_dash_get_min_buffer_time(mpdin->dash); + if (mpd_buffer_ms > buffer_ms) + buffer_ms = mpd_buffer_ms; + } + if (buffer_ms) { + gf_dash_set_user_buffer(mpdin->dash, buffer_ms); + } + } + //let the player decide which group to play: we declare everything return GF_OK; } @@ -538,7 +560,8 @@ GF_Err mpdin_dash_io_on_dash_event(GF_DASHFileIO *dashio, GF_DASHEventType dash_ /*select input services if possible*/ for (i=0; i<gf_dash_get_group_count(mpdin->dash); i++) { const char *mime, *init_segment; - if (!gf_dash_is_group_selected(mpdin->dash, i)) + //let the player decide which group to play + if (!gf_dash_is_group_selectable(mpdin->dash, i)) continue; mime = gf_dash_group_get_segment_mime(mpdin->dash, i); @@ -615,20 +638,21 @@ GF_Err
mpdin_dash_io_on_dash_event(GF_DASHFileIO *dashio, GF_DASHEventType dash_ GF_Err MPD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const char *url) { - GF_MPD_In *mpdin = (GF_MPD_In*) plug->priv; - const char *opt; - GF_Err e; - s32 shift_utc_ms; + GF_MPD_In *mpdin = (GF_MPD_In*) plug->priv; + const char *opt; + GF_Err e; + s32 shift_utc_ms, debug_adaptation_set; u32 max_cache_duration, auto_switch_count, init_timeshift; Bool use_server_utc; GF_DASHInitialSelectionMode first_select_mode; Bool keep_files, disable_switching; - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MPD_IN] Received Service Connection request (%p) from terminal for %s\n", serv, url)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MPD_IN] Received Service Connection request (%p) from terminal for %s\n", serv, url)); - if (!mpdin|| !serv || !url) return GF_BAD_PARAM; + if (!mpdin || !serv || !url) + return GF_BAD_PARAM; - mpdin->service = serv; + mpdin->service = serv; mpdin->dash_io.udta = mpdin; mpdin->dash_io.delete_cache_file = mpdin_dash_io_delete_cache_file; @@ -683,16 +707,27 @@ GF_Err MPD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const c mpdin->memory_storage = (opt && !strcmp(opt, "yes")) ? 1 : 0; opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "UseMaxResolution"); - if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "UseMaxResolution", "yes"); - mpdin->use_max_res = (!opt || !strcmp(opt, "yes")) ? 1 : 0; + if (!opt) { +#if defined(_WIN32_WCE) || defined(GPAC_ANDROID) || defined(GPAC_IPHONE) + opt = "yes"; +#else + opt = "no"; +#endif + gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "UseMaxResolution", opt); + } + mpdin->use_max_res = !strcmp(opt, "yes") ? 1 : 0; opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "ImmediateSwitching"); if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "ImmediateSwitching", "no"); mpdin->immediate_switch = (opt && !strcmp(opt, "yes")) ? 1 : 0; - opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "EnableBuffering"); - if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "EnableBuffering", "no"); - mpdin->enable_buffering = (opt && !strcmp(opt, "yes")) ? 1 : 0; + opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "BufferingMode"); + if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "BufferingMode", "minBuffer"); + + if (opt && !strcmp(opt, "segments")) mpdin->buffer_mode = MPDIN_BUFFER_SEGMENTS; + else if (opt && !strcmp(opt, "none")) mpdin->buffer_mode = MPDIN_BUFFER_NONE; + else mpdin->buffer_mode = MPDIN_BUFFER_MIN; + opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "LowLatency"); if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "LowLatency", "no"); @@ -712,7 +747,6 @@ GF_Err MPD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const c opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "UseServerUTC"); if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "UseServerUTC", "yes"); use_server_utc = (opt && !strcmp(opt, "yes")) ? 
1 : 0; - mpdin->in_seek = 0; mpdin->previous_start_range = -1; @@ -721,8 +755,8 @@ GF_Err MPD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const c opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "InitialTimeshift"); if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "InitialTimeshift", "0"); if (opt) init_timeshift = atoi(opt); - - mpdin->dash = gf_dash_new(&mpdin->dash_io, max_cache_duration, auto_switch_count, keep_files, disable_switching, first_select_mode, mpdin->enable_buffering, init_timeshift); + + mpdin->dash = gf_dash_new(&mpdin->dash_io, max_cache_duration, auto_switch_count, keep_files, disable_switching, first_select_mode, (mpdin->buffer_mode == MPDIN_BUFFER_SEGMENTS) ? 1 : 0, init_timeshift); if (!mpdin->dash) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[MPD_IN] Error - cannot create DASH Client for %s\n", url)); @@ -733,17 +767,19 @@ GF_Err MPD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const c gf_dash_set_utc_shift(mpdin->dash, shift_utc_ms); gf_dash_enable_utc_drift_compensation(mpdin->dash, use_server_utc); - opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "UseScreenResolution"); - if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "UseScreenResolution", "yes"); + //default mode is no for the time being + if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "UseScreenResolution", "no"); if (!opt || !strcmp(opt, "yes")) { GF_NetworkCommand com; memset(&com, 0, sizeof(GF_NetworkCommand)); com.base.command_type = GF_NET_SERVICE_MEDIA_CAP_QUERY; gf_term_on_command(serv, &com, GF_OK); + com.mcaps.width = 1920; + com.mcaps.height = 1080; if (com.mcaps.width && com.mcaps.height) { - gf_dash_set_max_resolution(mpdin->dash, com.mcaps.width, com.mcaps.height); + gf_dash_set_max_resolution(mpdin->dash, com.mcaps.width, com.mcaps.height, com.mcaps.display_bit_depth); } } @@ -757,6 +793,13 @@ GF_Err MPD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const c gf_dash_set_segment_expiration_threshold(mpdin->dash, atoi(opt)); } + + opt = gf_modules_get_option((GF_BaseInterface *)plug, "DASH", "DebugAdaptationSet"); + if (!opt) gf_modules_set_option((GF_BaseInterface *)plug, "DASH", "DebugAdaptationSet", "-1"); + debug_adaptation_set = opt ? 
atoi(opt) : -1; + + gf_dash_debug_group(mpdin->dash, debug_adaptation_set); + /*dash thread starts at the end of gf_dash_open */ e = gf_dash_open(mpdin->dash, url); if (e) { @@ -775,8 +818,10 @@ static GF_Descriptor *MPD_GetServiceDesc(GF_InputService *plug, u32 expect_type, for (i=0; idash); i++) { GF_Descriptor *desc; GF_MPDGroup *mudta; +#if 0 if (!gf_dash_is_group_selected(mpdin->dash, i)) continue; +#endif mudta = gf_dash_get_group_udta(mpdin->dash, i); if (!mudta) continue; if (mudta->service_descriptor_fetched) continue; @@ -847,6 +892,9 @@ GF_Err MPD_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) case GF_NET_SERVICE_QUALITY_SWITCH: gf_dash_switch_quality(mpdin->dash, com->switch_quality.up, mpdin->immediate_switch); return GF_OK; + + default: + break; } /*not supported*/ if (!com->base.on_channel) return GF_NOT_SUPPORTED; @@ -859,12 +907,15 @@ GF_Err MPD_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) /* we are interactive (that's the whole point of MPD) */ return GF_OK; - /*we should get it from MPD minBufferTime*/ case GF_NET_CHAN_BUFFER: - if (mpdin->enable_buffering) { - com->buffer.max = gf_dash_get_min_buffer_time(mpdin->dash); + /*get it from MPD minBufferTime - if not in low latency mode, indicate the value given in MPD (not possible to fetch segments earlier) - to be more precise we should get the min segment duration for this group*/ + if (!mpdin->use_low_latency && (mpdin->buffer_mode>=MPDIN_BUFFER_MIN) ) { + u32 max = gf_dash_get_min_buffer_time(mpdin->dash); + if (max>com->buffer.max) + com->buffer.max = max; + if (! gf_dash_is_dynamic_mpd(mpdin->dash)) { - com->buffer.min = 200; + com->buffer.min = 1; } } return GF_OK; @@ -904,6 +955,7 @@ GF_Err MPD_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) idx = MPD_GetGroupIndexForChannel(mpdin, com->play.on_channel); if (idx>=0) { + gf_dash_group_select(mpdin->dash, idx, GF_TRUE); gf_dash_set_group_done(mpdin->dash, idx, 0); com->play.dash_segment_switch = gf_dash_group_segment_switch_forced(mpdin->dash, idx); } @@ -911,8 +963,9 @@ GF_Err MPD_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) /*don't forward commands, we are killing the service anyway ...*/ if (gf_dash_get_period_switch_status(mpdin->dash) ) return GF_OK; } else { - s32 idx = MPD_GetGroupIndexForChannel(mpdin, com->play.on_channel); + idx = MPD_GetGroupIndexForChannel(mpdin, com->play.on_channel); if (idx>=0) + gf_dash_group_select(mpdin->dash, idx, GF_TRUE); com->play.start_range = gf_dash_group_get_start_range(mpdin->dash, idx); } @@ -963,7 +1016,7 @@ Bool MPD_CanHandleURLInService(GF_InputService *plug, const char *url) */ GF_MPD_In *mpdin = (GF_MPD_In*) plug->priv; GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MPD_IN] Received Can Handle URL In Service (%p) request from terminal for %s\n", mpdin->service, url)); - if (!plug || !plug->priv) return GF_SERVICE_ERROR; + if (!plug || !plug->priv) return GF_FALSE; if (gf_dash_get_url(mpdin->dash) && !strcmp(gf_dash_get_url(mpdin->dash) , url)) { return 1; } else { diff --git a/modules/mpegts_in/mpegts_in.c b/modules/mpegts_in/mpegts_in.c index 6f2d5fe..2c9f98d 100644 --- a/modules/mpegts_in/mpegts_in.c +++ b/modules/mpegts_in/mpegts_in.c @@ -34,7 +34,8 @@ static const char * MIMES[] = { "video/mpeg-2", "video/mp2t", "video/mpeg", NULL}; -#define M2TS_BUFFER_MAX 200 +//when regulating data rate from file using PCR, this is the maximum sleep we tolerate +#define M2TS_MAX_SLEEP 200 typedef struct { char *fragment; @@ -74,6 +75,8 @@ typedef struct Bool skip_regulation; 
Bool has_pending_segments; + Bool in_data_flush; + Bool hybrid_on; }M2TSIn; @@ -383,7 +386,7 @@ static void MP2TS_SendPacket(M2TSIn *m2ts, GF_M2TS_PES_PCK *pck) #endif slh.compositionTimeStampFlag = 1; slh.compositionTimeStamp = pck->PTS; - if (pck->DTS) { + if (pck->DTS != pck->PTS) { slh.decodingTimeStampFlag = 1; slh.decodingTimeStamp = pck->DTS; } @@ -666,7 +669,7 @@ static void M2TS_OnEvent(GF_M2TS_Demuxer *ts, u32 evt_type, void *param) diff = (u32) pcr_diff - (stb - ts->stb_at_last_pcr); } } - if (diff<-100) { + if (diff<-400) { GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[M2TS In] Demux not going fast enough according to PCR (drift %d, pcr: "LLU", last pcr: "LLU")\n", diff, pcr, ts->pcr_last)); } else if (diff>0) { u32 sleep_for=1; @@ -685,7 +688,7 @@ static void M2TS_OnEvent(GF_M2TS_Demuxer *ts, u32 evt_type, void *param) } /*We don't sleep for the entire buffer occupancy, because we would take the risk of starving the audio chains. We try to keep buffers half full*/ - sleep_for = MIN(com.buffer.occupancy/2, M2TS_BUFFER_MAX); + sleep_for = MIN(com.buffer.occupancy/2, M2TS_MAX_SLEEP); #ifndef GPAC_DISABLE_LOG if (!nb_sleep) { @@ -767,6 +770,35 @@ static void M2TS_OnEvent(GF_M2TS_Demuxer *ts, u32 evt_type, void *param) } } break; + + case GF_M2TS_EVT_TEMI_LOCATION: + { + GF_NetworkCommand com; + memset(&com, 0, sizeof(com)); + com.addon_info.command_type = GF_NET_ASSOCIATED_CONTENT_LOCATION; + com.addon_info.external_URL = ((GF_M2TS_TemiLocationDescriptor*)param)->external_URL; + com.addon_info.is_announce = ((GF_M2TS_TemiLocationDescriptor*)param)->is_announce; + com.addon_info.is_splicing = ((GF_M2TS_TemiLocationDescriptor*)param)->is_splicing; + com.addon_info.activation_countdown = ((GF_M2TS_TemiLocationDescriptor*)param)->activation_countdown; + com.addon_info.reload_external = ((GF_M2TS_TemiLocationDescriptor*)param)->reload_external; + com.addon_info.timeline_id = ((GF_M2TS_TemiLocationDescriptor*)param)->timeline_id; + gf_term_on_command(m2ts->service, &com, GF_OK); + } + break; + case GF_M2TS_EVT_TEMI_TIMECODE: + { + GF_NetworkCommand com; + memset(&com, 0, sizeof(com)); + com.addon_time.command_type = GF_NET_ASSOCIATED_CONTENT_TIMING; + com.addon_time.timeline_id = ((GF_M2TS_TemiTimecodeDescriptor*)param)->timeline_id; + com.addon_time.media_pts = ((GF_M2TS_TemiTimecodeDescriptor*)param)->pes_pts; + com.addon_time.media_timescale = ((GF_M2TS_TemiTimecodeDescriptor*)param)->media_timescale; + com.addon_time.media_timestamp = ((GF_M2TS_TemiTimecodeDescriptor*)param)->media_timestamp; + com.addon_time.force_reload = ((GF_M2TS_TemiTimecodeDescriptor*)param)->force_reload; + com.addon_time.is_paused = ((GF_M2TS_TemiTimecodeDescriptor*)param)->is_paused; + gf_term_on_command(m2ts->service, &com, GF_OK); + } + break; } } @@ -898,6 +930,13 @@ static GF_Err M2TS_QueryNextFile(void *udta, u32 query_type, const char **out_ur return query_ret; } +enum +{ + GF_M2TS_PUSH_SEGMENT, + GF_M2TS_PUSH_CHUNK, + GF_M2TS_FLUSH_DATA +}; + void m2ts_flush_data(M2TSIn *m2ts, u32 flush_type) { u64 start_byterange, end_byterange; @@ -905,10 +944,16 @@ void m2ts_flush_data(M2TSIn *m2ts, u32 flush_type) u32 refresh_type = 0; const char *url; + if (m2ts->in_data_flush) { + if (flush_type==GF_M2TS_PUSH_SEGMENT) + m2ts->has_pending_segments++; + return; + } gf_mx_p(m2ts->mx); + m2ts->in_data_flush = 1; //check buffer level when start of new segment - if (flush_type<=1) { + if (flush_type<=GF_M2TS_PUSH_CHUNK) { GF_NetworkCommand com; /*query buffer level on each channel, don't sleep if too low*/ memset(&com, 
0, sizeof(GF_NetworkCommand)); @@ -916,22 +961,25 @@ void m2ts_flush_data(M2TSIn *m2ts, u32 flush_type) gf_term_on_command(m2ts->service, &com, GF_OK); if (com.buffer.occupancy && (com.buffer.occupancy >= com.buffer.max)) { //count completed segment that were not dispatched - if (flush_type==1) + if (flush_type==GF_M2TS_PUSH_SEGMENT) m2ts->has_pending_segments++; + m2ts->in_data_flush = 0; gf_mx_v(m2ts->mx); return; } } - else if (flush_type==2) { + else if (0 && flush_type==GF_M2TS_FLUSH_DATA) { if (! m2ts->has_pending_segments) { + m2ts->in_data_flush = 0; gf_mx_v(m2ts->mx); return; } } - e = M2TS_QueryNextFile(m2ts, (flush_type==2) ? 2 : 1, &url, &start_byterange, &end_byterange, &refresh_type); + e = M2TS_QueryNextFile(m2ts, (flush_type==GF_M2TS_FLUSH_DATA) ? 2 : 1, &url, &start_byterange, &end_byterange, &refresh_type); if (e) { + m2ts->in_data_flush = 0; gf_mx_v(m2ts->mx); return; } @@ -949,10 +997,10 @@ void m2ts_flush_data(M2TSIn *m2ts, u32 flush_type) } } + m2ts->in_data_flush = 0; gf_mx_v(m2ts->mx); } - static GF_Err M2TS_ConnectService(GF_InputService *plug, GF_ClientService *serv, const char *url) { GF_Err e; @@ -991,8 +1039,13 @@ static GF_Err M2TS_ConnectService(GF_InputService *plug, GF_ClientService *serv, //get byte range if any (local playback) if (url) { u64 start_byterange, end_byterange; + gf_mx_p(m2ts->mx); + m2ts->in_data_flush = 1; M2TS_QueryNextFile(m2ts, 0, NULL, &start_byterange, &end_byterange, NULL); e = gf_m2ts_demux_file(m2ts->ts, url, start_byterange, end_byterange, 0, 0); + M2TS_QueryNextFile(m2ts, 3, NULL, NULL, NULL, NULL); + m2ts->in_data_flush = 0; + gf_mx_v(m2ts->mx); } else { e = GF_OK; } @@ -1256,17 +1309,13 @@ static GF_Err M2TS_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) } return GF_OK; } - if (com->command_type == GF_NET_SERVICE_PROXY_CHUNK_RECEIVE) { - m2ts_flush_data(m2ts, 1); - return GF_OK; - } - if (com->command_type == GF_NET_SERVICE_PROXY_SEGMENT_RECEIVE) { - m2ts_flush_data(m2ts, 0); + if (com->command_type == GF_NET_SERVICE_PROXY_DATA_RECEIVE) { + m2ts_flush_data(m2ts, com->proxy_data.is_chunk ? 
GF_M2TS_PUSH_CHUNK : GF_M2TS_PUSH_SEGMENT); return GF_OK; } if (com->command_type == GF_NET_SERVICE_FLUSH_DATA) { if (plug->query_proxy) - m2ts_flush_data(m2ts, 2); + m2ts_flush_data(m2ts, GF_M2TS_FLUSH_DATA); return GF_OK; } @@ -1278,16 +1327,13 @@ static GF_Err M2TS_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) return GF_NOT_SUPPORTED; /*we cannot seek stream by stream*/ case GF_NET_CHAN_INTERACTIVE: + if (m2ts->ts->file) return GF_OK; return GF_NOT_SUPPORTED; case GF_NET_CHAN_BUFFER: + //do not override config if (ts->dnload || plug->query_proxy) { if (!com->buffer.max) com->buffer.max = 1000; - com->buffer.min = com->buffer.max; - } else if (ts->file) { - com->buffer.max = M2TS_BUFFER_MAX; } - if (m2ts->low_latency_mode) - com->buffer.max = M2TS_BUFFER_MAX; return GF_OK; case GF_NET_CHAN_DURATION: com->duration.duration = ts->duration; diff --git a/modules/mse_in/mse_in.c b/modules/mse_in/mse_in.c index d45effc..8527fe6 100644 --- a/modules/mse_in/mse_in.c +++ b/modules/mse_in/mse_in.c @@ -212,6 +212,8 @@ static GF_Err MSE_ServiceCommand(GF_InputService *plug, GF_NetworkCommand *com) return GF_NOT_SUPPORTED; case GF_NET_SERVICE_FLUSH_DATA: return GF_NOT_SUPPORTED; + default: + break; } if (!com->base.on_channel) { diff --git a/modules/ogg/ogg_in.c b/modules/ogg/ogg_in.c index ca21b02..f35b6f9 100644 --- a/modules/ogg/ogg_in.c +++ b/modules/ogg/ogg_in.c @@ -224,7 +224,7 @@ static void OGG_GetStreamInfo(ogg_packet *oggpacket, OGGInfo *info) memset(info, 0, sizeof(OGGInfo)); /*vorbis*/ - if ((oggpacket->bytes >= 7) && !strncmp(&oggpacket->packet[1], "vorbis", 6)) { + if ((oggpacket->bytes >= 7) && !strncmp((char *) &oggpacket->packet[1], "vorbis", 6)) { info->streamType = GF_STREAM_AUDIO; oggpack_readinit(&opb, oggpacket->packet, oggpacket->bytes); oggpack_adv( &opb, 88); @@ -236,7 +236,7 @@ static void OGG_GetStreamInfo(ogg_packet *oggpacket, OGGInfo *info) info->type = OGG_VORBIS; } /*speex*/ - else if ((oggpacket->bytes >= 7) && !strncmp(&oggpacket->packet[0], "Speex", 5)) { + else if ((oggpacket->bytes >= 7) && !strncmp((char *) &oggpacket->packet[0], "Speex", 5)) { info->streamType = GF_STREAM_AUDIO; oggpack_readinit(&opb, oggpacket->packet, oggpacket->bytes); oggpack_adv(&opb, 224); @@ -247,19 +247,19 @@ static void OGG_GetStreamInfo(ogg_packet *oggpacket, OGGInfo *info) info->num_init_headers = 1; } /*flac*/ - else if ((oggpacket->bytes >= 4) && !strncmp(&oggpacket->packet[0], "fLaC", 4)) { + else if ((oggpacket->bytes >= 4) && !strncmp((char *) &oggpacket->packet[0], "fLaC", 4)) { info->streamType = GF_STREAM_AUDIO; info->type = 3; info->num_init_headers = OGG_FLAC; } /*theora*/ - else if ((oggpacket->bytes >= 7) && !strncmp(&oggpacket->packet[1], "theora", 6)) { + else if ((oggpacket->bytes >= 7) && !strncmp((char *) &oggpacket->packet[1], "theora", 6)) { GF_BitStream *bs; u32 fps_numerator, fps_denominator, keyframe_freq_force; info->streamType = GF_STREAM_VISUAL; info->type = OGG_THEORA; - bs = gf_bs_new(oggpacket->packet, oggpacket->bytes, GF_BITSTREAM_READ); + bs = gf_bs_new((char *) oggpacket->packet, oggpacket->bytes, GF_BITSTREAM_READ); gf_bs_read_int(bs, 56); gf_bs_read_int(bs, 8); /* major version num */ gf_bs_read_int(bs, 8); /* minor version num */ @@ -402,8 +402,8 @@ void OGG_SendPackets(OGGReader *read, OGGStream *st, ogg_packet *oggpacket) slh.randomAccessPointFlag = 1; slh.compositionTimeStampFlag = 1; slh.compositionTimeStamp = st->ogg_ts; - gf_term_on_sl_packet(read->service, st->ch, oggpacket->packet, oggpacket->bytes, &slh, GF_OK); - st->ogg_ts 
+= gf_vorbis_check_frame(&st->vp, oggpacket->packet, oggpacket->bytes); + gf_term_on_sl_packet(read->service, st->ch, (char *) oggpacket->packet, oggpacket->bytes, &slh, GF_OK); + st->ogg_ts += gf_vorbis_check_frame(&st->vp, (char *) oggpacket->packet, oggpacket->bytes); } else if (st->info.type==OGG_THEORA) { oggpack_buffer opb; @@ -415,7 +415,7 @@ void OGG_SendPackets(OGGReader *read, OGGStream *st, ogg_packet *oggpacket) slh.randomAccessPointFlag = oggpackB_read(&opb, 1) ? 0 : 1; slh.compositionTimeStampFlag = 1; slh.compositionTimeStamp = st->ogg_ts; - gf_term_on_sl_packet(read->service, st->ch, oggpacket->packet, oggpacket->bytes, &slh, GF_OK); + gf_term_on_sl_packet(read->service, st->ch, (char *) oggpacket->packet, oggpacket->bytes, &slh, GF_OK); st->ogg_ts += 1000; } } @@ -461,7 +461,7 @@ void OGG_Process(OGGReader *read) while (ogg_stream_packetout(&st->os, &oggpacket ) > 0 ) { GF_BitStream *bs; if (st->info.type==OGG_VORBIS) - gf_vorbis_parse_header(&st->vp, oggpacket.packet, oggpacket.bytes); + gf_vorbis_parse_header(&st->vp, (char *) oggpacket.packet, oggpacket.bytes); bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); if (st->dsi) { @@ -471,7 +471,7 @@ void OGG_Process(OGGReader *read) st->dsi_len=0; } gf_bs_write_u16(bs, oggpacket.bytes); - gf_bs_write_data(bs, oggpacket.packet, oggpacket.bytes); + gf_bs_write_data(bs, (char *) oggpacket.packet, oggpacket.bytes); gf_bs_get_content(bs, (char **)&st->dsi, &st->dsi_len); gf_bs_del(bs); st->parse_headers--; diff --git a/modules/ogg/theora_dec.c b/modules/ogg/theora_dec.c index 3c2e921..acfa4dd 100644 --- a/modules/ogg/theora_dec.c +++ b/modules/ogg/theora_dec.c @@ -150,7 +150,7 @@ static GF_Err THEO_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capa static GF_Err THEO_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/ogg/vorbis_dec.c b/modules/ogg/vorbis_dec.c index 8486179..d4c7173 100644 --- a/modules/ogg/vorbis_dec.c +++ b/modules/ogg/vorbis_dec.c @@ -200,7 +200,7 @@ static GFINLINE void vorbis_to_intern(u32 samples, Float **pcm, char *buf, u32 c static GF_Err VORB_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/openhevc_dec/openhevc_dec.c b/modules/openhevc_dec/openhevc_dec.c index a972c72..1b73018 100644 --- a/modules/openhevc_dec/openhevc_dec.c +++ b/modules/openhevc_dec/openhevc_dec.c @@ -30,10 +30,17 @@ #include #include -//#define OPEN_SHVC +#define OPEN_SHVC #if defined(WIN32) && !defined(_WIN32_WCE) && !defined(__GNUC__) # pragma comment(lib, "libLibOpenHevcWrapper") + +#if !defined _WIN64 +void libOpenHevcSetViewLayers(OpenHevc_Handle openHevcHandle, int val) +{ +} +#endif + #endif typedef struct @@ -41,6 +48,7 @@ typedef struct u16 ES_ID; u32 width, stride, height, out_size, pixel_ar, layer, nb_threads, luma_bpp, chroma_bpp; + Bool output_as_8bit; Bool is_init; Bool had_pic; @@ -50,41 +58,75 @@ typedef struct GF_ESD *esd; OpenHevc_Handle openHevcHandle; -#ifdef OPEN_SHVC u32 nb_layers; - Bool base_only; -#endif + u32 output_cb_size; + + u32 display_bpp; + Bool conv_to_8bit; + char *conv_buffer; + } HEVCDec; +static GF_Err HEVC_ConfigurationScalableStream(HEVCDec *ctx, GF_ESD *esd) +{ + GF_HEVCConfig *cfg = NULL; + char *data; + u32 data_len; + GF_BitStream *bs; + u32 i, j; + if (!esd->decoderConfig->decoderSpecificInfo 
|| !esd->decoderConfig->decoderSpecificInfo->data) + return GF_OK; + cfg = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, 0); + if (!cfg) return GF_NON_COMPLIANT_BITSTREAM; + if (ctx->nalu_size_length != cfg->nal_unit_size) + return GF_NON_COMPLIANT_BITSTREAM; + + bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); + for (i=0; i< gf_list_count(cfg->param_array); i++) { + GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(cfg->param_array, i); + for (j=0; j< gf_list_count(ar->nalus); j++) { + GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j); + gf_bs_write_int(bs, sl->size, 8*ctx->nalu_size_length); + gf_bs_write_data(bs, sl->data, sl->size); + } + } + + gf_bs_get_content(bs, &data, &data_len); + gf_bs_del(bs); + libOpenHevcDecode(ctx->openHevcHandle, (u8 *)data, data_len, 0); + gf_free(data); + + libOpenHevcSetActiveDecoders(ctx->openHevcHandle, 2); + libOpenHevcSetViewLayers(ctx->openHevcHandle, 1); + return GF_OK; +} static GF_Err HEVC_ConfigureStream(HEVCDec *ctx, GF_ESD *esd) { - u32 i, j; + u32 i, j; GF_HEVCConfig *cfg = NULL; ctx->ES_ID = esd->ESID; ctx->width = ctx->height = ctx->out_size = ctx->luma_bpp = ctx->chroma_bpp = 0; -#ifdef OPEN_SHVC ctx->nb_layers = 1; - ctx->base_only = GF_FALSE; -#endif - if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { - cfg = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, 0); + HEVCState hevc; + memset(&hevc, 0, sizeof(HEVCState)); + + cfg = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, 0); if (!cfg) return GF_NON_COMPLIANT_BITSTREAM; - ctx->nalu_size_length = cfg->nal_unit_size; - + ctx->nalu_size_length = cfg->nal_unit_size; + for (i=0; i< gf_list_count(cfg->param_array); i++) { - GF_HEVCParamArray *ar = gf_list_get(cfg->param_array, i); - if (ar->type==GF_HEVC_NALU_SEQ_PARAM) { - for (j=0; j< gf_list_count(ar->nalus); j++) { - GF_AVCConfigSlot *sl = gf_list_get(ar->nalus, j); - HEVCState hevc; - s32 idx; - u16 hdr = sl->data[0] << 8 | sl->data[1]; + GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(cfg->param_array, i); + for (j=0; j< gf_list_count(ar->nalus); j++) { + GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j); + s32 idx; + u16 hdr = sl->data[0] << 8 | sl->data[1]; + if (ar->type==GF_HEVC_NALU_SEQ_PARAM) { idx = gf_media_hevc_read_sps(sl->data, sl->size, &hevc); ctx->width = MAX(hevc.sps[idx].width, ctx->width); ctx->height = MAX(hevc.sps[idx].height, ctx->height); @@ -92,38 +134,54 @@ static GF_Err HEVC_ConfigureStream(HEVCDec *ctx, GF_ESD *esd) ctx->chroma_bpp = MAX(hevc.sps[idx].bit_depth_chroma, ctx->chroma_bpp); if (hdr & 0x1f8) { -#ifdef OPEN_SHVC - ctx->nb_layers ++; -#endif - } - } - } - } + ctx->nb_layers ++; + } + } + else if (ar->type==GF_HEVC_NALU_VID_PARAM) { + gf_media_hevc_read_vps(sl->data, sl->size, &hevc); + } + else if (ar->type==GF_HEVC_NALU_PIC_PARAM) { + gf_media_hevc_read_pps(sl->data, sl->size, &hevc); + } + } + } gf_odf_hevc_cfg_del(cfg); - } else { + } else { ctx->nalu_size_length = 0; } -#ifdef OPEN_SHVC - ctx->openHevcHandle = libOpenHevcInit(ctx->nb_threads, ctx->nb_layers, 0); -#else ctx->openHevcHandle = libOpenHevcInit(ctx->nb_threads, ctx->threading_type); -#endif - - if (esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { - 
libOpenHevcCopyExtraData(ctx->openHevcHandle, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength+8); - } #ifndef GPAC_DISABLE_LOG if (gf_log_tool_level_on(GF_LOG_CODEC, GF_LOG_DEBUG) ) { libOpenHevcSetDebugMode(ctx->openHevcHandle, 1); } #endif + + + if (esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { + libOpenHevcSetActiveDecoders(ctx->openHevcHandle, ctx->nb_layers); + libOpenHevcSetViewLayers(ctx->openHevcHandle, ctx->nb_layers-1); + + libOpenHevcCopyExtraData(ctx->openHevcHandle, (u8 *) esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); + } else { + //hardcoded values: 2 layers max, display layer 0 + libOpenHevcSetActiveDecoders(ctx->openHevcHandle, 1/*ctx->nb_layers*/); + libOpenHevcSetViewLayers(ctx->openHevcHandle, 0/*ctx->nb_layers-1*/); + } + libOpenHevcStartDecoder(ctx->openHevcHandle); ctx->stride = ((ctx->luma_bpp==8) && (ctx->chroma_bpp==8)) ? ctx->width : ctx->width * 2; ctx->out_size = ctx->stride * ctx->height * 3 / 2; + + if (ctx->output_as_8bit && (ctx->stride>ctx->width)) { + ctx->stride /=2; + ctx->out_size /= 2; + ctx->chroma_bpp = ctx->luma_bpp = 8; + ctx->conv_to_8bit = 1; + } return GF_OK; } @@ -141,9 +199,6 @@ static GF_Err HEVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) sOpt = gf_modules_get_option((GF_BaseInterface *)ifcg, "OpenHEVC", "NumThreads"); if (!sOpt) { char szO[100]; - //checkme I have perf using too many threads - if (nb_threads > 6) - nb_threads = 6; sprintf(szO, "%d", nb_threads); gf_modules_set_option((GF_BaseInterface *)ifcg, "OpenHEVC", "NumThreads", szO); ctx->nb_threads = nb_threads; @@ -157,14 +212,27 @@ static GF_Err HEVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) sOpt = gf_modules_get_option((GF_BaseInterface *)ifcg, "OpenHEVC", "ThreadingType"); if (sOpt && !strcmp(sOpt, "wpp")) ctx->threading_type = 2; - else if (sOpt && !strcmp(sOpt, "frame+wpp")) ctx->threading_type = 3; + else if (sOpt && !strcmp(sOpt, "frame+wpp")) ctx->threading_type = 4; else { ctx->threading_type = 1; - if (!sOpt) gf_modules_set_option((GF_BaseInterface *)ifcg, "OpenHEVC", "ThreadingType", "frame+wpp"); + if (!sOpt) gf_modules_set_option((GF_BaseInterface *)ifcg, "OpenHEVC", "ThreadingType", "frame"); } + sOpt = gf_modules_get_option((GF_BaseInterface *)ifcg, "Systems", "Output8bit"); + if (!sOpt) gf_modules_set_option((GF_BaseInterface *)ifcg, "Systems", "Output8bit", (ctx->display_bpp>8) ? 
"no" : "yes"); + if (sOpt && !strcmp(sOpt, "yes")) ctx->output_as_8bit = 1; + + sOpt = gf_modules_get_option((GF_BaseInterface *)ifcg, "OpenHEVC", "CBUnits"); + if (!sOpt) gf_modules_set_option((GF_BaseInterface *)ifcg, "OpenHEVC", "CBUnits", "4"); + if (sOpt) ctx->output_cb_size = atoi(sOpt); + if (!ctx->output_cb_size) ctx->output_cb_size = 4; - /*once base layer is configured, nothing to do on enhancement*/ - if (esd->dependsOnESID) return GF_OK; + + + /*RTP case: configure enhancement now*/ + if (esd->dependsOnESID) { + HEVC_ConfigurationScalableStream(ctx, esd); + return GF_OK; + } ctx->esd = esd; return HEVC_ConfigureStream(ctx, esd); @@ -180,6 +248,8 @@ static GF_Err HEVC_DetachStream(GF_BaseDecoder *ifcg, u16 ES_ID) ctx->is_init = GF_FALSE; } ctx->width = ctx->height = ctx->out_size = 0; + if (ctx->conv_buffer) gf_free(ctx->conv_buffer); + ctx->conv_buffer = NULL; return GF_OK; } @@ -190,7 +260,7 @@ static GF_Err HEVC_GetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability *cap switch (capability->CapCode) { case GF_CODEC_RESILIENT: - capability->cap.valueInt = 1; + capability->cap.valueInt = 2; break; case GF_CODEC_WIDTH: capability->cap.valueInt = ctx->width; @@ -200,6 +270,13 @@ static GF_Err HEVC_GetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability *cap break; case GF_CODEC_STRIDE: capability->cap.valueInt = ctx->stride; + if (ctx->direct_output && !ctx->conv_buffer) { + //to fix soon - currently hardcoded to 32 pixels + if ((ctx->luma_bpp==8) && (ctx->chroma_bpp==8)) + capability->cap.valueInt += 32; + else + capability->cap.valueInt += 64; + } break; case GF_CODEC_PAR: capability->cap.valueInt = ctx->pixel_ar; @@ -214,7 +291,10 @@ static GF_Err HEVC_GetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability *cap capability->cap.valueInt = 1; break; case GF_CODEC_BUFFER_MAX: - capability->cap.valueInt = 4; + capability->cap.valueInt = ctx->output_cb_size; + break; + case GF_CODEC_WANTS_THREAD: + capability->cap.valueBool= GF_TRUE; break; case GF_CODEC_PADDING_BYTES: capability->cap.valueInt = 32; @@ -222,6 +302,9 @@ static GF_Err HEVC_GetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability *cap case GF_CODEC_REORDER: capability->cap.valueInt = 1; break; + case GF_CODEC_TRUSTED_CTS: + capability->cap.valueInt = 1; + break; case GF_CODEC_DIRECT_OUTPUT: capability->cap.valueBool = 1; break; @@ -237,22 +320,26 @@ static GF_Err HEVC_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capa { HEVCDec *ctx = (HEVCDec*) ifcg->privateStack; switch (capability.CapCode) { + case GF_CODEC_DISPLAY_BPP: + ctx->display_bpp = capability.cap.valueInt; + return GF_OK; case GF_CODEC_WAIT_RAP: if (ctx->openHevcHandle) libOpenHevcFlush(ctx->openHevcHandle); return GF_OK; -#ifdef OPEN_SHVC case GF_CODEC_MEDIA_SWITCH_QUALITY: /*switch up*/ - if (capability.cap.valueInt) { - ctx->base_only = GF_FALSE; + if (capability.cap.valueInt > 0) { + libOpenHevcSetViewLayers(ctx->openHevcHandle, 1); } else { - ctx->base_only = GF_TRUE; + libOpenHevcSetViewLayers(ctx->openHevcHandle, 0); } return GF_OK; -#endif case GF_CODEC_DIRECT_OUTPUT: ctx->direct_output = GF_TRUE; + if (ctx->conv_to_8bit && ctx->out_size) + ctx->conv_buffer = gf_realloc(ctx->conv_buffer, sizeof(char)*ctx->out_size); + return GF_OK; } /*return unsupported to avoid confusion by the player (like color space changing ...) 
*/ @@ -260,11 +347,10 @@ static GF_Err HEVC_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capa } -static GF_Err HEVC_flush_picture(HEVCDec *ctx, char *outBuffer, u32 *outBufferLength ) +static GF_Err HEVC_flush_picture(HEVCDec *ctx, char *outBuffer, u32 *outBufferLength, u32 *CTS) { unsigned int a_w, a_h, a_stride, bit_depth; OpenHevc_Frame_cpy openHevcFrame; - u8 *pY, *pU, *pV; libOpenHevcGetPictureInfo(ctx->openHevcHandle, &openHevcFrame.frameInfo); @@ -273,7 +359,18 @@ static GF_Err HEVC_flush_picture(HEVCDec *ctx, char *outBuffer, u32 *outBufferLe a_h = openHevcFrame.frameInfo.nHeight; a_stride = openHevcFrame.frameInfo.nYPitch; bit_depth = openHevcFrame.frameInfo.nBitDepth; - if ((ctx->luma_bpp>8) || (ctx->chroma_bpp>8)) a_stride *= 2; + + *CTS = (u32) openHevcFrame.frameInfo.nTimeStamp; + + if (!ctx->output_as_8bit) { + if ((ctx->luma_bpp>8) || (ctx->chroma_bpp>8)) a_stride *= 2; + } else { + if (bit_depth>8) { + bit_depth=8; + + ctx->conv_to_8bit = 1; + } + } if ((ctx->width != a_w) || (ctx->height!=a_h) || (ctx->stride != a_stride) || (ctx->luma_bpp!= bit_depth) || (ctx->chroma_bpp != bit_depth) ){ ctx->width = a_w; @@ -284,24 +381,41 @@ static GF_Err HEVC_flush_picture(HEVCDec *ctx, char *outBuffer, u32 *outBufferLe ctx->luma_bpp = ctx->chroma_bpp = bit_depth; /*always force layer resize*/ *outBufferLength = ctx->out_size; + + if (ctx->conv_to_8bit && ctx->direct_output) { + ctx->conv_buffer = gf_realloc(ctx->conv_buffer, sizeof(char)*ctx->out_size); + } return GF_BUFFER_TOO_SMALL; } - if (ctx->direct_output) { - OpenHevc_Frame HVCFrame; - libOpenHevcGetOutput(ctx->openHevcHandle, 1, &HVCFrame); + if (!ctx->conv_to_8bit && ctx->direct_output) { *outBufferLength = ctx->out_size; ctx->has_pic = GF_TRUE; } else { - - pY = outBuffer; - pU = outBuffer + ctx->stride * ctx->height; - pV = outBuffer + 5*ctx->stride * ctx->height/4; - openHevcFrame.pvY = (void*) pY; - openHevcFrame.pvU = (void*) pU; - openHevcFrame.pvV = (void*) pV; - *outBufferLength = 0; - if (libOpenHevcGetOutputCpy(ctx->openHevcHandle, 1, &openHevcFrame)) { - *outBufferLength = ctx->out_size; + if (ctx->conv_to_8bit) { + OpenHevc_Frame openHevcFramePtr; + if (libOpenHevcGetOutput(ctx->openHevcHandle, 1, &openHevcFramePtr)) { + GF_VideoSurface dst; + memset(&dst, 0, sizeof(GF_VideoSurface)); + dst.width = ctx->width; + dst.height = ctx->height; + dst.pitch_y = ctx->width; + dst.video_buffer = ctx->direct_output ? 
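/*
 * HEVC_flush_picture() above compares the decoded picture properties against the
 * cached ones and, when anything changed, updates the cache and returns
 * GF_BUFFER_TOO_SMALL so the terminal reallocates the output (and, in direct output
 * mode, the 8-bit conversion buffer). A compact sketch of that check, with assumed
 * names:
 */
#include <stdbool.h>
#include <stddef.h>

typedef struct {
    unsigned width, height, stride, bit_depth;
    size_t out_size;
} pic_fmt;

static bool pic_fmt_changed(pic_fmt *cached, const pic_fmt *decoded)
{
    if (cached->width == decoded->width && cached->height == decoded->height &&
        cached->stride == decoded->stride && cached->bit_depth == decoded->bit_depth)
        return false;
    *cached = *decoded;
    /* planar 4:2:0: Y plane plus two quarter-size chroma planes */
    cached->out_size = (size_t)decoded->stride * decoded->height * 3 / 2;
    return true;   /* caller must resize its buffers before the next fetch */
}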
ctx->conv_buffer : outBuffer; + dst.pixel_format = GF_PIXEL_YV12; + + gf_color_write_yv12_10_to_yuv(&dst, (u8 *) openHevcFramePtr.pvY, (u8 *) openHevcFramePtr.pvU, (u8 *) openHevcFramePtr.pvV, (openHevcFramePtr.frameInfo.nYPitch + 32)*2, ctx->width, ctx->height, NULL); + *outBufferLength = ctx->out_size; + + if (ctx->direct_output ) + ctx->has_pic = GF_TRUE; + } + } else { + openHevcFrame.pvY = (void*) outBuffer; + openHevcFrame.pvU = (void*) (outBuffer + ctx->stride * ctx->height); + openHevcFrame.pvV = (void*) (outBuffer + 5*ctx->stride * ctx->height/4); + *outBufferLength = 0; + if (libOpenHevcGetOutputCpy(ctx->openHevcHandle, 1, &openHevcFrame)) { + *outBufferLength = ctx->out_size; + } } } return GF_OK; @@ -310,7 +424,7 @@ static GF_Err HEVC_flush_picture(HEVCDec *ctx, char *outBuffer, u32 *outBufferLe static GF_Err HEVC_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { @@ -320,18 +434,16 @@ static GF_Err HEVC_ProcessData(GF_MediaDecoder *ifcg, if (!inBuffer) { if ( libOpenHevcDecode(ctx->openHevcHandle, NULL, 0, 0) ) { - return HEVC_flush_picture(ctx, outBuffer, outBufferLength); + return HEVC_flush_picture(ctx, outBuffer, outBufferLength, CTS); } return GF_OK; } - - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("HEVC: Decoding AU %d bytes\n", inBufferLength)); - - if (!ES_ID || (ES_ID!=ctx->ES_ID) ) { + if (!ES_ID) { *outBufferLength = 0; return GF_OK; } + if (*outBufferLength < ctx->out_size) { *outBufferLength = ctx->out_size; return GF_BUFFER_TOO_SMALL; @@ -341,15 +453,16 @@ static GF_Err HEVC_ProcessData(GF_MediaDecoder *ifcg, if (ctx->had_pic) { ctx->had_pic = 0; - return HEVC_flush_picture(ctx, outBuffer, outBufferLength); + return HEVC_flush_picture(ctx, outBuffer, outBufferLength, CTS); } - - got_pic = libOpenHevcDecode(ctx->openHevcHandle, inBuffer, inBufferLength, 0); + got_pic = libOpenHevcDecode(ctx->openHevcHandle, (u8 *) inBuffer, inBufferLength, *CTS); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[HEVC Decoder] Decode CTS %d - size %d - got pic %d\n", *CTS, inBufferLength, got_pic)); if (got_pic>0) { - e = HEVC_flush_picture(ctx, outBuffer, outBufferLength); + e = HEVC_flush_picture(ctx, outBuffer, outBufferLength, CTS); if (e) return e; got_pic = 0; } + return GF_OK; } @@ -362,6 +475,13 @@ static GF_Err HEVC_GetOutputBuffer(GF_MediaDecoder *ifcg, u16 ESID, u8 **pY_or_R if (!ctx->has_pic) return GF_BAD_PARAM; ctx->has_pic = GF_FALSE; + if (ctx->conv_buffer) { + *pY_or_RGB = (u8 *) ctx->conv_buffer; + *pU = (u8 *) ctx->conv_buffer + ctx->stride * ctx->height; + *pV = (u8 *) ctx->conv_buffer + 5*ctx->stride * ctx->height/4; + return GF_OK; + } + res = libOpenHevcGetOutput(ctx->openHevcHandle, 1, &openHevcFrame); if ((res<=0) || !openHevcFrame.pvY || !openHevcFrame.pvU || !openHevcFrame.pvV) return GF_SERVICE_ERROR; diff --git a/modules/opensvc_dec/opensvc_dec.c b/modules/opensvc_dec/opensvc_dec.c index 006156e..742b0a0 100644 --- a/modules/opensvc_dec/opensvc_dec.c +++ b/modules/opensvc_dec/opensvc_dec.c @@ -80,13 +80,14 @@ static GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) /*decode all NALUs*/ count = gf_list_count(cfg->sequenceParameterSets); - SetCommandLayer(Layer, 255, 0, &i, 0);//bufindex can be reset without pb + SetCommandLayer(Layer, 255, 0, &res, 0);//bufindex can be reset without pb for (i=0; isequenceParameterSets, i); #ifndef GPAC_DISABLE_AV_PARSERS - gf_avc_get_sps_info(slc->data, slc->size, &slc->id, &w, &h, &par_n, &par_d); + 
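/*
 * In the 8-bit output path above, gf_color_write_yv12_10_to_yuv() rewrites the three
 * 10-bit planes into a single 8-bit YV12 buffer whose U and V planes sit at
 * stride*height and 5*stride*height/4, the same offsets used by the copy path. The
 * essence of the per-plane conversion is dropping the two low bits of each 16-bit
 * sample word, as sketched below (names are placeholders):
 */
#include <stdint.h>
#include <stddef.h>

static void plane_10_to_8(uint8_t *dst, size_t dst_pitch,
                          const uint16_t *src, size_t src_pitch_in_words,
                          size_t width, size_t height)
{
    size_t x, y;
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++)
            dst[x] = (uint8_t)(src[x] >> 2);   /* 10-bit -> 8-bit */
        dst += dst_pitch;
        src += src_pitch_in_words;
    }
}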
gf_avc_get_sps_info(slc->data, slc->size, &sid, &w, &h, &par_n, &par_d); #endif /*by default use the base layer*/ if (!i) { @@ -97,7 +98,7 @@ static GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) ctx->pixel_ar = (par_n<<16) || par_d; } } - res = decodeNAL(ctx->codec, slc->data, slc->size, &Picture, Layer); + res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer); if (res<0) { GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding SPS %d\n", res)); } @@ -109,7 +110,7 @@ static GF_Err OSVC_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) u32 sps_id, pps_id; GF_AVCConfigSlot *slc = gf_list_get(cfg->pictureParameterSets, i); gf_avc_get_pps_info(slc->data, slc->size, &pps_id, &sps_id); - res = decodeNAL(ctx->codec, slc->data, slc->size, &Picture, Layer); + res = decodeNAL(ctx->codec, (unsigned char *) slc->data, slc->size, &Picture, Layer); if (res<0) { GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res)); } @@ -211,7 +212,7 @@ static GF_Err OSVC_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capa static GF_Err OSVC_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { @@ -233,7 +234,7 @@ static GF_Err OSVC_ProcessData(GF_MediaDecoder *ifcg, return GF_BUFFER_TOO_SMALL; } - ctx->MaxDqId = GetDqIdMax(inBuffer, inBufferLength, ctx->nalu_size_length, ctx->DqIdTable, ctx->nalu_size_length ? 1 : 0); + ctx->MaxDqId = GetDqIdMax((unsigned char *) inBuffer, inBufferLength, ctx->nalu_size_length, ctx->DqIdTable, ctx->nalu_size_length ? 1 : 0); if (!ctx->init_layer_set) { //AVC stream in a h264 file if (ctx->MaxDqId == -1) @@ -249,12 +250,12 @@ static GF_Err OSVC_ProcessData(GF_MediaDecoder *ifcg, got_pic = 0; nalu_size = 0; - ptr = inBuffer; + ptr = (u8 *) inBuffer; if (!ctx->nalu_size_length) { u32 size; sc_size = 0; - size = gf_media_nalu_next_start_code(inBuffer, inBufferLength, &sc_size); + size = gf_media_nalu_next_start_code((u8 *) inBuffer, inBufferLength, &sc_size); if (sc_size) { ptr += size+sc_size; assert(inBufferLength >= size+sc_size); diff --git a/modules/osd/osd.c b/modules/osd/osd.c index 46ed339..a2fcffa 100644 --- a/modules/osd/osd.c +++ b/modules/osd/osd.c @@ -5,7 +5,7 @@ * Copyright (c) Telecom ParisTech 2011-2012 * All rights reserved * - * This file is part of GPAC / User Event Recorder sub-project + * This file is part of GPAC / Sampe On-Scvreen Display sub-project * * GPAC is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by diff --git a/modules/rtp_in/rtp_in.h b/modules/rtp_in/rtp_in.h index 5fd5fe7..16ebb1a 100644 --- a/modules/rtp_in/rtp_in.h +++ b/modules/rtp_in/rtp_in.h @@ -94,7 +94,7 @@ typedef struct Bool session_migration; - Bool is_svc; + Bool is_scalable; u32 cur_mid; } RTPClient; diff --git a/modules/rtp_in/sdp_load.c b/modules/rtp_in/sdp_load.c index 0dd1a91..52c9124 100644 --- a/modules/rtp_in/sdp_load.c +++ b/modules/rtp_in/sdp_load.c @@ -57,7 +57,7 @@ GF_Err RP_SetupSDP(RTPClient *rtp, GF_SDPInfo *sdp, RTPStream *stream) else if (!strcmp(att->Name, "x-session-name")) url = att->Value; else if (!strcmp(att->Name, "x-session-id")) session_id = att->Value; /*we have the H264-SVC streams*/ - else if (!strcmp(att->Name, "group") && !strncmp(att->Value, "DDP", 3)) rtp->is_svc = 1; + else if (!strcmp(att->Name, "group") && !strncmp(att->Value, "DDP", 3)) rtp->is_scalable = 1; } if 
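/*
 * The pixel_ar value set above is the usual GPAC 16.16 packing: aspect-ratio
 * numerator in the high 16 bits, denominator in the low 16 bits. Note that the
 * expression in the module uses the logical operator `||`, which collapses the
 * result to 0 or 1; the bitwise packing below is presumably what is intended.
 */
#include <stdint.h>

static uint32_t par_pack(uint16_t num, uint16_t den)
{
    return ((uint32_t)num << 16) | den;
}

static void par_unpack(uint32_t par, uint16_t *num, uint16_t *den)
{
    *num = (uint16_t)(par >> 16);
    *den = (uint16_t)(par & 0xFFFF);
}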
(range) { Start = range->start; @@ -215,7 +215,7 @@ static GF_ObjectDescriptor *RP_GetChannelOD(RTPStream *ch, u32 ch_idx) gf_list_add(od->ESDescriptors, esd); // for each channel depending on this channel, get esd, set esd->dependsOnESID and add to od - if (ch->owner->is_svc) + if (ch->owner->is_scalable) { u32 i, count; diff --git a/modules/rvc_dec/rvc_dec.c b/modules/rvc_dec/rvc_dec.c index 7e5ea36..654126c 100644 --- a/modules/rvc_dec/rvc_dec.c +++ b/modules/rvc_dec/rvc_dec.c @@ -76,7 +76,7 @@ static GF_Err RVCD_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) RVCDec *ctx = (RVCDec*) ifcg->privateStack; char* VTLFolder; char *XDF_doc = NULL; - int isAVCFile; + int isNALUFile; /*not supported in this version*/ if (esd->dependsOnESID) return GF_NOT_SUPPORTED; @@ -86,10 +86,10 @@ static GF_Err RVCD_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) VTLFolder = (char *)gf_modules_get_option((GF_BaseInterface *)ifcg, "RVCDecoder", "VTLPath"); if (!VTLFolder) { - GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[RVC_Dec] Cannot locate VTL: path is unknown. Please indicate path in GPAC config file:\n[RVCDecoder]\nVTLPath=PATH\n")); + GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[RVC Decoder] Cannot locate VTL: path is unknown. Please indicate path in GPAC config file:\n[RVCDecoder]\nVTLPath=PATH\n")); return GF_SERVICE_ERROR; } else { - GF_LOG(GF_LOG_INFO, GF_LOG_CODEC, ("[RVC_Dec] Using VTL in %s\n", VTLFolder)); + GF_LOG(GF_LOG_INFO, GF_LOG_CODEC, ("[RVC Decoder] Using VTL in %s\n", VTLFolder)); } /*initialize RVC*/ @@ -120,10 +120,19 @@ static GF_Err RVCD_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) } - if(esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_AVC) isAVCFile = 1; - else isAVCFile = 0; + switch (esd->decoderConfig->objectTypeIndication) { + case GPAC_OTI_VIDEO_AVC: + case GPAC_OTI_VIDEO_SVC: + case GPAC_OTI_VIDEO_HEVC: + case GPAC_OTI_VIDEO_SHVC: + isNALUFile = 1; + break; + default: + isNALUFile = 0; + break; + } - rvc_init(XDF_doc, VTLFolder, isAVCFile); //->data contains the uncompressed XDF + rvc_init(XDF_doc, VTLFolder, isNALUFile); //->data contains the uncompressed XDF /*free data*/ gf_free(XDF_doc); @@ -137,7 +146,7 @@ static GF_Err RVCD_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) return GF_OK; /*initialize the decoder */ - if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_AVC) { + if ( (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_AVC) || (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_SVC)) { GF_AVCConfig *cfg = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); if (!cfg) return GF_NON_COMPLIANT_BITSTREAM; ctx->nalu_size_length = cfg->nal_unit_size; @@ -158,29 +167,63 @@ static GF_Err RVCD_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) ctx->pixel_ar = (par_n<<16) || par_d; } } - /* call decode - warning for AVC: the data blocks do not contain startcode prefixes (00000001), you may need to add them) */ + /* call decode*/ res = rvc_decode(slc->data, slc->size, &Picture, 1); if (res<0) { - GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding SPS %d\n", res)); + GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[RVC Decoder] Error decoding SPS %d\n", res)); } - } count = gf_list_count(cfg->pictureParameterSets); for (i=0; ipictureParameterSets, i); - /*same remark as above*/ - - res = rvc_decode(slc->data, slc->size, &Picture, 1); if (res<0) { - GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res)); + GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[RVC Decoder] 
Error decoding PPS %d\n", res)); } } gf_odf_avc_cfg_del(cfg); + /*initialize the decoder */ + } else if ( (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_HEVC) || (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_SHVC)) { + GF_HEVCConfig *cfg = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, 0); + if (!cfg) return GF_NON_COMPLIANT_BITSTREAM; + ctx->nalu_size_length = cfg->nal_unit_size; + + /*decode all NALUs*/ + count = gf_list_count(cfg->param_array); + for (i=0; iparam_array, i); + count2 = gf_list_count(ar->nalus); + for (j=0; jnalus, j); + + if (ar->type==GF_HEVC_NALU_SEQ_PARAM) { + gf_hevc_get_sps_info(slc->data, slc->size, &slc->id, &w, &h, &par_n, &par_d); + /*by default use the base layer*/ + if (!j) { + if ((ctx->widthheightwidth = w; + ctx->height = h; + if ( ((s32)par_n>0) && ((s32)par_d>0) ) + ctx->pixel_ar = (par_n<<16) || par_d; + } + } + } + + /* call decode*/ + res = rvc_decode(slc->data, slc->size, &Picture, 1); + if (res<0) { + GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[RVC Decoder] Error decoding parameter set: %d\n", res)); + } + } + } + + gf_odf_hevc_cfg_del(cfg); } else if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_MPEG4_PART2) { GF_M4VDecSpecInfo dsi; GF_Err e; @@ -195,7 +238,7 @@ static GF_Err RVCD_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) res = rvc_decode(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &Picture, 1); if (res<0) { - GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res)); + GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[RVC Decoder] Error decoding PPS %d\n", res)); } @@ -204,7 +247,7 @@ static GF_Err RVCD_AttachStream(GF_BaseDecoder *ifcg, GF_ESD *esd) res = rvc_decode(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &Picture, 1); if (res<0) { - GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[SVC Decoder] Error decoding PPS %d\n", res)); + GF_LOG(GF_LOG_ERROR, GF_LOG_CODEC, ("[RVC Decoder] Error decoding PPS %d\n", res)); } } @@ -280,7 +323,7 @@ int bookmark = 0; static GF_Err RVCD_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { @@ -375,7 +418,10 @@ static u32 RVCD_CanHandleStream(GF_BaseDecoder *dec, u32 StreamType, GF_ESD *esd if (!esd) return GF_CODEC_STREAM_TYPE_SUPPORTED; switch (esd->decoderConfig->objectTypeIndication) { case GPAC_OTI_VIDEO_AVC: + case GPAC_OTI_VIDEO_SVC: case GPAC_OTI_VIDEO_MPEG4_PART2: + case GPAC_OTI_VIDEO_HEVC: + case GPAC_OTI_VIDEO_SHVC: if (!esd->decoderConfig->rvc_config && !esd->decoderConfig->predefined_rvc_config) return GF_CODEC_NOT_SUPPORTED; return GF_CODEC_SUPPORTED+1; } @@ -384,7 +430,7 @@ static u32 RVCD_CanHandleStream(GF_BaseDecoder *dec, u32 StreamType, GF_ESD *esd static const char *RVCD_GetCodecName(GF_BaseDecoder *dec) { - return "RVC Decoder"; + return "Reconfigurable Video Decoder"; } GF_BaseDecoder *NewRVCDec() diff --git a/modules/sdl_out/video.c b/modules/sdl_out/video.c index e8505b1..8dad54d 100644 --- a/modules/sdl_out/video.c +++ b/modules/sdl_out/video.c @@ -674,11 +674,13 @@ static Bool SDLVid_InitializeWindow(SDLVidCtx *ctx, GF_VideoOutput *dr) SDL_GetDesktopDisplayMode(0,&vinf); dr->max_screen_width = vinf.w; dr->max_screen_height = vinf.h; + dr->max_screen_bpp = 8; #else vinf = SDL_GetVideoInfo(); #if SDL_VERSION_ATLEAST(1, 2, 10) 
dr->max_screen_width = vinf->current_w; dr->max_screen_height = vinf->current_h; + dr->max_screen_bpp = 8; #else { SDL_Rect** modes; @@ -700,6 +702,7 @@ static Bool SDLVid_InitializeWindow(SDLVidCtx *ctx, GF_VideoOutput *dr) } } } + dr->max_screen_bpp = 8; #endif /* versions prior to 1.2.10 do not have the size of screen */ #endif @@ -717,12 +720,15 @@ static void SDLVid_ResetWindow(SDLVidCtx *ctx) { SDLVid_DestroyObjects(ctx); #if SDL_VERSION_ATLEAST(2,0,0) - if ( ctx->gl_context ) + if ( ctx->gl_context ) { SDL_GL_DeleteContext(ctx->gl_context); - if ( ctx->renderer ) + ctx->gl_context = NULL; + } + if ( ctx->renderer ) { SDL_DestroyRenderer(ctx->renderer); - ctx->gl_context = NULL; - + ctx->renderer = NULL; + } + /*iOS SDL2 has a nasty bug that breaks switching between 2D and GL context if we don't re-init the video subsystem*/ #ifdef GPAC_IPHONE if ( ctx->screen ) { @@ -1278,7 +1284,7 @@ static GF_Err SDLVid_Flush(GF_VideoOutput *dr, GF_Window *dest) dst.width = wndSurface->w; dst.pitch_x = 0; dst.pitch_y = wndSurface->pitch; - dst.pixel_format = SDLVid_MapPixelFormat(wndSurface->format, SDL_FALSE); + dst.pixel_format = SDLVid_MapPixelFormat(wndSurface->format, GF_FALSE); dst.video_buffer = (char*)wndSurface->pixels; #else SDL_LockSurface(ctx->screen); diff --git a/modules/widgetman/widget.c b/modules/widgetman/widget.c index d6ce300..0bd8c5b 100644 --- a/modules/widgetman/widget.c +++ b/modules/widgetman/widget.c @@ -385,8 +385,10 @@ SMJS_FUNC_PROP_SET( widget_setProperty) /*avoids GCC warning*/ if (!obj) obj = NULL; +#ifndef GPAC_CONFIG_DARWIN if (!id) id=0; - if (!vp) vp=0; +#endif + if (!vp) vp=0; return JS_TRUE; } diff --git a/modules/widgetman/widgetman.c b/modules/widgetman/widgetman.c index 5efd88f..a9dc711 100644 --- a/modules/widgetman/widgetman.c +++ b/modules/widgetman/widgetman.c @@ -3380,7 +3380,7 @@ GF_WidgetInstance *wm_load_widget(GF_WidgetManager *wm, const char *path, u32 In wi->instance_id ++; sprintf(szName, "%s#%s#Instance%d", path, wi->widget->name, wi->instance_id); - sprintf(wi->secname, "Widget#%08X", gf_crc_32(szName, (u32) strlen(szName))); + sprintf((char *)wi->secname, "Widget#%08X", gf_crc_32(szName, (u32) strlen(szName))); /*create section*/ gf_cfg_set_key(wm->term->user->config, "Widgets", (const char *) wi->secname, " "); @@ -3461,7 +3461,7 @@ static Bool wm_enum_widget(void *cbk, char *file_name, char *file_path) static Bool wm_enum_dir(void *cbk, char *file_name, char *file_path) { - return gf_enum_directory(file_path, 0, wm_enum_widget, cbk, "mgt"); + return (gf_enum_directory(file_path, 0, wm_enum_widget, cbk, "mgt")==GF_OK) ? GF_FALSE : GF_TRUE; } @@ -3484,7 +3484,7 @@ static JSBool SMJS_FUNCTION(wm_initialize) u32 instID = ID ? 
atoi(ID) : 0; GF_WidgetInstance *wi = wm_load_widget(wm, manifest, instID, 0); if (wi) { - strcpy(wi->secname, (const char *) name); + strcpy((char *)wi->secname, (const char *) name); wm_widget_jsbind(wm, wi); } } diff --git a/modules/x11_out/x11_out.c b/modules/x11_out/x11_out.c index d8bc66a..d8ee106 100644 --- a/modules/x11_out/x11_out.c +++ b/modules/x11_out/x11_out.c @@ -728,18 +728,23 @@ static GF_Err X11_SetupGL(GF_VideoOutput *vout) GF_Event evt; XWindow *xWin = (XWindow *)vout->opaque; - GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[X11] Setting up GL for display %d\n", xWin->display)); - XSync(xWin->display, False); - xWin->glx_context = glXCreateContext(xWin->display,xWin->glx_visualinfo, NULL, True); - XSync(xWin->display, False); - if (!xWin->glx_context) return GF_IO_ERR; - if (xWin->output_3d_mode==2) return GF_IO_ERR; + if (!xWin->glx_visualinfo) return GF_IO_ERR; + memset(&evt, 0, sizeof(GF_Event)); + if (!xWin->glx_context) { + GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[X11] Setting up GL for display %d\n", xWin->display)); + XSync(xWin->display, False); + xWin->glx_context = glXCreateContext(xWin->display,xWin->glx_visualinfo, NULL, True); + XSync(xWin->display, False); + if (!xWin->glx_context) return GF_IO_ERR; + if (xWin->output_3d_mode==2) return GF_IO_ERR; + + evt.setup.hw_reset = 1; + } if ( ! glXMakeCurrent(xWin->display, xWin->fullscreen ? xWin->full_wnd : xWin->wnd, xWin->glx_context) ) return GF_IO_ERR; XSync(xWin->display, False); - memset(&evt, 0, sizeof(GF_Event)); + evt.type = GF_EVENT_VIDEO_SETUP; - evt.setup.hw_reset = 1; vout->on_event (vout->evt_cbk_hdl,&evt); xWin->is_init = 1; return GF_OK; @@ -997,7 +1002,7 @@ GF_Err X11_SetFullScreen (struct _video_out * vout, u32 bFullScreenOn, u32 * scr X11VID (); xWindow->fullscreen = bFullScreenOn; #ifdef GPAC_HAS_OPENGL - if (xWindow->output_3d_mode==1) X11_ReleaseGL(xWindow); +// if (xWindow->output_3d_mode==1) X11_ReleaseGL(xWindow); #endif if (bFullScreenOn) { @@ -1156,6 +1161,8 @@ X11_SetupWindow (GF_VideoOutput * vout) xWindow->screennum=0; vout->max_screen_width = DisplayWidth(xWindow->display, xWindow->screennum); vout->max_screen_height = DisplayHeight(xWindow->display, xWindow->screennum); + vout->max_screen_bpp = 8; + /* * Full screen wnd */ @@ -1347,9 +1354,9 @@ xWindow->screennum=0; } retry_8bpp: i=0; - attribs[i++] = GLX_DRAWABLE_TYPE; - attribs[i++] = GLX_WINDOW_BIT; if (nb_bits>8) { + attribs[i++] = GLX_DRAWABLE_TYPE; + attribs[i++] = GLX_WINDOW_BIT; attribs[i++] = GLX_RENDER_TYPE; attribs[i++] = GLX_RGBA_BIT; } else { @@ -1393,9 +1400,9 @@ retry_8bpp: typedef int (* FnGlXGetFBConfigAttrib) (Display * dpy, GLXFBConfig config, int attribute, int * value); - FnGlXChooseFBConfigProc my_glXChooseFBConfig = (FnGlXChooseFBConfigProc) glXGetProcAddress("glXChooseFBConfig"); - FnGlXGetVisualFromFBConfigProc my_glXGetVisualFromFBConfig = (FnGlXGetVisualFromFBConfigProc)glXGetProcAddress("glXGetVisualFromFBConfig"); - FnGlXGetFBConfigAttrib my_glXGetFBConfigAttrib = (FnGlXGetFBConfigAttrib)glXGetProcAddress("glXGetFBConfigAttrib"); + FnGlXChooseFBConfigProc my_glXChooseFBConfig = (FnGlXChooseFBConfigProc) glXGetProcAddress((GLubyte*) "glXChooseFBConfig"); + FnGlXGetVisualFromFBConfigProc my_glXGetVisualFromFBConfig = (FnGlXGetVisualFromFBConfigProc)glXGetProcAddress((GLubyte*) "glXGetVisualFromFBConfig"); + FnGlXGetFBConfigAttrib my_glXGetFBConfigAttrib = (FnGlXGetFBConfigAttrib)glXGetProcAddress((GLubyte*) "glXGetFBConfigAttrib"); if (my_glXChooseFBConfig && my_glXGetVisualFromFBConfig) { fb = 
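/*
 * The X11_SetupGL() change above makes GL setup idempotent: the GLX context is
 * created only when none exists yet, the hardware-reset notification is raised only
 * in that case, and glXMakeCurrent() is still performed on every call. A stand-alone
 * sketch of that pattern (names and error codes are placeholders):
 */
#include <X11/Xlib.h>
#include <GL/glx.h>

static int ensure_glx_context(Display *dpy, Window wnd, XVisualInfo *vi,
                              GLXContext *ctx, int *hw_reset)
{
    *hw_reset = 0;
    if (!vi) return -1;
    if (!*ctx) {
        XSync(dpy, False);
        *ctx = glXCreateContext(dpy, vi, NULL, True);
        XSync(dpy, False);
        if (!*ctx) return -1;
        *hw_reset = 1;   /* caller must rebuild its GL resources */
    }
    if (!glXMakeCurrent(dpy, wnd, *ctx)) return -1;
    return 0;
}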
my_glXChooseFBConfig(xWindow->display, xWindow->screennum, attribs, &fbcount); @@ -1406,7 +1413,7 @@ retry_8bpp: nb_bits = 8; goto retry_8bpp; } - xWindow->glx_visualinfo = my_glXGetVisualFromFBConfig(xWindow->display, fb[0]); + xWindow->glx_visualinfo = my_glXGetVisualFromFBConfig(xWindow->display, fb[0]); if (my_glXGetFBConfigAttrib && fb) { int r, g, b; @@ -1418,6 +1425,7 @@ retry_8bpp: } else { xWindow->glx_visualinfo = glXChooseVisual(xWindow->display, xWindow->screennum, attribs); } + vout->max_screen_bpp = nb_bits; if (!xWindow->glx_visualinfo) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[X11] Error selecting GL display\n")); diff --git a/modules/xvid_dec/xvid_dec.c b/modules/xvid_dec/xvid_dec.c index 98e7823..8376a4f 100644 --- a/modules/xvid_dec/xvid_dec.c +++ b/modules/xvid_dec/xvid_dec.c @@ -240,7 +240,7 @@ static GF_Err XVID_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capa } static GF_Err XVID_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/modules/xvid_dec/xvid_dec_wce.cpp b/modules/xvid_dec/xvid_dec_wce.cpp index e215f85..48fa777 100644 --- a/modules/xvid_dec/xvid_dec_wce.cpp +++ b/modules/xvid_dec/xvid_dec_wce.cpp @@ -157,7 +157,7 @@ static GF_Err XVID_SetCapabilities(GF_BaseDecoder *ifcg, GF_CodecCapability capa } static GF_Err XVID_ProcessData(GF_MediaDecoder *ifcg, char *inBuffer, u32 inBufferLength, - u16 ES_ID, + u16 ES_ID, u32 *CTS, char *outBuffer, u32 *outBufferLength, u8 PaddingBits, u32 mmlevel) { diff --git a/regression_tests/bifs/bifs-2D-interactivity-mousesensor.bt b/regression_tests/bifs/bifs-2D-interactivity-mousesensor.bt index c22f1b4..9a69983 100644 --- a/regression_tests/bifs/bifs-2D-interactivity-mousesensor.bt +++ b/regression_tests/bifs/bifs-2D-interactivity-mousesensor.bt @@ -158,7 +158,7 @@ AT 0 { ES_Descriptor { ES_ID 3 muxInfo MuxInfo { - fileName "../auxiliary_files/logo.jpg" + fileName "../auxiliary_files/logo.png" } } ] diff --git a/src/bifs/script_enc.c b/src/bifs/script_enc.c index dc50a4c..4ebf8ae 100644 --- a/src/bifs/script_enc.c +++ b/src/bifs/script_enc.c @@ -620,11 +620,11 @@ void SFE_PutInteger(ScriptEnc *sc_enc, char *str) u32 nbBits, val = 0; if (sc_enc->emul) return; if ((str[0]=='0') && (str[1]=='x' || str[1]=='X')) { - val = strtoul(sc_enc->token, (char **) NULL, 16); + val = (u32) strtoul(sc_enc->token, (char **) NULL, 16); } else if (str[0]=='0' && isdigit(str[1])) { - val = strtoul(str, (char **) NULL, 8); + val = (u32) strtoul(str, (char **) NULL, 8); } else if (isdigit(str[0])) { - val = strtoul(str, (char **) NULL, 10); + val = (u32) strtoul(str, (char **) NULL, 10); } else { GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[bifs] Script encoding: %s is not an integer\n", str)); sc_enc->err = GF_BAD_PARAM; @@ -872,11 +872,11 @@ u32 SFE_PutCaseInteger(ScriptEnc *sc_enc, char *str, u32 nbBits) { u32 val = 0; if ((str[0]=='0') && (str[1]=='x' || str[1]=='X')) { - val = strtoul(sc_enc->token, (char **) NULL, 16); + val = (u32) strtoul(sc_enc->token, (char **) NULL, 16); } else if (str[0]=='0' && isdigit(str[1])) { - val = strtoul(str, (char **) NULL, 8); + val = (u32) strtoul(str, (char **) NULL, 8); } else if (isdigit(str[0])) { - val = strtoul(str, (char **) NULL, 10); + val = (u32) strtoul(str, (char **) NULL, 10); } else { GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[bifs] Script encoding: %s is not an integer\n", str)); sc_enc->err = GF_BAD_PARAM; diff --git a/src/compositor/audio_input.c 
b/src/compositor/audio_input.c index 3c5eeb6..ae170fc 100644 --- a/src/compositor/audio_input.c +++ b/src/compositor/audio_input.c @@ -76,15 +76,19 @@ static char *gf_audio_input_fetch_frame(void *callback, u32 *size, u32 audio_del gf_mo_get_object_time(ai->stream, &obj_time); obj_time += audio_delay_ms; - drift = (s32)obj_time; - drift -= (s32)ts; - + if (ai->compositor->bench_mode) { + drift = 0; + } else { + drift = (s32)obj_time; + drift -= (s32)ts; + } + #ifdef ENABLE_EARLY_FRAME_DETECTION /*too early (silence insertions), skip*/ if (drift < 0) { GF_LOG(GF_LOG_INFO, GF_LOG_AUDIO, ("[Audio Input] audio too early of %d (CTS %u at OTB %u with audio delay %d ms)\n", drift + audio_delay_ms, ts, obj_time, audio_delay_ms)); ai->need_release = 0; - gf_mo_release_data(ai->stream, 0, 0); + gf_mo_release_data(ai->stream, 0, -1); *size = 0; return NULL; } diff --git a/src/compositor/audio_render.c b/src/compositor/audio_render.c index 0d2b933..5e0c94a 100644 --- a/src/compositor/audio_render.c +++ b/src/compositor/audio_render.c @@ -424,25 +424,36 @@ GF_AudioRenderer *gf_sc_ar_load(GF_User *user) } } if (!ar->audio_out) { + GF_AudioOutput *raw_out = NULL; count = gf_modules_get_count(ar->user->modules); for (i=0; iaudio_out = (GF_AudioOutput *) gf_modules_load_interface(ar->user->modules, i, GF_AUDIO_OUTPUT_INTERFACE); if (!ar->audio_out) continue; + + //in enum mode, only use raw out if everything else failed ... + if (!stricmp(ar->audio_out->module_name, "Raw Audio Output")) { + raw_out = ar->audio_out; + ar->audio_out = NULL; + continue; + } GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] Audio output module %s loaded\n", ar->audio_out->module_name)); /*check that's a valid audio compositor*/ - if (ar->audio_out->SelfThreaded) { - if (ar->audio_out->SetPriority) break; - } else { - if (ar->audio_out->WriteAudio) break; + if ((ar->audio_out->SelfThreaded && ar->audio_out->SetPriority) || ar->audio_out->WriteAudio) { + /*remember the module we use*/ + gf_cfg_set_key(user->config, "Audio", "DriverName", ar->audio_out->module_name); + break; } gf_modules_close_interface((GF_BaseInterface *)ar->audio_out); ar->audio_out = NULL; } + if (raw_out) { + if (ar->audio_out) gf_modules_close_interface((GF_BaseInterface *)raw_out); + else ar->audio_out = raw_out; + } } /*if not init we run with a NULL audio compositor*/ if (ar->audio_out) { - ar->audio_out->FillBuffer = gf_ar_fill_output; ar->audio_out->audio_renderer = ar; GF_LOG(GF_LOG_DEBUG, GF_LOG_AUDIO, ("[AudioRender] Setting up audio module %s\n", ar->audio_out->module_name)); @@ -458,8 +469,6 @@ GF_AudioRenderer *gf_sc_ar_load(GF_User *user) gf_modules_close_interface((GF_BaseInterface *)ar->audio_out); ar->audio_out = NULL; } else { - /*remember the module we use*/ - gf_cfg_set_key(user->config, "Audio", "DriverName", ar->audio_out->module_name); if (!ar->audio_out->SelfThreaded) { ar->th = gf_th_new("AudioRenderer"); gf_th_run(ar->th, gf_ar_proc, ar); diff --git a/src/compositor/compositor.c b/src/compositor/compositor.c index 02e7637..4445953 100644 --- a/src/compositor/compositor.c +++ b/src/compositor/compositor.c @@ -59,6 +59,9 @@ static void gf_sc_set_fullscreen(GF_Compositor *compositor) GF_LOG(GF_LOG_INFO, GF_LOG_COMPOSE, ("[Compositor] Switching fullscreen %s\n", compositor->fullscreen ? 
"off" : "on")); /*move to FS*/ compositor->fullscreen = !compositor->fullscreen; + + gf_sc_ar_control(compositor->audio_renderer, 0); + if (compositor->fullscreen && (compositor->scene_width>=compositor->scene_height) #ifndef GPAC_DISABLE_3D && !compositor->visual->type_3d @@ -69,6 +72,8 @@ static void gf_sc_set_fullscreen(GF_Compositor *compositor) e = compositor->video_out->SetFullScreen(compositor->video_out, compositor->fullscreen, &compositor->display_width, &compositor->display_height); } + gf_sc_ar_control(compositor->audio_renderer, 1); + if (e) { GF_Event evt; memset(&evt, 0, sizeof(GF_Event)); @@ -167,9 +172,14 @@ static void gf_sc_reconfig_task(GF_Compositor *compositor) /*fullscreen on/off request*/ if (compositor->msg_type & GF_SR_CFG_FULLSCREEN) { compositor->msg_type &= ~GF_SR_CFG_FULLSCREEN; - gf_sc_set_fullscreen(compositor); - gf_sc_next_frame_state(compositor, GF_SC_DRAW_FRAME); - notif_size=1; + //video is about to resetup, wait for the setup + if (compositor->recompute_ar) { + compositor->fullscreen_postponed = 1; + } else { + gf_sc_set_fullscreen(compositor); + gf_sc_next_frame_state(compositor, GF_SC_DRAW_FRAME); + notif_size=1; + } } compositor->msg_type &= ~GF_SR_IN_RECONFIG; } @@ -201,16 +211,22 @@ static void gf_sc_reconfig_task(GF_Compositor *compositor) } } -Bool gf_sc_draw_frame(GF_Compositor *compositor) +GF_EXPORT +Bool gf_sc_draw_frame(GF_Compositor *compositor, u32 *ms_till_next) { gf_sc_simulation_tick(compositor); + if (ms_till_next) { + if ((s32) compositor->next_frame_delay == -1) + *ms_till_next = compositor->frame_duration; + else + *ms_till_next = MIN(compositor->next_frame_delay, compositor->frame_duration); + } if (compositor->frame_draw_type) return 1; if (compositor->fonts_pending) return 1; return GF_FALSE; } - /*forces graphics redraw*/ GF_EXPORT void gf_sc_reset_graphics(GF_Compositor *compositor) @@ -317,21 +333,25 @@ static GF_Err gf_sc_create(GF_Compositor *compositor) gf_modules_close_interface((GF_BaseInterface *)compositor->video_out); compositor->video_out = NULL; } - } else { - GF_LOG(GF_LOG_WARNING, GF_LOG_CORE, ("Failed to load module %s, no video driver.\n", sOpt)); - sOpt = NULL; } } if (!compositor->video_out) { - u32 i, count; - count = gf_modules_get_count(compositor->user->modules); + GF_VideoOutput *raw_out = NULL; + u32 i, count = gf_modules_get_count(compositor->user->modules); GF_LOG(GF_LOG_INFO, GF_LOG_CORE, ("Trying to find a suitable video driver amongst %d modules...\n", count)); for (i=0; ivideo_out = (GF_VideoOutput *) gf_modules_load_interface(compositor->user->modules, i, GF_VIDEO_OUTPUT_INTERFACE); if (!compositor->video_out) continue; compositor->video_out->evt_cbk_hdl = compositor; compositor->video_out->on_event = gf_sc_on_event; + //in enum mode, only use raw out if everything else failed ... 
+ if (!stricmp(compositor->video_out->module_name, "Raw Video Output")) { + raw_out = compositor->video_out; + compositor->video_out = NULL; + continue; + } + /*init hw*/ if (compositor->video_out->Setup(compositor->video_out, compositor->user->os_window_handler, compositor->user->os_display, compositor->user->init_flags)==GF_OK) { gf_cfg_set_key(compositor->user->config, "Video", "DriverName", compositor->video_out->module_name); @@ -340,6 +360,13 @@ static GF_Err gf_sc_create(GF_Compositor *compositor) gf_modules_close_interface((GF_BaseInterface *)compositor->video_out); compositor->video_out = NULL; } + if (raw_out) { + if (compositor->video_out) gf_modules_close_interface((GF_BaseInterface *)raw_out); + else { + compositor->video_out = raw_out; + compositor->video_out ->Setup(compositor->video_out, compositor->user->os_window_handler, compositor->user->os_display, compositor->user->init_flags); + } + } } if (!compositor->video_out ) { GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("Failed to create compositor->video_out, did not find any suitable driver.")); @@ -393,13 +420,13 @@ static GF_Err gf_sc_create(GF_Compositor *compositor) } compositor->textures = gf_list_new(); + compositor->textures_gc = gf_list_new(); compositor->frame_rate = 30.0; compositor->frame_duration = 33; compositor->time_nodes = gf_list_new(); -#ifdef GF_SR_EVENT_QUEUE - compositor->events = gf_list_new(); - compositor->ev_mx = gf_mx_new("EventQueue"); -#endif + compositor->event_queue = gf_list_new(); + compositor->event_queue_back = gf_list_new(); + compositor->evq_mx = gf_mx_new("EventQueue"); #ifdef GF_SR_USE_VIDEO_CACHE compositor->cached_groups = gf_list_new(); @@ -417,6 +444,7 @@ static GF_Err gf_sc_create(GF_Compositor *compositor) compositor->interaction_level = GF_INTERACT_NORMAL | GF_INTERACT_INPUT_SENSOR | GF_INTERACT_NAVIGATION; compositor->scene_sampled_clock = 0; + compositor->video_th_id = gf_th_id(); return GF_OK; } @@ -446,15 +474,22 @@ static u32 gf_sc_proc(void *par) compositor->video_th_state = GF_COMPOSITOR_THREAD_RUN; while (compositor->video_th_state == GF_COMPOSITOR_THREAD_RUN) { - if (compositor->is_hidden==1) + if (compositor->is_hidden==1) { + if (!compositor->bench_mode) { + compositor->scene_sampled_clock = gf_sc_ar_get_clock(compositor->audio_renderer); + } gf_sleep(compositor->frame_duration); - else + } else gf_sc_simulation_tick(compositor); } #ifndef GPAC_DISABLE_3D visual_3d_reset_graphics(compositor->visual); + compositor_2d_reset_gl_auto(compositor); #endif + gf_sc_texture_cleanup_hw(compositor); + + /*destroy video out here if we're using openGL, to avoid threading issues*/ compositor->video_out->Shutdown(compositor->video_out); gf_modules_close_interface((GF_BaseInterface *)compositor->video_out); @@ -546,7 +581,13 @@ void gf_sc_del(GF_Compositor *compositor) } } gf_th_del(compositor->VisualThread); + } else { +#ifndef GPAC_DISABLE_3D + compositor_2d_reset_gl_auto(compositor); +#endif + gf_sc_texture_cleanup_hw(compositor); } + if (compositor->video_out) { GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor] Closing video output\n")); compositor->video_out->Shutdown(compositor->video_out); @@ -579,7 +620,6 @@ void gf_sc_del(GF_Compositor *compositor) } #ifndef GPAC_DISABLE_3D - compositor_2d_reset_gl_auto(compositor); if (compositor->unit_bbox) mesh_free(compositor->unit_bbox); #endif @@ -597,17 +637,21 @@ void gf_sc_del(GF_Compositor *compositor) } gf_list_del(compositor->proto_modules); } -#ifdef GF_SR_EVENT_QUEUE - gf_mx_p(compositor->ev_mx); - while 
(gf_list_count(compositor->events)) { - GF_Event *ev = (GF_Event *)gf_list_get(compositor->events, 0); - gf_list_rem(compositor->events, 0); - gf_free(ev); + gf_mx_p(compositor->evq_mx); + while (gf_list_count(compositor->event_queue)) { + GF_QueuedEvent *qev = (GF_QueuedEvent *)gf_list_get(compositor->event_queue, 0); + gf_list_rem(compositor->event_queue, 0); + gf_free(qev); } - gf_mx_v(compositor->ev_mx); - gf_mx_del(compositor->ev_mx); - gf_list_del(compositor->events); -#endif + while (gf_list_count(compositor->event_queue_back)) { + GF_QueuedEvent *qev = (GF_QueuedEvent *)gf_list_get(compositor->event_queue_back, 0); + gf_list_rem(compositor->event_queue, 0); + gf_free(qev); + } + gf_mx_v(compositor->evq_mx); + gf_mx_del(compositor->evq_mx); + gf_list_del(compositor->event_queue); + gf_list_del(compositor->event_queue_back); if (compositor->font_manager) gf_font_manager_del(compositor->font_manager); @@ -617,6 +661,7 @@ void gf_sc_del(GF_Compositor *compositor) #endif if (compositor->textures) gf_list_del(compositor->textures); + if (compositor->textures_gc) gf_list_del(compositor->textures_gc); if (compositor->time_nodes) gf_list_del(compositor->time_nodes); if (compositor->extra_scenes) gf_list_del(compositor->extra_scenes); if (compositor->video_listeners) gf_list_del(compositor->video_listeners); @@ -745,7 +790,7 @@ void compositor_set_ar_scale(GF_Compositor *compositor, Fixed scaleX, Fixed scal compositor_2d_set_user_transform(compositor, compositor->zoom, compositor->trans_x, compositor->trans_y, 1); } -static void gf_sc_reset(GF_Compositor *compositor) +static void gf_sc_reset(GF_Compositor *compositor, Bool has_scene) { Bool draw_mode; @@ -789,7 +834,7 @@ static void gf_sc_reset(GF_Compositor *compositor) #ifndef GPAC_DISABLE_3D //force a recompute of the canvas - if (compositor->hybgl_txh) { + if (has_scene && compositor->hybgl_txh) { compositor->hybgl_txh->width = compositor->hybgl_txh->height = 0; } #endif @@ -836,19 +881,18 @@ GF_Err gf_sc_set_scene(GF_Compositor *compositor, GF_SceneGraph *scene_graph) gf_sc_ar_reset(compositor->audio_renderer); } -#ifdef GF_SR_EVENT_QUEUE GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor] Reseting event queue\n")); - gf_mx_p(compositor->ev_mx); - while (gf_list_count(compositor->events)) { - GF_Event *ev = (GF_Event*)gf_list_get(compositor->events, 0); - gf_list_rem(compositor->events, 0); - gf_free(ev); + gf_mx_p(compositor->evq_mx); + while (gf_list_count(compositor->event_queue)) { + GF_QueuedEvent *qev = (GF_QueuedEvent*)gf_list_get(compositor->event_queue, 0); + gf_list_rem(compositor->event_queue, 0); + gf_free(qev); } -#endif + gf_mx_v(compositor->evq_mx); GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor] Reseting compositor module\n")); /*reset main surface*/ - gf_sc_reset(compositor); + gf_sc_reset(compositor, scene_graph ? 1 : 0); /*set current graph*/ compositor->scene = scene_graph; @@ -966,9 +1010,6 @@ GF_Err gf_sc_set_scene(GF_Compositor *compositor, GF_SceneGraph *scene_graph) } gf_sc_reset_framerate(compositor); -#ifdef GF_SR_EVENT_QUEUE - gf_mx_v(compositor->ev_mx); -#endif gf_sc_lock(compositor, 0); if (scene_graph) @@ -1160,20 +1201,22 @@ void gf_sc_reload_config(GF_Compositor *compositor) #ifndef GPAC_DISABLE_3D - sOpt = gf_cfg_get_key(compositor->user->config, "Compositor", "ForceOpenGL"); - compositor->force_opengl_2d = (sOpt && !strcmp(sOpt, "yes")) ? 1 : 0; + sOpt = gf_cfg_get_key(compositor->user->config, "Compositor", "OpenGLMode"); + compositor->force_opengl_2d = (sOpt && !strcmp(sOpt, "always")) ? 
1 : 0; if (!sOpt) { - compositor->visual->type_3d = 1; compositor->recompute_ar = 1; compositor->autoconfig_opengl = 1; - } - + } else { + compositor->hybrid_opengl = !strcmp(sOpt, "hybrid") ? 1 : 0; #ifdef OPENGL_RASTER - compositor->opengl_raster = (sOpt && !strcmp(sOpt, "raster")) ? 1 : 0; - if (compositor->opengl_raster) compositor->traverse_state->immediate_draw = GF_TRUE; + compositor->opengl_raster = !strcmp(sOpt, "raster") ? 1 : 0; + if (compositor->opengl_raster) compositor->traverse_state->immediate_draw = GF_TRUE; #endif + } - compositor->hybrid_opengl = (sOpt && !strcmp(sOpt, "hybrid")) ? 1 : 0; + sOpt = gf_cfg_get_key(compositor->user->config, "Compositor", "EnablePBO"); + if (!sOpt) gf_cfg_set_key(compositor->user->config, "Compositor", "EnablePBO", "no"); + compositor->enable_pbo = (sOpt && !strcmp(sOpt, "yes")) ? 1 : 0; sOpt = gf_cfg_get_key(compositor->user->config, "Compositor", "DefaultNavigationMode"); if (sOpt && !strcmp(sOpt, "Walk")) compositor->default_navigation_mode = GF_NAVIGATE_WALK; @@ -1571,51 +1614,58 @@ GF_Err gf_sc_set_option(GF_Compositor *compositor, u32 type, u32 value) return e; } -u32 gf_sc_get_option(GF_Compositor *compositor, u32 type) +Bool gf_sc_is_over(GF_Compositor *compositor, GF_SceneGraph *scene_graph) { - switch (type) { - case GF_OPT_PLAY_STATE: return compositor->paused ? 1 : 0; - case GF_OPT_OVERRIDE_SIZE: return (compositor->override_size_flags & 1) ? 1 : 0; - case GF_OPT_IS_FINISHED: - if (compositor->interaction_sensors) return 0; - case GF_OPT_IS_OVER: - { - u32 i, count; - count = gf_list_count(compositor->time_nodes); - for (i=0; itime_nodes, i); - if (tn->needs_unregister) continue; - switch (gf_node_get_tag((GF_Node *)tn->udta)) { + u32 i, count; + count = gf_list_count(compositor->time_nodes); + for (i=0; itime_nodes, i); + if (tn->needs_unregister) continue; + + if (scene_graph && (gf_node_get_graph((GF_Node *)tn->udta) != scene_graph)) + continue; + + switch (gf_node_get_tag((GF_Node *)tn->udta)) { #ifndef GPAC_DISABLE_VRML - case TAG_MPEG4_TimeSensor: + case TAG_MPEG4_TimeSensor: #endif #ifndef GPAC_DISABLE_X3D - case TAG_X3D_TimeSensor: + case TAG_X3D_TimeSensor: #endif - return 0; + return 0; #ifndef GPAC_DISABLE_VRML - case TAG_MPEG4_MovieTexture: + case TAG_MPEG4_MovieTexture: #ifndef GPAC_DISABLE_X3D - case TAG_X3D_MovieTexture: + case TAG_X3D_MovieTexture: #endif - if (((M_MovieTexture *)tn->udta)->loop) return 0; - break; - case TAG_MPEG4_AudioClip: + if (((M_MovieTexture *)tn->udta)->loop) return 0; + break; + case TAG_MPEG4_AudioClip: #ifndef GPAC_DISABLE_X3D - case TAG_X3D_AudioClip: + case TAG_X3D_AudioClip: #endif - if (((M_AudioClip*)tn->udta)->loop) return 0; - break; - case TAG_MPEG4_AnimationStream: - if (((M_AnimationStream*)tn->udta)->loop) return 0; - break; + if (((M_AudioClip*)tn->udta)->loop) return 0; + break; + case TAG_MPEG4_AnimationStream: + if (((M_AnimationStream*)tn->udta)->loop) return 0; + break; #endif - } } } - /*FIXME - this does not work with SVG/SMIL*/ - return 1; + /*FIXME - this does not work with SVG/SMIL*/ + return 1; +} + +u32 gf_sc_get_option(GF_Compositor *compositor, u32 type) +{ + switch (type) { + case GF_OPT_PLAY_STATE: return compositor->paused ? 1 : 0; + case GF_OPT_OVERRIDE_SIZE: return (compositor->override_size_flags & 1) ? 
1 : 0; + case GF_OPT_IS_FINISHED: + if (compositor->interaction_sensors) return 0; + case GF_OPT_IS_OVER: + return gf_sc_is_over(compositor, NULL); case GF_OPT_STRESS_MODE: return compositor->stress_mode; case GF_OPT_AUDIO_VOLUME: return compositor->audio_renderer->volume; case GF_OPT_AUDIO_PAN: return compositor->audio_renderer->pan; @@ -1939,6 +1989,7 @@ static void gf_sc_recompute_ar(GF_Compositor *compositor, GF_Node *top_node) } #endif + gf_sc_ar_control(compositor->audio_renderer, 0); #ifndef GPAC_DISABLE_3D if (compositor->autoconfig_opengl) { compositor->visual->type_3d = 1; @@ -1952,17 +2003,16 @@ static void gf_sc_recompute_ar(GF_Compositor *compositor, GF_Node *top_node) #endif compositor->autoconfig_opengl = 0; - //to change to "auto" once the GL auto mode is stable -#if 0 + //enable hybrid mode by default if (compositor->visual->yuv_rect_glsl_program) { - gf_cfg_set_key(compositor->user->config, "Compositor", "ForceOpenGL", "yes"); - compositor->force_opengl_2d = 1; + gf_cfg_set_key(compositor->user->config, "Compositor", "OpenGLMode", "hybrid"); + compositor->force_opengl_2d = 0; + compositor->hybrid_opengl = 1; } else { - gf_cfg_set_key(compositor->user->config, "Compositor", "ForceOpenGL", "no"); + gf_cfg_set_key(compositor->user->config, "Compositor", "OpenGLMode", "disable"); compositor->force_opengl_2d = 0; compositor->visual->type_3d = 0; } -#endif } } @@ -1976,10 +2026,15 @@ static void gf_sc_recompute_ar(GF_Compositor *compositor, GF_Node *top_node) #ifndef GPAC_USE_OGL_ES visual_3d_init_yuv_shader(compositor->visual); #endif - ra_init(&compositor->visual->hybgl_drawn); + if (!compositor->visual->hybgl_drawn.list) { + ra_init(&compositor->visual->hybgl_drawn); + } } #endif } + + gf_sc_ar_control(compositor->audio_renderer, 1); + gf_sc_next_frame_state(compositor, GF_SC_DRAW_NONE); #ifndef GPAC_DISABLE_LOG @@ -1989,6 +2044,13 @@ static void gf_sc_recompute_ar(GF_Compositor *compositor, GF_Node *top_node) #endif compositor_evaluate_envtests(compositor, 0); + + //fullscreen was postponed, retry now that the AR has been recomputed + if (compositor->fullscreen_postponed) { + compositor->fullscreen_postponed = 0; + compositor->msg_type |= GF_SR_CFG_FULLSCREEN; + } + } } @@ -2047,9 +2109,26 @@ static void gf_sc_draw_scene(GF_Compositor *compositor) extern u32 time_spent_in_anim; #endif +static void compositor_release_textures(GF_Compositor *compositor, Bool frame_drawn) +{ + u32 i, count; + /*release all textures - we must release them to handle a same OD being used by several textures*/ + count = gf_list_count(compositor->textures); + for (i=0; itextures, i); + gf_sc_texture_release_stream(txh); + if (frame_drawn && txh->tx_io && !(txh->flags & GF_SR_TEXTURE_USED)) + gf_sc_texture_reset(txh); + /*remove the use flag*/ + txh->flags &= ~GF_SR_TEXTURE_USED; + } +} + + void gf_sc_simulation_tick(GF_Compositor *compositor) { GF_SceneGraph *sg; + GF_List *temp_queue; u32 in_time, end_time, i, count; Bool frame_drawn, has_timed_nodes=GF_FALSE, all_tx_done=GF_TRUE; #ifndef GPAC_DISABLE_LOG @@ -2060,11 +2139,16 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) /*lock compositor for the whole cycle*/ gf_sc_lock(compositor, 1); + gf_sc_texture_cleanup_hw(compositor); + /*first thing to do, let the video output handle user event if it is not threaded*/ compositor->video_out->ProcessEvent(compositor->video_out, NULL); if (compositor->freeze_display) { gf_sc_lock(compositor, 0); + if (!compositor->bench_mode) { + compositor->scene_sampled_clock = 
gf_sc_ar_get_clock(compositor->audio_renderer); + } if (!compositor->no_regulation) gf_sleep(compositor->frame_duration); return; } @@ -2091,27 +2175,48 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) visual_reset_graphics(compositor->visual); } -#ifdef GF_SR_EVENT_QUEUE /*process pending user events*/ #ifndef GPAC_DISABLE_LOG event_time = gf_sys_clock(); #endif - gf_mx_p(compositor->ev_mx); - while (gf_list_count(compositor->events)) { - Bool ret; - GF_Event *ev = (GF_Event*)gf_list_get(compositor->events, 0); - gf_list_rem(compositor->events, 0); - ret = gf_sc_exec_event(compositor, ev); - gf_free(ev); + //swap event queus + gf_mx_p(compositor->evq_mx); + temp_queue = compositor->event_queue; + compositor->event_queue = compositor->event_queue_back; + compositor->event_queue_back = temp_queue; + gf_mx_v(compositor->evq_mx); + while (gf_list_count(compositor->event_queue_back)) { + GF_QueuedEvent *qev = (GF_QueuedEvent*)gf_list_get(compositor->event_queue_back, 0); + gf_list_rem(compositor->event_queue_back, 0); + + if (qev->target) { +#ifndef GPAC_DISABLE_SVG + gf_sg_fire_dom_event(qev->target, &qev->dom_evt, qev->sg, NULL); +#endif + } else if (qev->node) { +#ifndef GPAC_DISABLE_SVG + gf_dom_event_fire(qev->node, &qev->dom_evt); +#endif + } else { + gf_sc_exec_event(compositor, &qev->evt); + } + gf_free(qev); } - gf_mx_v(compositor->ev_mx); #ifndef GPAC_DISABLE_LOG event_time = gf_sys_clock() - event_time; #endif -#elif !defined(GPAC_DISABLE_LOG) - event_time = 0; -#endif - + + + if (!compositor->bench_mode) { + compositor->scene_sampled_clock = gf_sc_ar_get_clock(compositor->audio_renderer); + } else { + if (compositor->force_bench_frame==1) { + //a system frame is pending on a future frame - we must increase our time + compositor->scene_sampled_clock += compositor->frame_duration; + } + compositor->force_bench_frame = 0; + } + //first update all natural textures to figure out timing compositor->frame_delay = (u32) -1; @@ -2141,35 +2246,32 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) gf_sc_lock(compositor, 0); return; } + + #ifndef GPAC_DISABLE_LOG texture_time = gf_sys_clock() - texture_time; #endif + //this is correct but doesn't bring much and we may actually waste time while sleeping that could be used for texture upload - we prefer sleeping at the end of the pass +#if 0 //if next video frame is due in this render cycle, wait until it matures if ((compositor->frame_delay > 0) && (compositor->frame_delay != (u32) -1)) { u32 diff=0; - while (1) { + compositor->frame_delay = MIN(compositor->frame_delay, (s32) compositor->frame_duration); + while (!compositor->video_frame_pending) { gf_sleep(0); diff = gf_sys_clock() - in_time; if (diff >= (u32) compositor->frame_delay) break; } - GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor] Waited %d ms for next frame and %d ms was required\n", diff, compositor->frame_delay)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Compositor] Waited %d ms for next frame and %d ms was required\n", diff, compositor->frame_delay)); if (compositor->next_frame_delay != (u32) -1) { if (diff < compositor->next_frame_delay) compositor->next_frame_delay -= diff; else compositor->next_frame_delay = 1; } } +#endif - if (!compositor->bench_mode) { - compositor->scene_sampled_clock = gf_sc_ar_get_clock(compositor->audio_renderer); - } else { - if (compositor->force_bench_frame==1) { - //a system frame is pending on a future frame - we must increase our time - compositor->scene_sampled_clock += compositor->frame_duration; - } - 
compositor->force_bench_frame = 0; - } #ifndef GPAC_DISABLE_SVG @@ -2335,6 +2437,8 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) /*if invalidated, draw*/ if (compositor->frame_draw_type) { GF_Window rc; + Bool textures_released = 0; + #ifndef GPAC_DISABLE_LOG traverse_time = gf_sys_clock(); time_spent_in_anim = 0; @@ -2349,7 +2453,7 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) compositor->frame_draw_type = 0; - GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor] Redrawing scene - OTB %d\n", compositor->scene_sampled_clock)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor] Redrawing scene - STB %d\n", compositor->scene_sampled_clock)); gf_sc_draw_scene(compositor); #ifndef GPAC_DISABLE_LOG traverse_time = gf_sys_clock() - traverse_time; @@ -2372,7 +2476,14 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) if(compositor->user->init_flags & GF_TERM_INIT_HIDE) compositor->skip_flush = 1; + //if no overlays, release textures before flushing, otherwise we might loose time waiting for vsync + if (!compositor->visual->has_overlays) { + compositor_release_textures(compositor, frame_drawn); + textures_released = 1; + } + if (compositor->skip_flush!=1) { + //release compositor in case we have vsync gf_sc_lock(compositor, 0); rc.x = rc.y = 0; @@ -2383,19 +2494,31 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) } else { compositor->skip_flush = 0; } + #ifndef GPAC_DISABLE_LOG flush_time = gf_sys_clock() - flush_time; + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Compositor] done flushing frame in %d ms\n", flush_time)); #endif visual_2d_draw_overlays(compositor->visual); compositor->last_had_overlays = compositor->visual->has_overlays; + if (!textures_released) { + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Compositor] Releasing textures after flush\n" )); + compositor_release_textures(compositor, frame_drawn); + } + if (compositor->stress_mode) { gf_sc_next_frame_state(compositor, GF_SC_DRAW_FRAME); gf_sc_reset_graphics(compositor); } compositor->reset_fonts = 0; + } else { + + //frame not drawn, release textures + compositor_release_textures(compositor, frame_drawn); + #ifndef GPAC_DISABLE_LOG traverse_time = 0; time_spent_in_anim = 0; @@ -2407,19 +2530,6 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) } compositor->reset_graphics = 0; - /*release all textures - we must release them to handle a same OD being used by several textures*/ - count = gf_list_count(compositor->textures); - for (i=0; itextures, i); - gf_sc_texture_release_stream(txh); - if (frame_drawn && txh->tx_io && !(txh->flags & GF_SR_TEXTURE_USED)) - gf_sc_texture_reset(txh); - /*remove the use flag*/ - txh->flags &= ~GF_SR_TEXTURE_USED; - } - - GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor] Frame %sdrawn\n", frame_drawn ? "" : "not " )); - compositor->last_frame_time = gf_sys_clock(); end_time = compositor->last_frame_time - in_time; @@ -2453,6 +2563,7 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) //in bench mode we always increase the clock of the fixed target simulation rate - this needs refinement if video is used ... 
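/*
 * Earlier in this simulation tick the compositor swaps event_queue and
 * event_queue_back under evq_mx, then dispatches from the back queue with the mutex
 * released, so event producers never wait on event handlers. The pattern, reduced to
 * a self-contained sketch (types and names are placeholders):
 */
#include <pthread.h>
#include <stdlib.h>

typedef struct queued_ev {
    int type;                   /* stand-in for the real event payload */
    struct queued_ev *next;
} queued_ev;

typedef struct {
    pthread_mutex_t mx;
    queued_ev *front;           /* producers push here, under the lock */
    queued_ev *back;            /* the compositor drains this, lock released */
} ev_queue;

static void ev_queue_dispatch(ev_queue *q, void (*handle)(const queued_ev *))
{
    queued_ev *tmp, *ev;
    /* hold the lock only for the swap itself */
    pthread_mutex_lock(&q->mx);
    tmp = q->front;
    q->front = q->back;
    q->back = tmp;
    pthread_mutex_unlock(&q->mx);

    while ((ev = q->back) != NULL) {
        q->back = ev->next;
        handle(ev);
        free(ev);
    }
}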
compositor->scene_sampled_clock += compositor->frame_duration; } + compositor->video_frame_pending=0; gf_sc_lock(compositor, 0); #if 0 @@ -2478,9 +2589,13 @@ void gf_sc_simulation_tick(GF_Compositor *compositor) //we have a pending frame, return asap - we could sleep until frames matures but this give weird regulation if (compositor->next_frame_delay != (u32) -1) { + if (compositor->next_frame_delay>end_time) compositor->next_frame_delay-=end_time; + else compositor->next_frame_delay=0; + + compositor->next_frame_delay = MIN(compositor->next_frame_delay, 2*compositor->frame_duration); if (compositor->next_frame_delay>2) { u32 diff=0; - while (! compositor->msg_type) { + while (! compositor->msg_type && ! compositor->video_frame_pending) { gf_sleep(1); diff = gf_sys_clock() - in_time; if (diff >= (u32) compositor->next_frame_delay) @@ -2693,12 +2808,7 @@ void gf_sc_traverse_subscene_ex(GF_Compositor *compositor, GF_Node *inline_paren static Bool gf_sc_handle_event_intern(GF_Compositor *compositor, GF_Event *event, Bool from_user) { -#ifdef GF_SR_EVENT_QUEUE - GF_Event *ev; -#else Bool ret; - u32 retry; -#endif if (compositor->term && (compositor->interaction_level & GF_INTERACT_INPUT_SENSOR) && (event->type<=GF_EVENT_MOUSEWHEEL)) { GF_Event evt = *event; @@ -2712,58 +2822,12 @@ static Bool gf_sc_handle_event_intern(GF_Compositor *compositor, GF_Event *event return 0; } */ -#ifdef GF_SR_EVENT_QUEUE - switch (event->type) { - case GF_EVENT_MOUSEMOVE: - { - u32 i, count; - gf_mx_p(compositor->ev_mx); - count = gf_list_count(compositor->events); - for (i=0; ievents, i); - if (ev->type == GF_EVENT_MOUSEMOVE) { - ev->mouse = event->mouse; - gf_mx_v(compositor->ev_mx); - return 1; - } - } - gf_mx_v(compositor->ev_mx); - } - default: - ev = (GF_Event *)gf_malloc(sizeof(GF_Event)); - ev->type = event->type; - if (event->type<=GF_EVENT_MOUSEWHEEL) { - ev->mouse = event->mouse; - } else if (event->type==GF_EVENT_TEXTINPUT) { - ev->character = event->character; - } else { - ev->key = event->key; - } - gf_mx_p(compositor->ev_mx); - gf_list_add(compositor->events, ev); - gf_mx_v(compositor->ev_mx); - break; - } - return 0; -#else - - retry = 100; - while (retry) { - if (gf_mx_try_lock(compositor->mx)) - break; - retry--; - gf_sleep(0); - if (!retry) { - return GF_FALSE; - } - } + gf_mx_p(compositor->mx); ret = gf_sc_exec_event(compositor, event); gf_sc_lock(compositor, GF_FALSE); - if (!from_user) { - } +// if (!from_user) { } return ret; -#endif } void gf_sc_traverse_subscene(GF_Compositor *compositor, GF_Node *inline_parent, GF_SceneGraph *subscene, void *rs) @@ -3207,12 +3271,13 @@ Bool gf_sc_use_raw_texture(GF_Compositor *compositor) return compositor->texture_from_decoder_memory; } -void gf_sc_get_av_caps(GF_Compositor *compositor, u32 *width, u32 *height, u32 *bpp, u32 *channels, u32 *sample_rate) +void gf_sc_get_av_caps(GF_Compositor *compositor, u32 *width, u32 *height, u32 *display_bit_depth, u32 *audio_bpp, u32 *channels, u32 *sample_rate) { if (width) *width = compositor->video_out->max_screen_width; if (height) *height = compositor->video_out->max_screen_height; + if (display_bit_depth) *display_bit_depth = compositor->video_out->max_screen_bpp ? 
compositor->video_out->max_screen_bpp : 8; //to do - if (bpp) *bpp = 8; + if (audio_bpp) *audio_bpp = 8; if (channels) *channels = 0; if (sample_rate) *sample_rate = 48000; } @@ -3227,3 +3292,92 @@ void gf_sc_set_system_pending_frame(GF_Compositor *compositor, Bool frame_pendin compositor->force_bench_frame = 2; } } + +void gf_sc_set_video_pending_frame(GF_Compositor *compositor) +{ + compositor->video_frame_pending = GF_TRUE; +} + +void gf_sc_queue_dom_event(GF_Compositor *compositor, GF_Node *node, GF_DOM_Event *evt) +{ + u32 i, count; + GF_QueuedEvent *qev; + gf_mx_p(compositor->evq_mx); + + count = gf_list_count(compositor->event_queue); + for (i=0; ievent_queue, i); + if ((qev->node==node) && (qev->dom_evt.type==evt->type)) { + qev->dom_evt = *evt; + gf_mx_v(compositor->evq_mx); + return; + } + } + GF_SAFEALLOC(qev, GF_QueuedEvent); + qev->node = node; + qev->dom_evt = *evt; + gf_list_add(compositor->event_queue, qev); + gf_mx_v(compositor->evq_mx); +} + +void gf_sc_queue_dom_event_on_target(GF_Compositor *compositor, GF_DOM_Event *evt, GF_DOMEventTarget *target, GF_SceneGraph *sg) +{ + u32 i, count; + GF_QueuedEvent *qev; + gf_mx_p(compositor->evq_mx); + + count = gf_list_count(compositor->event_queue); + for (i=0; ievent_queue, i); + if ((qev->target==target) && (qev->dom_evt.type==evt->type) && (qev->sg==sg) ) { + qev->dom_evt = *evt; + gf_mx_v(compositor->evq_mx); + return; + } + } + + GF_SAFEALLOC(qev, GF_QueuedEvent); + qev->sg = sg; + qev->target = target; + qev->dom_evt = *evt; + gf_list_add(compositor->event_queue, qev); + gf_mx_v(compositor->evq_mx); +} + +static void sc_cleanup_event_queue(GF_List *evq, GF_Node *node, GF_SceneGraph *sg) +{ + u32 i, count = gf_list_count(evq); + for (i=0; inode) { + if (node && qev->node) + del = 1; + if (sg && (gf_node_get_graph(qev->node)==sg)) + del = 1; + } + if (qev->sg==sg) + del = 1; + else if (qev->target && (qev->target->ptr_type == GF_DOM_EVENT_TARGET_NODE)) { + if (node && ((GF_Node *)qev->target->ptr==node)) + del = 1; + if (sg && (gf_node_get_graph((GF_Node *)qev->target->ptr)==sg)) + del = 1; + } + + if (del) { + gf_list_rem(evq, i); + i--; + count--; + gf_free(qev); + } + } +} + +void gf_sc_node_destroy(GF_Compositor *compositor, GF_Node *node, GF_SceneGraph *sg) +{ + gf_mx_p(compositor->evq_mx); + sc_cleanup_event_queue(compositor->event_queue, node, sg); + sc_cleanup_event_queue(compositor->event_queue_back, node, sg); + gf_mx_v(compositor->evq_mx); +} diff --git a/src/compositor/compositor_2d.c b/src/compositor/compositor_2d.c index 76d26c4..a6af564 100644 --- a/src/compositor/compositor_2d.c +++ b/src/compositor/compositor_2d.c @@ -561,7 +561,7 @@ static void log_blit_times(GF_TextureHandler *txh, u32 push_time) gf_mo_get_object_time(txh->stream, &ck); if (ck>txh->last_frame_time) { GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Compositor2D] Bliting frame (CTS %d) %d ms too late\n", txh->last_frame_time, ck - txh->last_frame_time )); - } + } if (txh->nb_frames==100) { txh->nb_frames = 0; txh->upload_time = 0; @@ -569,7 +569,7 @@ static void log_blit_times(GF_TextureHandler *txh, u32 push_time) txh->nb_frames ++; txh->upload_time += push_time; - GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[2D Blitter] Blit texure (CTS %d) %d ms after due date - blit in %d ms - average push time %d ms\n", txh->last_frame_time, ck - txh->last_frame_time, push_time, txh->upload_time / txh->nb_frames)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[2D Blitter] At %d Blit texture (CTS %d) %d ms after due date - blit in %d ms - average push time %d ms\n", ck, 
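gf_sc_queue_dom_event() above coalesces duplicate events: if an event of the same type is already queued for the same node (or the same target/scene graph), only its payload is refreshed, so a burst of identical notifications never grows the queue. A simplified sketch with an array-backed queue and placeholder types:

/* Simplified stand-ins; the real code keys on GF_Node / GF_DOMEventTarget plus event type */
typedef struct { const void *node; int type; int detail; } QueuedEvent;
typedef struct { QueuedEvent items[64]; int count; } EventQueue;

static void queue_dom_event(EventQueue *q, const void *node, int type, int detail)
{
    for (int i = 0; i < q->count; i++) {
        if ((q->items[i].node == node) && (q->items[i].type == type)) {
            q->items[i].detail = detail;   /* coalesce: keep only the latest payload */
            return;
        }
    }
    if (q->count < 64) {                   /* otherwise append a new entry */
        q->items[q->count].node = node;
        q->items[q->count].type = type;
        q->items[q->count].detail = detail;
        q->count++;
    }
}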
txh->last_frame_time, ck - txh->last_frame_time, push_time, txh->upload_time / txh->nb_frames)); } #endif @@ -832,51 +832,57 @@ static Bool compositor_2d_draw_bitmap_ex(GF_VisualManager *visual, GF_TextureHan #endif video_src.video_buffer = txh->data; if (txh->raw_memory) { - video_src.u_ptr = txh->pU; - video_src.v_ptr = txh->pV; + video_src.u_ptr = (char *) txh->pU; + video_src.v_ptr = (char *) txh->pV; } - if (overlay_type) { - u32 push_time; + //overlay queing + if (overlay_type==2) { + GF_IRect o_rc; + GF_OverlayStack *ol, *first; + + /*queue overlay in order*/ + GF_SAFEALLOC(ol, GF_OverlayStack); + ol->ctx = ctx; + ol->dst = dst_wnd; + ol->src = src_wnd; + first = visual->overlays; + if (first) { + while (first->next) first = first->next; + first->next = ol; + } else { + visual->overlays = ol; + } - if (overlay_type==2) { - GF_IRect o_rc; - GF_OverlayStack *ol, *first; - - /*queue overlay in order*/ - GF_SAFEALLOC(ol, GF_OverlayStack); - ol->ctx = ctx; - ol->dst = dst_wnd; - ol->src = src_wnd; - first = visual->overlays; - if (first) { - while (first->next) first = first->next; - first->next = ol; - } else { - visual->overlays = ol; - } + if (visual->center_coords) { + o_rc.x = dst_wnd.x - output_width/2; + o_rc.y = output_height/2- dst_wnd.y; + } else { + o_rc.x = dst_wnd.x; + o_rc.y = dst_wnd.y + dst_wnd.h; + } - if (visual->center_coords) { - o_rc.x = dst_wnd.x - output_width/2; - o_rc.y = output_height/2- dst_wnd.y; - } else { - o_rc.x = dst_wnd.x; - o_rc.y = dst_wnd.y + dst_wnd.h; - } + o_rc.width = dst_wnd.w; + o_rc.height = dst_wnd.h; + visual->ClearSurface(visual, &o_rc, visual->compositor->video_out->overlay_color_key); + visual->has_overlays = GF_TRUE; + /*mark drawable as overlay*/ + ctx->drawable->flags |= DRAWABLE_IS_OVERLAY; - o_rc.width = dst_wnd.w; - o_rc.height = dst_wnd.h; - visual->ClearSurface(visual, &o_rc, visual->compositor->video_out->overlay_color_key); - visual->has_overlays = GF_TRUE; - /*mark drawable as overlay*/ - ctx->drawable->flags |= DRAWABLE_IS_OVERLAY; + /*prevents this context from being removed in direct draw mode by requesting a new one + but not allocating it*/ + if (tr_state->immediate_draw) + visual_2d_get_drawable_context(visual); + + return GF_TRUE; + } + + //will pause clock if first HW load + gf_sc_texture_check_pause_on_first_load(txh); + + if (overlay_type) { + u32 push_time; - /*prevents this context from being removed in direct draw mode by requesting a new one - but not allocating it*/ - if (tr_state->immediate_draw) - visual_2d_get_drawable_context(visual); - return GF_TRUE; - } /*top level overlay*/ if (flush_video) { GF_Window rc; @@ -892,6 +898,7 @@ static Bool compositor_2d_draw_bitmap_ex(GF_VisualManager *visual, GF_TextureHan push_time = gf_sys_clock(); e = visual->compositor->video_out->Blit(visual->compositor->video_out, &video_src, &src_wnd, &dst_wnd, 1); + if (!e) { #ifndef GPAC_DISABLE_LOG log_blit_times(txh, push_time); @@ -899,6 +906,9 @@ static Bool compositor_2d_draw_bitmap_ex(GF_VisualManager *visual, GF_TextureHan /*mark drawable as overlay*/ ctx->drawable->flags |= DRAWABLE_IS_OVERLAY; visual->has_overlays = GF_TRUE; + + //will resume clock if first HW load + gf_sc_texture_check_pause_on_first_load(txh); return GF_TRUE; } GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor2D] Error during overlay blit - trying with soft one\n")); @@ -912,6 +922,7 @@ static Bool compositor_2d_draw_bitmap_ex(GF_VisualManager *visual, GF_TextureHan if (!use_soft_stretch) { u32 push_time = gf_sys_clock(); e = 
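The refactored block above queues type-2 overlays instead of blitting them immediately: a GF_OverlayStack entry is appended at the tail of the visual's overlay list so overlays keep their z-order, the destination rectangle is cleared to the video-out colour key, and the drawable is flagged as an overlay. A small sketch of the tail append and key-colour punch-through, with simplified stand-in types:

#include <stdlib.h>

/* Simplified stand-ins for GF_OverlayStack / GF_IRect / visual->ClearSurface */
typedef struct { int x, y, w, h; } Rect;
typedef struct overlay { Rect dst; struct overlay *next; } Overlay;

static void clear_surface(const Rect *rc, unsigned int color_key)
{ (void)rc; (void)color_key; /* would punch the colour key into the back buffer */ }

static int queue_overlay(Overlay **head, Rect dst, unsigned int color_key)
{
    Overlay *ol = calloc(1, sizeof(*ol));
    if (!ol) return 0;
    ol->dst = dst;

    if (*head) {                       /* append at the tail to preserve z-order */
        Overlay *last = *head;
        while (last->next) last = last->next;
        last->next = ol;
    } else {
        *head = ol;
    }
    clear_surface(&dst, color_key);    /* the HW overlay shows through the key colour */
    return 1;
}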
visual->compositor->video_out->Blit(visual->compositor->video_out, &video_src, &src_wnd, &dst_wnd, 0); + /*HW pb, try soft*/ if (e) { use_soft_stretch = GF_TRUE; @@ -933,6 +944,10 @@ static Bool compositor_2d_draw_bitmap_ex(GF_VisualManager *visual, GF_TextureHan } #endif } + + //will resume clock if first HW load + gf_sc_texture_check_pause_on_first_load(txh); + if (use_soft_stretch) { GF_VideoSurface backbuffer; e = visual->compositor->video_out->LockBackBuffer(visual->compositor->video_out, &backbuffer, GF_TRUE); @@ -946,6 +961,7 @@ static Bool compositor_2d_draw_bitmap_ex(GF_VisualManager *visual, GF_TextureHan } else { GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor2D] Cannot lock back buffer - Error %s\n", gf_error_to_string(e) )); if (is_attached) visual_2d_init_raster(visual); + return GF_FALSE; } if (!visual->compositor->video_memory) { @@ -956,7 +972,7 @@ static Bool compositor_2d_draw_bitmap_ex(GF_VisualManager *visual, GF_TextureHan } } visual->has_modif = GF_TRUE; - if (is_attached) visual_2d_init_raster(visual); +// if (is_attached) visual_2d_init_raster(visual); return GF_TRUE; } @@ -1201,7 +1217,7 @@ GF_Err compositor_2d_set_aspect_ratio(GF_Compositor *compositor) if (compositor->was_system_memory != evt.setup.system_memory) changed = 1; else if (old_vp_width != compositor->vp_width) changed=1; else if (old_vp_height != compositor->vp_height) changed=1; - else if (compositor->was_opengl != evt.setup.opengl_mode) changed=1; + else if (compositor->is_opengl != evt.setup.opengl_mode) changed=1; if (changed) { @@ -1227,7 +1243,7 @@ GF_Err compositor_2d_set_aspect_ratio(GF_Compositor *compositor) return e; } - compositor->was_opengl = evt.setup.opengl_mode; + compositor->is_opengl = evt.setup.opengl_mode; compositor->was_system_memory = evt.setup.system_memory; } if (compositor->has_size_info) { diff --git a/src/compositor/compositor_3d.c b/src/compositor/compositor_3d.c index c1dc2e0..db29271 100644 --- a/src/compositor/compositor_3d.c +++ b/src/compositor/compositor_3d.c @@ -110,6 +110,7 @@ GF_Err compositor_3d_set_aspect_ratio(GF_Compositor *compositor) evt.setup.opengl_mode = 0; #else evt.setup.opengl_mode = 1; + compositor->is_opengl = 1; #endif if (compositor->video_out->ProcessEvent(compositor->video_out, &evt)<0) { @@ -210,7 +211,7 @@ void compositor_3d_draw_bitmap(Drawable *stack, DrawAspect2D *asp, GF_TraverseSt if (!alpha) alpha = GF_COL_A(asp->line_color); /*texture is available in hw, use it - if blending, force using texture*/ - if (!gf_sc_texture_needs_reload(txh) || (alpha != 0xFF) || use_texture + if (use_texture || !gf_sc_texture_needs_reload(txh) || (alpha != 0xFF) #ifdef GF_SR_USE_DEPTH || tr_state->depth_offset #endif diff --git a/src/compositor/gl_inc.h b/src/compositor/gl_inc.h index 1e0708c..020dffd 100644 --- a/src/compositor/gl_inc.h +++ b/src/compositor/gl_inc.h @@ -328,6 +328,8 @@ GLDECL(void, glDeleteBuffers, (GLsizei , GLuint *) ) GLDECL(void, glBindBuffer, (GLenum, GLuint ) ) GLDECL(void, glBufferData, (GLenum, int, void *, GLenum) ) GLDECL(void, glBufferSubData, (GLenum, int, int, void *) ) +GLDECL(void *, glMapBuffer, (GLenum, GLenum) ) +GLDECL(void *, glUnmapBuffer, (GLenum) ) #endif //GL_VERSION_1_5 @@ -421,6 +423,10 @@ GLDECL(void, glBufferSubData, (GLenum, int, int, void *) ) #define GL_STENCIL_BACK_REF 0x8CA3 #define GL_STENCIL_BACK_VALUE_MASK 0x8CA4 #define GL_STENCIL_BACK_WRITEMASK 0x8CA5 +#define GL_PIXEL_UNPACK_BUFFER_ARB 0x88EC +#define GL_STREAM_DRAW_ARB 0x88E0 +#define GL_WRITE_ONLY_ARB 0x88B9 +#define GL_DYNAMIC_DRAW_ARB 0x88E8 
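The blit path above keeps the usual two-stage strategy: try the video output's hardware Blit() first and, if it fails, lock the back buffer and stretch in software (now returning failure when the back buffer cannot be locked). A compact sketch of that fallback flow, with every driver call stubbed out as a placeholder:

#include <stdio.h>

typedef int Err;
#define ERR_OK 0

/* Placeholder driver hooks - stand-ins for video_out->Blit / LockBackBuffer */
static Err  hw_blit(void)             { return -1; /* pretend the HW blit failed */ }
static Err  lock_back_buffer(void)    { return ERR_OK; }
static void soft_stretch(void)        { /* CPU copy/scale into the back buffer */ }
static void unlock_back_buffer(void)  { }

static int draw_bitmap(void)
{
    int use_soft_stretch = 0;

    if (hw_blit() != ERR_OK) {
        fprintf(stderr, "HW blit failed - trying software stretch\n");
        use_soft_stretch = 1;
    }
    if (use_soft_stretch) {
        if (lock_back_buffer() != ERR_OK)
            return 0;                /* cannot lock the back buffer: give up */
        soft_stretch();
        unlock_back_buffer();
    }
    return 1;
}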
GLDECL(GLuint, glCreateProgram, (void) ) diff --git a/src/compositor/mpeg4_textures.c b/src/compositor/mpeg4_textures.c index 248644d..5e10d32 100644 --- a/src/compositor/mpeg4_textures.c +++ b/src/compositor/mpeg4_textures.c @@ -73,6 +73,8 @@ static void movietexture_activate(MovieTextureStack *stack, M_MovieTexture *mt, if (!stack->txh.is_open) { scene_time -= mt->startTime; gf_sc_texture_play_from_to(&stack->txh, &mt->url, scene_time, -1, gf_mo_get_loop(stack->txh.stream, mt->loop), 0); + } else if (stack->first_frame_fetched) { + gf_mo_resume(stack->txh.stream); } gf_mo_set_speed(stack->txh.stream, mt->speed); } @@ -96,7 +98,7 @@ static void movietexture_update(GF_TextureHandler *txh) if (!txnode->isActive && st->first_frame_fetched) return; /*when fetching the first frame disable resync*/ - gf_sc_texture_update_frame(txh, !txnode->isActive); + gf_sc_texture_update_frame(txh, 0); if (txh->stream_finished) { if (movietexture_get_loop(st, txnode)) { @@ -114,7 +116,7 @@ static void movietexture_update(GF_TextureHandler *txh) gf_node_event_out_str(txh->owner, "duration_changed"); /*stop stream if needed*/ if (!txnode->isActive && txh->is_open) { - gf_sc_texture_stop(txh); + gf_mo_pause(txh->stream); /*make sure the refresh flag is not cleared*/ txh->needs_refresh = 1; gf_sc_invalidate(txh->compositor, NULL); @@ -148,6 +150,8 @@ static void movietexture_update_time(GF_TimeNode *st) stack->fetch_first_frame = 0; if (!stack->txh.is_open) gf_sc_texture_play(&stack->txh, &mt->url); + else + gf_mo_resume(stack->txh.stream); } return; } diff --git a/src/compositor/svg_grouping.c b/src/compositor/svg_grouping.c index e30e847..51cce0b 100644 --- a/src/compositor/svg_grouping.c +++ b/src/compositor/svg_grouping.c @@ -458,7 +458,7 @@ static void svg_traverse_svg(GF_Node *node, void *rs, Bool is_destroy) evt.prev_translate.x = stack->vp.x; evt.prev_translate.y = stack->vp.y; evt.type = GF_EVENT_VP_RESIZE; - gf_scene_notify_event(scene, 0, NULL, &evt, GF_OK); + gf_scene_notify_event(scene, 0, NULL, &evt, GF_OK, GF_TRUE); } } diff --git a/src/compositor/svg_media.c b/src/compositor/svg_media.c index 8214f35..645c046 100644 --- a/src/compositor/svg_media.c +++ b/src/compositor/svg_media.c @@ -213,6 +213,10 @@ static void SVG_Build_Bitmap_Graph(SVG_video_stack *stack, GF_TraverseState *tr_ gf_node_dirty_clear(stack->graph->node, GF_SG_SVG_GEOMETRY_DIRTY); } +static void svg_open_texture(SVG_video_stack *stack) +{ + gf_sc_texture_open(&stack->txh, &stack->txurl, GF_FALSE); +} static void svg_play_texture(SVG_video_stack *stack, SVGAllAttributes *atts) { @@ -246,7 +250,6 @@ static void svg_traverse_bitmap(GF_Node *node, void *rs, Bool is_destroy) DrawableContext *ctx; SVGAllAttributes all_atts; - if (is_destroy) { gf_sc_texture_destroy(&stack->txh); gf_sg_mfurl_del(stack->txurl); @@ -259,7 +262,6 @@ static void svg_traverse_bitmap(GF_Node *node, void *rs, Bool is_destroy) return; } - /*TRAVERSE_DRAW is NEVER called in 3D mode*/ if (tr_state->traversing_mode==TRAVERSE_DRAW_2D) { SVG_Draw_bitmap(tr_state); @@ -436,20 +438,24 @@ static void SVG_Update_video(GF_TextureHandler *txh) GF_FieldInfo init_vis_info; SVG_video_stack *stack = (SVG_video_stack *) gf_node_get_private(txh->owner); - if (!txh->is_open) { - SVG_InitialVisibility init_vis; - if (stack->first_frame_fetched) return; + if (!txh->stream) { + svg_open_texture(stack); - init_vis = SVG_INITIALVISIBILTY_WHENSTARTED; + if (!txh->is_open) { + SVG_InitialVisibility init_vis; + if (stack->first_frame_fetched) return; - if 
(gf_node_get_attribute_by_tag(txh->owner, TAG_SVG_ATT_initialVisibility, GF_FALSE, GF_FALSE, &init_vis_info) == GF_OK) { - init_vis = *(SVG_InitialVisibility *)init_vis_info.far_ptr; - } + init_vis = SVG_INITIALVISIBILTY_WHENSTARTED; + + if (gf_node_get_attribute_by_tag(txh->owner, TAG_SVG_ATT_initialVisibility, GF_FALSE, GF_FALSE, &init_vis_info) == GF_OK) { + init_vis = *(SVG_InitialVisibility *)init_vis_info.far_ptr; + } - /*opens stream only at first access to fetch first frame if needed*/ - if (init_vis == SVG_INITIALVISIBILTY_ALWAYS) { - svg_play_texture((SVG_video_stack*)stack, NULL); - gf_sc_invalidate(txh->compositor, NULL); + /*opens stream only at first access to fetch first frame if needed*/ + if (init_vis == SVG_INITIALVISIBILTY_ALWAYS) { + svg_play_texture((SVG_video_stack*)stack, NULL); + gf_sc_invalidate(txh->compositor, NULL); + } } return; } diff --git a/src/compositor/texturing.c b/src/compositor/texturing.c index 4d3a5c7..d207647 100644 --- a/src/compositor/texturing.c +++ b/src/compositor/texturing.c @@ -67,7 +67,7 @@ Bool gf_sc_texture_check_url_change(GF_TextureHandler *txh, MFURL *url) } GF_EXPORT -GF_Err gf_sc_texture_play_from_to(GF_TextureHandler *txh, MFURL *url, Double start_offset, Double end_offset, Bool can_loop, Bool lock_scene_timeline) +GF_Err gf_sc_texture_open(GF_TextureHandler *txh, MFURL *url, Bool lock_scene_timeline) { if (txh->is_open) return GF_BAD_PARAM; @@ -78,6 +78,19 @@ GF_Err gf_sc_texture_play_from_to(GF_TextureHandler *txh, MFURL *url, Double sta txh->stream = gf_mo_register(txh->owner, url, lock_scene_timeline, 0); /*bad/Empty URL*/ if (!txh->stream) return GF_NOT_SUPPORTED; + + return GF_OK; +} + +GF_EXPORT +GF_Err gf_sc_texture_play_from_to(GF_TextureHandler *txh, MFURL *url, Double start_offset, Double end_offset, Bool can_loop, Bool lock_scene_timeline) +{ + if (!txh->stream) { + GF_Err e; + e = gf_sc_texture_open(txh, url, lock_scene_timeline); + if (e != GF_OK) return e; + } + /*request play*/ gf_mo_play(txh->stream, start_offset, end_offset, can_loop); @@ -166,8 +179,8 @@ GF_EXPORT void gf_sc_texture_update_frame(GF_TextureHandler *txh, Bool disable_resync) { Bool needs_reload = 0; - u32 size, ts, ms_until_next; - s32 ms_until_pres; + u32 size, ts; + s32 ms_until_pres, ms_until_next; /*already refreshed*/ if (txh->needs_refresh) return; @@ -223,7 +236,7 @@ void gf_sc_texture_update_frame(GF_TextureHandler *txh, Bool disable_resync) gf_mo_release_data(txh->stream, 0xFFFFFFFF, 0); txh->needs_release = 0; if (!txh->stream_finished) { - if (txh->compositor->next_frame_delay > ms_until_next) + if (ms_until_next>0 && (txh->compositor->next_frame_delay > (u32) ms_until_next)) txh->compositor->next_frame_delay = ms_until_next; } return; diff --git a/src/compositor/texturing.h b/src/compositor/texturing.h index 3bc8058..ab6a618 100644 --- a/src/compositor/texturing.h +++ b/src/compositor/texturing.h @@ -55,6 +55,8 @@ void gf_sc_texture_set_stencil(GF_TextureHandler *hdl, GF_STENCIL stencil); Bool gf_sc_texture_is_transparent(GF_TextureHandler *txh); +void gf_sc_texture_check_pause_on_first_load(GF_TextureHandler *txh); + /*ALL THE FOLLOWING ARE ONLY AVAILABLE IN 3D AND DEAL WITH OPENGL TEXTURE MANAGEMENT*/ #ifndef GPAC_DISABLE_3D diff --git a/src/compositor/texturing_gl.c b/src/compositor/texturing_gl.c index cd213ad..b7189ff 100644 --- a/src/compositor/texturing_gl.c +++ b/src/compositor/texturing_gl.c @@ -52,16 +52,17 @@ enum TX_NEEDS_RASTER_LOAD = (1<<1), /*signal video data must be sent to 3D hw*/ TX_NEEDS_HW_LOAD = (1<<2), + /*OpenGL 
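The texturing.c hunk above splits stream setup out of gf_sc_texture_play_from_to(): gf_sc_texture_open() only registers the media object for the URL, and play_from_to() opens on demand before issuing the play request. This lets the SVG video path (svg_open_texture above) attach the stream early and start playback later. A minimal sketch of the split, with the media-object calls stubbed as placeholders:

/* Simplified stand-ins for GF_TextureHandler and the gf_mo_* calls */
typedef struct { void *stream; int is_open; } TexHandle;

static void *mo_register(const char *url) { return (void *)url; /* placeholder */ }
static void  mo_play(void *stream, double from, double to, int loop)
{ (void)stream; (void)from; (void)to; (void)loop; }

static int tex_open(TexHandle *tx, const char *url)
{
    if (tx->is_open) return -1;            /* already playing */
    if (!tx->stream) tx->stream = mo_register(url);
    return tx->stream ? 0 : -2;            /* bad or empty URL */
}

static int tex_play(TexHandle *tx, const char *url, double from, double to, int loop)
{
    if (!tx->stream) {
        int e = tex_open(tx, url);
        if (e) return e;                   /* could not register the stream */
    }
    mo_play(tx->stream, from, to, loop);   /* request playback */
    tx->is_open = 1;
    return 0;
}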
texturing flags*/ /*these 4 are exclusives*/ - TX_MUST_SCALE = (1<<3), - TX_IS_POW2 = (1<<4), - TX_IS_RECT = (1<<5), - TX_EMULE_POW2 = (1<<6), - TX_EMULE_FIRST_LOAD = (1<<7), + TX_MUST_SCALE = (1<<10), + TX_IS_POW2 = (1<<11), + TX_IS_RECT = (1<<12), + TX_EMULE_POW2 = (1<<13), + TX_EMULE_FIRST_LOAD = (1<<14), - TX_IS_FLIPPED = (1<<8), + TX_IS_FLIPPED = (1<<15), }; @@ -71,6 +72,8 @@ struct __texture_wrapper /*2D texturing*/ GF_STENCIL tx_raster; + //0: not paused, 1: paused, 2: initial pause has been done + u32 init_pause_status; /*3D texturing*/ #ifndef GPAC_DISABLE_3D @@ -87,6 +90,8 @@ struct __texture_wrapper u32 nb_comp, gl_format, gl_type, gl_dtype; Bool yuv_shader; u32 v_id, u_id; + u32 pbo_id, u_pbo_id, v_pbo_id; + Bool pbo_pushed; #endif #ifdef GF_SR_USE_DEPTH char *depth_data; @@ -102,6 +107,29 @@ GF_Err gf_sc_texture_allocate(GF_TextureHandler *txh) return GF_OK; } +static void release_txio(struct __texture_wrapper *tx_io) +{ + +#ifndef GPAC_DISABLE_3D + if (tx_io->id) glDeleteTextures(1, &tx_io->id); + if (tx_io->u_id) glDeleteTextures(1, &tx_io->u_id); + if (tx_io->v_id) glDeleteTextures(1, &tx_io->v_id); + + if (tx_io->pbo_id) glDeleteBuffers(1, &tx_io->pbo_id); + if (tx_io->u_pbo_id) glDeleteBuffers(1, &tx_io->u_pbo_id); + if (tx_io->v_pbo_id) glDeleteBuffers(1, &tx_io->v_pbo_id); + + if (tx_io->scale_data) gf_free(tx_io->scale_data); + if (tx_io->conv_data) gf_free(tx_io->conv_data); +#endif + +#ifdef GF_SR_USE_DEPTH + if (tx_io->depth_data) gf_free(tx_io->depth_data); +#endif + + gf_free(tx_io); +} + void gf_sc_texture_release(GF_TextureHandler *txh) { if (txh->vout_udta && txh->compositor->video_out->ReleaseTexture) { @@ -109,31 +137,76 @@ void gf_sc_texture_release(GF_TextureHandler *txh) txh->vout_udta = NULL; } - if (!txh->tx_io) return; - if (txh->tx_io->tx_raster) { - txh->compositor->rasterizer->stencil_delete(txh->tx_io->tx_raster); - txh->tx_io->tx_raster = NULL; - } + if (txh->tx_io) { + if (txh->tx_io->tx_raster) { + txh->compositor->rasterizer->stencil_delete(txh->tx_io->tx_raster); + txh->tx_io->tx_raster = NULL; + } -#ifndef GPAC_DISABLE_3D - if (txh->tx_io->id) glDeleteTextures(1, &txh->tx_io->id); - if (txh->tx_io->u_id) glDeleteTextures(1, &txh->tx_io->u_id); - if (txh->tx_io->v_id) glDeleteTextures(1, &txh->tx_io->v_id); - if (txh->tx_io->scale_data) gf_free(txh->tx_io->scale_data); - if (txh->tx_io->conv_data) gf_free(txh->tx_io->conv_data); -#endif + if (gf_th_id()==txh->compositor->video_th_id) { + release_txio(txh->tx_io); + } else { + gf_list_add(txh->compositor->textures_gc, txh->tx_io); + } + txh->tx_io=NULL; + } +} -#ifdef GF_SR_USE_DEPTH - if (txh->tx_io->depth_data) gf_free(txh->tx_io->depth_data); -#endif +void gf_sc_texture_cleanup_hw(GF_Compositor *compositor) +{ + while (gf_list_count(compositor->textures_gc)) { + struct __texture_wrapper *tx_io = (struct __texture_wrapper *) gf_list_last(compositor->textures_gc); + gf_list_rem_last(compositor->textures_gc); - gf_free(txh->tx_io); - txh->tx_io = NULL; + release_txio(tx_io); + } } + + GF_Err gf_sc_texture_set_data(GF_TextureHandler *txh) { txh->tx_io->flags |= TX_NEEDS_RASTER_LOAD | TX_NEEDS_HW_LOAD; + +#if !defined(GPAC_DISABLE_3D) && !defined(GPAC_USE_TINYGL) && !defined(GPAC_USE_OGL_ES) + //PBO mode: start pushing the texture + if (txh->tx_io->pbo_id) { + u8 *ptr; + u32 size = txh->stride*txh->height; + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, txh->tx_io->pbo_id); + ptr =(u8 *)glMapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB); + if (ptr) memcpy(ptr, txh->data, size); + 
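gf_sc_texture_release() above can now be called from any thread: GL objects must be destroyed on the thread that owns the GL context, so releases issued elsewhere are parked on the textures_gc list and freed later by gf_sc_texture_cleanup_hw() on the video thread. A simplified sketch of that hand-off; the real code uses GF_List and runs under the compositor lock, whereas this stand-in uses a bare linked list with no locking:

#include <pthread.h>
#include <stdlib.h>

/* Stand-in for struct __texture_wrapper: would also hold GL texture/PBO ids */
typedef struct tx_io { struct tx_io *next; } TxIO;

static pthread_t video_thread;   /* assumed set when the GL context is created */
static TxIO *gc_list;            /* textures waiting to be destroyed on the GL thread */

static void release_txio(TxIO *tx)
{
    /* on the GL thread this would also glDeleteTextures()/glDeleteBuffers() */
    free(tx);
}

void texture_release(TxIO *tx)
{
    if (pthread_equal(pthread_self(), video_thread)) {
        release_txio(tx);        /* safe: we own the GL context */
    } else {
        tx->next = gc_list;      /* defer destruction to the video thread */
        gc_list = tx;
    }
}

void texture_cleanup_hw(void)    /* called from the video thread, once per tick */
{
    while (gc_list) {
        TxIO *tx = gc_list;
        gc_list = gc_list->next;
        release_txio(tx);
    }
}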
glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0); + + if (txh->tx_io->u_pbo_id) { + u8 *pU = txh->pU; + u8 *pV = txh->pV; + if (!pU) pU = (u8 *) txh->data + size; + if (!pV) pV = (u8 *) txh->data + 5*size/4; + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, txh->tx_io->u_pbo_id); + ptr =(u8 *)glMapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB); + if (ptr) memcpy(ptr, pU, size/4); + glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0); + + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, txh->tx_io->v_pbo_id); + ptr =(u8 *)glMapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB); + if (ptr) memcpy(ptr, pV, size/4); + glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0); + } + + txh->tx_io->pbo_pushed = 1; + + //we just pushed our texture to the GPU, release + if (txh->raw_memory) { + gf_sc_texture_release_stream(txh); + } + } +#endif return GF_OK; } @@ -142,6 +215,7 @@ void gf_sc_texture_reset(GF_TextureHandler *txh) #ifndef GPAC_DISABLE_3D if (txh->tx_io->id) { GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Texturing] Releasing OpenGL texture %d\n", txh->tx_io->id)); + glDeleteTextures(1, &txh->tx_io->id); txh->tx_io->id = 0; if (txh->tx_io->u_id) { @@ -149,6 +223,10 @@ void gf_sc_texture_reset(GF_TextureHandler *txh) glDeleteTextures(1, &txh->tx_io->v_id); txh->tx_io->u_id = txh->tx_io->v_id = 0; } + if (txh->tx_io->pbo_id) glDeleteBuffers(1, &txh->tx_io->pbo_id); + if (txh->tx_io->u_pbo_id) glDeleteBuffers(1, &txh->tx_io->u_pbo_id); + if (txh->tx_io->v_pbo_id) glDeleteBuffers(1, &txh->tx_io->v_pbo_id); + txh->tx_io->pbo_id = txh->tx_io->u_pbo_id = txh->tx_io->v_pbo_id = 0; } txh->tx_io->flags |= TX_NEEDS_HW_LOAD; #endif @@ -226,10 +304,15 @@ void gf_sc_texture_disable(GF_TextureHandler *txh) txh->compositor->visual->current_texture_glsl_program = 0; glActiveTexture(GL_TEXTURE0); glBindTexture(txh->tx_io->gl_type, 0); + + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[GL Texture] Texture drawn (CTS %d)\n", txh->last_frame_time)); + } #endif glDisable(txh->tx_io->gl_type); if (txh->transparent) glDisable(GL_BLEND); + + gf_sc_texture_check_pause_on_first_load(txh); } } @@ -390,7 +473,6 @@ static Bool tx_setup_format(GF_TextureHandler *txh) tx_id[2] = txh->tx_io->v_id; nb_tx = 3; - if (txh->tx_io->flags & TX_IS_RECT) { GLint loc; glUseProgram(compositor->visual->yuv_rect_glsl_program); @@ -413,6 +495,37 @@ static Bool tx_setup_format(GF_TextureHandler *txh) } #endif +#if !defined(GPAC_USE_TINYGL) && !defined(GPAC_USE_OGL_ES) + if (txh->compositor->gl_caps.pbo && txh->compositor->enable_pbo) { + u32 size = txh->stride*txh->height; + + if (!txh->tx_io->pbo_id && txh->tx_io->id) { + glGenBuffers(1, &txh->tx_io->pbo_id); + if (txh->tx_io->pbo_id) { + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, txh->tx_io->pbo_id); + glBufferData(GL_PIXEL_UNPACK_BUFFER_ARB, size, NULL, GL_DYNAMIC_DRAW_ARB); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0); + } + } + if (!txh->tx_io->u_pbo_id && txh->tx_io->u_id) { + glGenBuffers(1, &txh->tx_io->u_pbo_id); + if (txh->tx_io->u_pbo_id) { + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, txh->tx_io->u_pbo_id); + glBufferData(GL_PIXEL_UNPACK_BUFFER_ARB, size/4, NULL, GL_DYNAMIC_DRAW_ARB); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0); + } + } + if (!txh->tx_io->v_pbo_id && txh->tx_io->v_id) { + glGenBuffers(1, &txh->tx_io->v_pbo_id); + if (txh->tx_io->v_pbo_id) { + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, txh->tx_io->v_pbo_id); + glBufferData(GL_PIXEL_UNPACK_BUFFER_ARB, size/4, NULL, 
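The PBO path above allocates one pixel-unpack buffer per plane (glBufferData with a NULL pointer just reserves storage) and fills it each frame through glMapBuffer/memcpy/glUnmapBuffer, so the texture upload can later be sourced from the GPU-side buffer. A hedged sketch of both steps; it assumes a desktop GL context exposing ARB_pixel_buffer_object, with prototypes pulled in via GL_GLEXT_PROTOTYPES (GPAC resolves these entry points dynamically instead):

#define GL_GLEXT_PROTOTYPES 1
#include <string.h>
#include <GL/gl.h>
#include <GL/glext.h>   /* GL_PIXEL_UNPACK_BUFFER_ARB, GL_DYNAMIC_DRAW_ARB, GL_WRITE_ONLY_ARB */

/* Reserve a PBO large enough for one plane (the data pointer is NULL on purpose). */
static GLuint create_plane_pbo(GLsizeiptr size)
{
    GLuint pbo = 0;
    glGenBuffers(1, &pbo);
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo);
    glBufferData(GL_PIXEL_UNPACK_BUFFER_ARB, size, NULL, GL_DYNAMIC_DRAW_ARB);
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
    return pbo;
}

/* Copy one decoded plane into the mapped PBO; the driver can then DMA it to the GPU. */
static void push_plane(GLuint pbo, const void *data, size_t size)
{
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo);
    void *ptr = glMapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, GL_WRITE_ONLY_ARB);
    if (ptr) {
        memcpy(ptr, data, size);
        glUnmapBuffer(GL_PIXEL_UNPACK_BUFFER_ARB);
    }
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
}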
GL_DYNAMIC_DRAW_ARB); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0); + } + } + } +#endif + if (use_yuv_shaders) { //we use LUMINANCE because GL_RED is not defined on android ... txh->tx_io->gl_format = GL_LUMINANCE; @@ -474,6 +587,9 @@ static Bool tx_setup_format(GF_TextureHandler *txh) glPixelTransferi(GL_RED_SCALE, 64); #endif } else { +#ifndef GPAC_USE_OGL_ES + glPixelTransferi(GL_RED_SCALE, 1); +#endif glPixelStorei(GL_UNPACK_ALIGNMENT, 1); } glDisable(txh->tx_io->gl_type); @@ -699,11 +815,12 @@ assert(txh->data ); #ifndef GPAC_DISABLE_3D -static void do_tex_image_2d(GF_TextureHandler *txh, GLint tx_mode, Bool first_load, u8 *data, u32 stride, u32 w, u32 h) +static void do_tex_image_2d(GF_TextureHandler *txh, GLint tx_mode, Bool first_load, u8 *data, u32 stride, u32 w, u32 h, u32 pbo_id) { Bool needs_stride; if (txh->tx_io->gl_dtype==GL_UNSIGNED_SHORT) { needs_stride = (stride != 2*w*txh->tx_io->nb_comp) ? GF_TRUE : GF_FALSE; + if (needs_stride) stride /= 2; } else { needs_stride = (stride!=w*txh->tx_io->nb_comp) ? GF_TRUE : GF_FALSE; } @@ -716,6 +833,14 @@ static void do_tex_image_2d(GF_TextureHandler *txh, GLint tx_mode, Bool first_lo if (needs_stride) { #endif +#if !defined(GPAC_USE_TINYGL) && !defined(GPAC_USE_OGL_ES) + if (txh->tx_io->pbo_pushed) { + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo_id); + glTexImage2D(txh->tx_io->gl_type, 0, tx_mode, w, h, 0, txh->tx_io->gl_format, txh->tx_io->gl_dtype, NULL); + glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0); + } + else +#endif if (first_load) { glTexImage2D(txh->tx_io->gl_type, 0, tx_mode, w, h, 0, txh->tx_io->gl_format, txh->tx_io->gl_dtype, data); } else { @@ -783,19 +908,13 @@ Bool gf_sc_texture_push_image(GF_TextureHandler *txh, Bool generate_mipmaps, Boo /*force setup of image*/ txh->needs_refresh = 1; tx_setup_format(txh); - first_load = 1; + txh->tx_io->flags |= TX_EMULE_FIRST_LOAD; GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Texturing] Allocating OpenGL texture %d\n", txh->tx_io->id)); } if (!txh->tx_io->gl_type) return 0; /*if data not yet ready don't push the texture*/ if (txh->data) { - - if (txh->tx_io->flags & TX_EMULE_FIRST_LOAD) { - txh->tx_io->flags &= ~TX_EMULE_FIRST_LOAD; - first_load = 1; - } - /*convert image*/ gf_sc_texture_convert(txh); } @@ -805,6 +924,12 @@ Bool gf_sc_texture_push_image(GF_TextureHandler *txh, Bool generate_mipmaps, Boo txh->tx_io->flags &= ~TX_NEEDS_HW_LOAD; data = gf_sc_texture_get_data(txh, &pixel_format); if (!data) return 0; + + if (txh->tx_io->flags & TX_EMULE_FIRST_LOAD) { + txh->tx_io->flags &= ~TX_EMULE_FIRST_LOAD; + first_load = 1; + } + if (txh->tx_io->flags & TX_EMULE_POW2) { w = txh->tx_io->conv_w; h = txh->tx_io->conv_h; @@ -826,32 +951,44 @@ Bool gf_sc_texture_push_image(GF_TextureHandler *txh, Bool generate_mipmaps, Boo #else + gf_sc_texture_check_pause_on_first_load(txh); + /*pow2 texture or hardware support*/ if (! 
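do_tex_image_2d() above takes the matching PBO id: when a frame has been pushed to the PBO, the buffer is bound as the pixel-unpack source and glTexImage2D() is called with a NULL pointer, which makes GL read the pixels from the bound buffer instead of client memory. A short sketch of that upload step, under the same GL assumptions as the previous example:

#define GL_GLEXT_PROTOTYPES 1
#include <GL/gl.h>
#include <GL/glext.h>

/* Upload a texture level from a previously filled PBO: with the unpack buffer
   bound, the "pixels" argument of glTexImage2D is an offset into the buffer,
   so NULL means "start of the PBO". */
static void upload_from_pbo(GLenum target, GLint internal_fmt, GLsizei w, GLsizei h,
                            GLenum fmt, GLenum type, GLuint pbo)
{
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, pbo);
    glTexImage2D(target, 0, internal_fmt, w, h, 0, fmt, type, NULL);
    glBindBuffer(GL_PIXEL_UNPACK_BUFFER_ARB, 0);
}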
(txh->tx_io->flags & TX_MUST_SCALE) ) { if (txh->tx_io->yuv_shader) { u32 push_time; u8 *pY, *pU, *pV; u32 ck; - pY = data; + pY = (u8 *) data; if (txh->raw_memory) { - if (!txh->pU || !txh->pV) return 0; - - pU = txh->pU; - pV = txh->pV; + assert(txh->pU && txh->pV); + pU = (u8 *) txh->pU; + pV = (u8 *) txh->pV; } else { - pU = pY + txh->height*txh->stride; - pV = pU + txh->height*txh->stride/4; + pU = (u8 *) pY + txh->height*txh->stride; + pV = (u8 *) pU + txh->height*txh->stride/4; + } + +#ifndef GPAC_USE_OGL_ES + if (txh->pixelformat==GF_PIXEL_YV12_10) { + glPixelStorei(GL_UNPACK_ALIGNMENT, 2); + //we use 10 bits but GL will normalise using 16 bits, so we need to multiply the nomralized result by 2^6 + glPixelTransferi(GL_RED_SCALE, 64); } +#endif push_time = gf_sys_clock(); - do_tex_image_2d(txh, tx_mode, first_load, pY, txh->stride, w, h); + do_tex_image_2d(txh, tx_mode, first_load, pY, txh->stride, w, h, txh->tx_io->pbo_id); + GL_CHECK_ERR glBindTexture(txh->tx_io->gl_type, txh->tx_io->u_id); - do_tex_image_2d(txh, tx_mode, first_load, pU, txh->stride/2, w/2, h/2); + do_tex_image_2d(txh, tx_mode, first_load, pU, txh->stride/2, w/2, h/2, txh->tx_io->u_pbo_id); + GL_CHECK_ERR glBindTexture(txh->tx_io->gl_type, txh->tx_io->v_id); - do_tex_image_2d(txh, tx_mode, first_load, pV, txh->stride/2, w/2, h/2); + do_tex_image_2d(txh, tx_mode, first_load, pV, txh->stride/2, w/2, h/2, txh->tx_io->v_pbo_id); + GL_CHECK_ERR push_time = gf_sys_clock() - push_time; @@ -862,20 +999,21 @@ Bool gf_sc_texture_push_image(GF_TextureHandler *txh, Bool generate_mipmaps, Boo txh->nb_frames ++; txh->upload_time += push_time; +#ifndef GPAC_USE_OGL_ES + if (txh->pixelformat==GF_PIXEL_YV12_10) { + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + glPixelTransferi(GL_RED_SCALE, 1); + } +#endif + #ifndef GPAC_DISABLE_LOGS gf_mo_get_object_time(txh->stream, &ck); - GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[GL Texture] Texure (CTS %d) %d ms after due date - Pushed Y,U,V texures in %d ms - average push time %d ms\n", txh->last_frame_time, ck - txh->last_frame_time, push_time, txh->upload_time / txh->nb_frames)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[GL Texture] Texture (CTS %d) %d ms after due date - Pushed Y,U,V textures in %d ms - average push time %d ms (PBO enabled %s)\n", txh->last_frame_time, ck - txh->last_frame_time, push_time, txh->upload_time / txh->nb_frames, txh->tx_io->pbo_pushed ? 
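When the decoder does not expose separate U/V pointers, the code above derives them from the planar 4:2:0 layout it uses: the luma plane is height*stride bytes and each chroma plane a quarter of that, stored back to back. For 10-bit YV12 the samples sit in 16-bit words and GL_RED_SCALE is set to 64 (2^6) so that normalisation over 16 bits recovers the 10-bit range. A small sketch of the plane arithmetic, with a simplified frame descriptor standing in for the texture handler fields:

#include <stdint.h>

/* Simplified stand-in for the GF_TextureHandler fields used above */
typedef struct {
    uint8_t *data;                 /* start of the Y plane */
    uint32_t width, height, stride;
} Yuv420Frame;

static void yuv420_planes(const Yuv420Frame *f, uint8_t **pY, uint8_t **pU, uint8_t **pV)
{
    *pY = f->data;
    *pU = *pY + f->height * f->stride;       /* first chroma plane follows Y           */
    *pV = *pU + f->height * f->stride / 4;   /* second chroma plane follows (1/4 size) */
    /* chroma planes are (width/2 x height/2) with a stride of stride/2 */
}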
"yes" : "no")); #endif - //we just pushed our texture to the GPU, release - if (txh->raw_memory) { - gf_sc_texture_release_stream(txh); - } + txh->tx_io->pbo_pushed = 0; } else { - if (first_load) { - glTexImage2D(txh->tx_io->gl_type, 0, tx_mode, w, h, 0, txh->tx_io->gl_format, txh->tx_io->gl_dtype, (unsigned char *) data); - } else { - glTexSubImage2D(txh->tx_io->gl_type, 0, 0, 0, w, h, txh->tx_io->gl_format, txh->tx_io->gl_dtype, (unsigned char *) data); - } + do_tex_image_2d(txh, tx_mode, first_load, (u8 *) data, txh->stride, w, h, txh->tx_io->pbo_id); + txh->tx_io->pbo_pushed = 0; } } else { @@ -913,6 +1051,7 @@ Bool gf_sc_texture_push_image(GF_TextureHandler *txh, Bool generate_mipmaps, Boo } } #endif + return 1; #endif @@ -1488,7 +1627,8 @@ u32 gf_sc_texture_enable_ex(GF_TextureHandler *txh, GF_Node *tx_transform, GF_Re return ret; } #endif - if (!txh || !txh->tx_io) return 0; + if (!txh || !txh->tx_io) + return 0; if (txh->compute_gradient_matrix && gf_sc_texture_needs_reload(txh) ) { compositor_gradient_update(txh); @@ -1517,7 +1657,9 @@ u32 gf_sc_texture_enable_ex(GF_TextureHandler *txh, GF_Node *tx_transform, GF_Re /*use our program*/ Bool is_rect = txh->tx_io->flags & TX_IS_RECT; compositor->visual->current_texture_glsl_program = is_rect ? compositor->visual->yuv_rect_glsl_program : compositor->visual->yuv_glsl_program; + GL_CHECK_ERR glUseProgram(compositor->visual->current_texture_glsl_program); + GL_CHECK_ERR glEnable(txh->tx_io->gl_type); @@ -1566,3 +1708,21 @@ void gf_sc_texture_set_stencil(GF_TextureHandler *txh, GF_STENCIL stencil) txh->tx_io->flags |= TX_NEEDS_HW_LOAD; } +void gf_sc_texture_check_pause_on_first_load(GF_TextureHandler *txh) +{ + if (txh->stream && txh->tx_io) { + switch (txh->tx_io->init_pause_status) { + case 0: + gf_sc_ar_control(txh->compositor->audio_renderer, 0); + txh->tx_io->init_pause_status = 1; + break; + case 1: + gf_sc_ar_control(txh->compositor->audio_renderer, 1); + txh->tx_io->init_pause_status = 2; + break; + default: + break; + } + } +} + diff --git a/src/compositor/visual_manager.h b/src/compositor/visual_manager.h index 115b223..9634d34 100644 --- a/src/compositor/visual_manager.h +++ b/src/compositor/visual_manager.h @@ -152,7 +152,7 @@ struct _visual_manager * Visual Manager part for 3D drawing */ -#if defined( _LP64 ) && defined(CONFIG_DARWIN_GL) +#if defined( _LP64 ) && 0 && defined(CONFIG_DARWIN_GL) #define GF_SHADERID u64 #else #define GF_SHADERID u32 @@ -186,12 +186,13 @@ struct _visual_manager u32 nb_views, current_view, autostereo_type, camera_layout; Bool reverse_views; + GF_SHADERID base_glsl_vertex; + u32 *gl_textures; u32 auto_stereo_width, auto_stereo_height; GF_Mesh *autostereo_mesh; - GF_SHADERID glsl_program; - GF_SHADERID glsl_vertex; - GF_SHADERID glsl_fragment; + GF_SHADERID autostereo_glsl_program; + GF_SHADERID autostereo_glsl_fragment; GF_SHADERID yuv_glsl_program; GF_SHADERID yuv_glsl_fragment; diff --git a/src/compositor/visual_manager_2d.h b/src/compositor/visual_manager_2d.h index dc71c74..a4df5af 100644 --- a/src/compositor/visual_manager_2d.h +++ b/src/compositor/visual_manager_2d.h @@ -58,7 +58,7 @@ typedef struct #define ra_init(ra) { (ra)->count = 0; (ra)->alloc = RA_DEFAULT_STEP; (ra)->list = (GF_RectArrayEntry*)gf_malloc(sizeof(GF_RectArrayEntry)*(ra)->alloc); } /*deletes structure - called as a destructor*/ -#define ra_del(ra) { if ((ra)->list) gf_free((ra)->list); } +#define ra_del(ra) { if ((ra)->list) { gf_free((ra)->list); (ra)->list = NULL; } } /*adds rect to list - expand if needed*/ diff --git 
a/src/compositor/visual_manager_3d_gl.c b/src/compositor/visual_manager_3d_gl.c index 97ca5e1..6c5be0f 100644 --- a/src/compositor/visual_manager_3d_gl.c +++ b/src/compositor/visual_manager_3d_gl.c @@ -92,6 +92,8 @@ GLDECL_STATIC(glDeleteBuffers); GLDECL_STATIC(glBindBuffer); GLDECL_STATIC(glBufferData); GLDECL_STATIC(glBufferSubData); +GLDECL_STATIC(glMapBuffer); +GLDECL_STATIC(glUnmapBuffer); #endif //LOAD_GL_1_5 #ifdef LOAD_GL_2_0 @@ -188,7 +190,8 @@ void gf_sc_load_opengl_extensions(GF_Compositor *compositor, Bool has_gl_context if (!has_gl_context) return; - /*we have a GL context, get proc addresses*/ + /*we have a GL context, init the rest (proc addresses & co)*/ + glGetIntegerv(GL_MAX_TEXTURE_SIZE, &compositor->gl_caps.max_texture_size); #ifdef LOAD_GL_1_3 if (CHECK_GL_EXT("GL_ARB_multitexture")) { @@ -197,7 +200,7 @@ void gf_sc_load_opengl_extensions(GF_Compositor *compositor, Bool has_gl_context } GET_GLFUN(glBlendEquation); #endif - + #ifdef LOAD_GL_1_4 if (compositor->gl_caps.point_sprite) { GET_GLFUN(glPointParameterf); @@ -213,10 +216,17 @@ void gf_sc_load_opengl_extensions(GF_Compositor *compositor, Bool has_gl_context GET_GLFUN(glBufferData); GET_GLFUN(glBufferSubData); } + if (CHECK_GL_EXT("GL_ARB_pixel_buffer_object")) { + GET_GLFUN(glMapBuffer); + GET_GLFUN(glUnmapBuffer); + + compositor->gl_caps.pbo=1; + } #endif + #ifdef LOAD_GL_2_0 GET_GLFUN(glCreateProgram); @@ -283,21 +293,24 @@ void gf_sc_load_opengl_extensions(GF_Compositor *compositor, Bool has_gl_context #if !defined(GPAC_USE_TINYGL) && !defined(GPAC_USE_OGL_ES) + static char *default_glsl_vertex = "\ varying vec3 gfNormal;\ varying vec3 gfView;\ + varying vec2 TexCoord;\ void main(void)\ {\ gfView = vec3(gl_ModelViewMatrix * gl_Vertex);\ gfNormal = normalize(gl_NormalMatrix * gl_Normal);\ gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;\ - gl_TexCoord[0] = gl_MultiTexCoord0;\ + TexCoord = gl_MultiTexCoord0.st;\ }"; #ifdef GPAC_UNUSED_FUNC static char *default_glsl_lighting = "\ varying vec3 gfNormal;\ varying vec3 gfView;\ + varying vec2 TexCoord;\ void gpac_lighting (void) \ { \ vec3 L = normalize(gl_LightSource[0].position.xyz - gfView);\ @@ -315,10 +328,11 @@ static char *default_glsl_lighting = "\ static char *glsl_view_anaglyph = "\ uniform sampler2D gfView1;\ uniform sampler2D gfView2;\ + varying vec2 TexCoord;\ void main(void) \ {\ - vec4 col1 = texture2D(gfView1, gl_TexCoord[0].st); \ - vec4 col2 = texture2D(gfView2, gl_TexCoord[0].st); \ + vec4 col1 = texture2D(gfView1, TexCoord.st); \ + vec4 col2 = texture2D(gfView2, TexCoord.st); \ gl_FragColor.r = col1.r;\ gl_FragColor.g = col2.g;\ gl_FragColor.b = col2.b;\ @@ -328,10 +342,11 @@ static char *glsl_view_anaglyph = "\ static char *glsl_view_anaglyph_optimize = "\ uniform sampler2D gfView1;\ uniform sampler2D gfView2;\ + varying vec2 TexCoord;\ void main(void) \ {\ - vec4 col1 = texture2D(gfView1, gl_TexCoord[0].st); \ - vec4 col2 = texture2D(gfView2, gl_TexCoord[0].st); \ + vec4 col1 = texture2D(gfView1, TexCoord.st); \ + vec4 col2 = texture2D(gfView2, TexCoord.st); \ gl_FragColor.r = 0.7*col1.g + 0.3*col1.b;\ gl_FragColor.r = pow(gl_FragColor.r, 1.5);\ gl_FragColor.g = col2.g;\ @@ -342,23 +357,25 @@ static char *glsl_view_anaglyph_optimize = "\ static char *glsl_view_columns = "\ uniform sampler2D gfView1;\ uniform sampler2D gfView2;\ + varying vec2 TexCoord;\ void main(void) \ {\ if ( int( mod(gl_FragCoord.x, 2.0) ) == 0) \ - gl_FragColor = texture2D(gfView1, gl_TexCoord[0].st); \ + gl_FragColor = texture2D(gfView1, TexCoord.st); \ else \ - 
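gf_sc_load_opengl_extensions() above gates the PBO path on a run-time check of the extension string and resolves glMapBuffer/glUnmapBuffer through the platform's GetProcAddress mechanism. A hedged standalone sketch of that pattern; get_proc_address() is a placeholder for wglGetProcAddress / glXGetProcAddressARB / eglGetProcAddress and is stubbed here:

#include <string.h>
#include <GL/gl.h>

typedef void      *(*MapBufferFn)(GLenum target, GLenum access);
typedef GLboolean  (*UnmapBufferFn)(GLenum target);

static MapBufferFn   p_glMapBuffer;
static UnmapBufferFn p_glUnmapBuffer;

/* Placeholder: replace with the platform loader (wgl/glX/egl GetProcAddress). */
static void *get_proc_address(const char *name) { (void)name; return NULL; }

static int load_pbo_functions(void)
{
    const char *ext = (const char *)glGetString(GL_EXTENSIONS);
    if (!ext || !strstr(ext, "GL_ARB_pixel_buffer_object"))
        return 0;                                   /* PBOs not available */

    p_glMapBuffer   = (MapBufferFn)get_proc_address("glMapBuffer");
    p_glUnmapBuffer = (UnmapBufferFn)get_proc_address("glUnmapBuffer");
    return (p_glMapBuffer != NULL) && (p_glUnmapBuffer != NULL);
}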
gl_FragColor = texture2D(gfView2, gl_TexCoord[0].st); \ + gl_FragColor = texture2D(gfView2, TexCoord.st); \ }"; static char *glsl_view_rows = "\ uniform sampler2D gfView1;\ uniform sampler2D gfView2;\ + varying vec2 TexCoord;\ void main(void) \ {\ if ( int( mod(gl_FragCoord.y, 2.0) ) == 0) \ - gl_FragColor = texture2D(gfView1, gl_TexCoord[0].st); \ + gl_FragColor = texture2D(gfView1, TexCoord.st); \ else \ - gl_FragColor = texture2D(gfView2, gl_TexCoord[0].st); \ + gl_FragColor = texture2D(gfView2, TexCoord.st); \ }"; static char *glsl_view_5VSP19 = "\ @@ -367,13 +384,14 @@ static char *glsl_view_5VSP19 = "\ uniform sampler2D gfView3;\ uniform sampler2D gfView4;\ uniform sampler2D gfView5;\ + varying vec2 TexCoord;\ void main(void) {\ vec4 color[5];\ - color[0] = texture2D(gfView5, gl_TexCoord[0].st);\ - color[1] = texture2D(gfView4, gl_TexCoord[0].st);\ - color[2] = texture2D(gfView3, gl_TexCoord[0].st);\ - color[3] = texture2D(gfView2, gl_TexCoord[0].st);\ - color[4] = texture2D(gfView1, gl_TexCoord[0].st);\ + color[0] = texture2D(gfView5, TexCoord.st);\ + color[1] = texture2D(gfView4, TexCoord.st);\ + color[2] = texture2D(gfView3, TexCoord.st);\ + color[3] = texture2D(gfView2, TexCoord.st);\ + color[4] = texture2D(gfView1, TexCoord.st);\ float pitch = 5.0 + 1.0 - mod(gl_FragCoord.y , 5.0);\ int col = int( mod(pitch + 3.0 * (gl_FragCoord.x), 5.0 ) );\ int Vr = int(col);\ @@ -387,102 +405,105 @@ static char *glsl_view_5VSP19 = "\ }"; static char *glsl_yuv_shader = "\ - uniform sampler2D y_plane;\ - uniform sampler2D u_plane;\ - uniform sampler2D v_plane;\ - uniform float alpha;\ - const vec3 offset = vec3(-0.0625, -0.5, -0.5);\ - const vec3 R_mul = vec3(1.164, 0.000, 1.596);\ - const vec3 G_mul = vec3(1.164, -0.391, -0.813);\ - const vec3 B_mul = vec3(1.164, 2.018, 0.000);\ - void main(void) \ - {\ - vec2 texc;\ - vec3 yuv, rgb;\ - texc = gl_TexCoord[0].st;\ - texc.y = 1.0 - texc.y;\ - yuv.x = texture2D(y_plane, texc).r; \ - yuv.y = texture2D(u_plane, texc).r; \ - yuv.z = texture2D(v_plane, texc).r; \ - yuv += offset; \ - rgb.r = dot(yuv, R_mul); \ - rgb.g = dot(yuv, G_mul); \ - rgb.b = dot(yuv, B_mul); \ - gl_FragColor = vec4(rgb, alpha);\ - }"; + uniform sampler2D y_plane;\n\ + uniform sampler2D u_plane;\n\ + uniform sampler2D v_plane;\n\ + uniform float alpha;\n\ + varying vec2 TexCoord;\n\ + const vec3 offset = vec3(-0.0625, -0.5, -0.5);\n\ + const vec3 R_mul = vec3(1.164, 0.000, 1.596);\n\ + const vec3 G_mul = vec3(1.164, -0.391, -0.813);\n\ + const vec3 B_mul = vec3(1.164, 2.018, 0.000);\n\ + void main(void) \n\ + {\n\ + vec2 texc;\n\ + vec3 yuv, rgb;\n\ + texc = TexCoord.st;\n\ + texc.y = 1.0 - texc.y;\n\ + yuv.x = texture2D(y_plane, texc).r;\n\ + yuv.y = texture2D(u_plane, texc).r;\n\ + yuv.z = texture2D(v_plane, texc).r;\n\ + yuv += offset;\n\ + rgb.r = dot(yuv, R_mul);\n\ + rgb.g = dot(yuv, G_mul);\n\ + rgb.b = dot(yuv, B_mul);\n\ + gl_FragColor = vec4(rgb, alpha);\n\ + }\n"; static char *glsl_yuv_rect_shader_strict = "\ #version 140\n\ #extension GL_ARB_texture_rectangle : enable\n\ - uniform sampler2DRect y_plane;\ - uniform sampler2DRect u_plane;\ - uniform sampler2DRect v_plane;\ - uniform float width;\ - uniform float height;\ - uniform float alpha;\ - const vec3 offset = vec3(-0.0625, -0.5, -0.5);\ - const vec3 R_mul = vec3(1.164, 0.000, 1.596);\ - const vec3 G_mul = vec3(1.164, -0.391, -0.813);\ - const vec3 B_mul = vec3(1.164, 2.018, 0.000);\ - out vec4 FragColor;\ - void main(void) \ - {\ - vec2 texc;\ - vec3 yuv, rgb;\ - texc = gl_TexCoord[0].st;\ - texc.y = 1.0 - 
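The rewritten glsl_yuv_shader above converts video-range YCbCr to RGB with the usual BT.601 coefficients: the planes are offset by (16/256, 128/256, 128/256) and multiplied by the 1.164 / 1.596 / 0.391 / 0.813 / 2.018 matrix rows. A CPU reference of the same math, handy for checking the shader output on a known pixel:

/* CPU reference for the shader's YUV->RGB conversion (inputs in [0,1]). */
static float clamp01(float v) { return v < 0.0f ? 0.0f : (v > 1.0f ? 1.0f : v); }

static void yuv_to_rgb(float y, float u, float v, float *r, float *g, float *b)
{
    y -= 0.0625f;   /* 16/256  */
    u -= 0.5f;      /* 128/256 */
    v -= 0.5f;
    *r = clamp01(1.164f * y              + 1.596f * v);
    *g = clamp01(1.164f * y - 0.391f * u - 0.813f * v);
    *b = clamp01(1.164f * y + 2.018f * u);
}

For a mid-grey sample (y = 0.5, u = v = 0.5) this yields roughly r = g = b = 0.51, which is what the fragment shader should produce for the same input.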
texc.y;\ - texc.x *= width;\ - texc.y *= height;\ - yuv.x = texture2DRect(y_plane, texc).r; \ - texc.x /= 2.0;\ - texc.y /= 2.0;\ - yuv.y = texture2DRect(u_plane, texc).r; \ - yuv.z = texture2DRect(v_plane, texc).r; \ - yuv += offset; \ - rgb.r = dot(yuv, R_mul); \ - rgb.g = dot(yuv, G_mul); \ - rgb.b = dot(yuv, B_mul); \ - FragColor = vec4(rgb, alpha);\ - }"; + uniform sampler2DRect y_plane;\n\ + uniform sampler2DRect u_plane;\n\ + uniform sampler2DRect v_plane;\n\ + uniform float width;\n\ + uniform float height;\n\ + uniform float alpha;\n\ + in vec2 TexCoord;\n\ + const vec3 offset = vec3(-0.0625, -0.5, -0.5);\n\ + const vec3 R_mul = vec3(1.164, 0.000, 1.596);\n\ + const vec3 G_mul = vec3(1.164, -0.391, -0.813);\n\ + const vec3 B_mul = vec3(1.164, 2.018, 0.000);\n\ + out vec4 FragColor;\n\ + void main(void) \n\ + {\n\ + vec2 texc;\n\ + vec3 yuv, rgb;\n\ + texc = TexCoord.st;\n\ + texc.y = 1.0 - texc.y;\n\ + texc.x *= width;\n\ + texc.y *= height;\n\ + yuv.x = texture2DRect(y_plane, texc).r;\n\ + texc.x /= 2.0;\n\ + texc.y /= 2.0;\n\ + yuv.y = texture2DRect(u_plane, texc).r;\n\ + yuv.z = texture2DRect(v_plane, texc).r;\n\ + yuv += offset;\n\ + rgb.r = dot(yuv, R_mul);\n\ + rgb.g = dot(yuv, G_mul);\n\ + rgb.b = dot(yuv, B_mul);\n\ + FragColor = vec4(rgb, alpha);\n\ + }\n"; static char *glsl_yuv_rect_shader_relaxed= "\ - uniform sampler2DRect y_plane;\ - uniform sampler2DRect u_plane;\ - uniform sampler2DRect v_plane;\ - uniform float width;\ - uniform float height;\ - uniform float alpha;\ - const vec3 offset = vec3(-0.0625, -0.5, -0.5);\ - const vec3 R_mul = vec3(1.164, 0.000, 1.596);\ - const vec3 G_mul = vec3(1.164, -0.391, -0.813);\ - const vec3 B_mul = vec3(1.164, 2.018, 0.000);\ - void main(void) \ - {\ - vec2 texc;\ - vec3 yuv, rgb;\ - texc = gl_TexCoord[0].st;\ - texc.y = 1.0 - texc.y;\ - texc.x *= width;\ - texc.y *= height;\ - yuv.x = texture2DRect(y_plane, texc).r; \ - texc.x /= 2.0;\ - texc.y /= 2.0;\ - yuv.y = texture2DRect(u_plane, texc).r; \ - yuv.z = texture2DRect(v_plane, texc).r; \ - yuv += offset; \ - rgb.r = dot(yuv, R_mul); \ - rgb.g = dot(yuv, G_mul); \ - rgb.b = dot(yuv, B_mul); \ - gl_FragColor = vec4(rgb, alpha);\ - }"; + uniform sampler2DRect y_plane;\n\ + uniform sampler2DRect u_plane;\n\ + uniform sampler2DRect v_plane;\n\ + uniform float width;\n\ + uniform float height;\n\ + uniform float alpha;\n\ + varying vec2 TexCoord;\n\ + const vec3 offset = vec3(-0.0625, -0.5, -0.5);\n\ + const vec3 R_mul = vec3(1.164, 0.000, 1.596);\n\ + const vec3 G_mul = vec3(1.164, -0.391, -0.813);\n\ + const vec3 B_mul = vec3(1.164, 2.018, 0.000);\n\ + void main(void) \n\ + {\n\ + vec2 texc;\n\ + vec3 yuv, rgb;\n\ + texc = TexCoord.st;\n\ + texc.y = 1.0 - texc.y;\n\ + texc.x *= width;\n\ + texc.y *= height;\n\ + yuv.x = texture2DRect(y_plane, texc).r;\n\ + texc.x /= 2.0;\n\ + texc.y /= 2.0;\n\ + yuv.y = texture2DRect(u_plane, texc).r;\n\ + yuv.z = texture2DRect(v_plane, texc).r;\n\ + yuv += offset;\n\ + rgb.r = dot(yuv, R_mul);\n\ + rgb.g = dot(yuv, G_mul);\n\ + rgb.b = dot(yuv, B_mul);\n\ + gl_FragColor = vec4(rgb, alpha);\n\ + }\n"; Bool visual_3d_compile_shader(GF_SHADERID shader_id, const char *name, const char *source) { GLint blen = 0; GLsizei slen = 0; - u32 len; + s32 len; if (!source || !shader_id) return 0; len = (u32) strlen(source); glShaderSource(shader_id, 1, &source, &len); @@ -497,71 +518,83 @@ Bool visual_3d_compile_shader(GF_SHADERID shader_id, const char *name, const cha glGetInfoLogARB(shader_id, blen, &slen, compiler_log); #endif GF_LOG(GF_LOG_ERROR, 
GF_LOG_COMPOSE, ("[GLSL] Failed to compile shader %s: %s\n", name, compiler_log)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[GLSL] ***** faulty shader code ****\n%s\n**********************\n", source)); gf_free (compiler_log); return 0; } return 1; } +static GF_SHADERID visual_3d_shader_from_source_file(const char *src_path, u32 shader_type) +{ + FILE *src = gf_f64_open(src_path, "rt"); + GF_SHADERID shader = 0; + if (src) { + size_t size; + char *shader_src; + gf_f64_seek(src, 0, SEEK_END); + size = (size_t) gf_f64_tell(src); + gf_f64_seek(src, 0, SEEK_SET); + shader_src = gf_malloc(sizeof(char)*(size+1)); + size = fread(shader_src, 1, size, src); + fclose(src); + if (size != (size_t) -1) { + shader_src[size]=0; + shader = glCreateShader(shader_type); + if (visual_3d_compile_shader(shader, (shader_type == GL_FRAGMENT_SHADER) ? "fragment" : "vertex", shader_src)==GF_FALSE) { + glDeleteShader(shader); + shader = 0; + } + } + gf_free(shader_src); + } else { + GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to open shader file %s\n", src_path)); + } + return shader; +} void visual_3d_init_stereo_shaders(GF_VisualManager *visual) { if (!visual->compositor->gl_caps.has_shaders) return; - if (visual->glsl_program) return; + if (visual->autostereo_glsl_program) return; - visual->glsl_program = glCreateProgram(); + visual->autostereo_glsl_program = glCreateProgram(); - if (!visual->glsl_vertex) { - visual->glsl_vertex = glCreateShader(GL_VERTEX_SHADER); - visual_3d_compile_shader(visual->glsl_vertex, "vertex", default_glsl_vertex); + if (!visual->base_glsl_vertex) { + visual->base_glsl_vertex = glCreateShader(GL_VERTEX_SHADER); + visual_3d_compile_shader(visual->base_glsl_vertex, "vertex", default_glsl_vertex); } switch (visual->autostereo_type) { case GF_3D_STEREO_COLUMNS: - visual->glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); - visual_3d_compile_shader(visual->glsl_fragment, "fragment", glsl_view_columns); + visual->autostereo_glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); + visual_3d_compile_shader(visual->autostereo_glsl_fragment, "fragment", glsl_view_columns); break; case GF_3D_STEREO_ROWS: - visual->glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); - visual_3d_compile_shader(visual->glsl_fragment, "fragment", glsl_view_rows); + visual->autostereo_glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); + visual_3d_compile_shader(visual->autostereo_glsl_fragment, "fragment", glsl_view_rows); break; case GF_3D_STEREO_ANAGLYPH: - visual->glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); - visual_3d_compile_shader(visual->glsl_fragment, "fragment", glsl_view_anaglyph); + visual->autostereo_glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); + visual_3d_compile_shader(visual->autostereo_glsl_fragment, "fragment", glsl_view_anaglyph); break; case GF_3D_STEREO_5VSP19: - visual->glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); - visual_3d_compile_shader(visual->glsl_fragment, "fragment", glsl_view_5VSP19); + visual->autostereo_glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); + visual_3d_compile_shader(visual->autostereo_glsl_fragment, "fragment", glsl_view_5VSP19); break; case GF_3D_STEREO_CUSTOM: { const char *sOpt = gf_cfg_get_key(visual->compositor->user->config, "Compositor", "InterleaverShader"); if (sOpt) { - FILE *src = gf_f64_open(sOpt, "rt"); - if (src) { - size_t size; - char *shader_src; - gf_f64_seek(src, 0, SEEK_END); - size = (size_t) gf_f64_tell(src); - gf_f64_seek(src, 0, SEEK_SET); - shader_src = gf_malloc(sizeof(char)*(size+1)); - size = 
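visual_3d_compile_shader() above reports failures by pulling the info log out of the driver and now also dumps the faulty source at debug level. The conventional sequence, sketched below with the core GL 2.0 calls (glGetShaderiv / glGetShaderInfoLog) rather than the ARB-suffixed variants the GPAC code may use, is to query GL_COMPILE_STATUS and fetch the log only on failure; prototypes are assumed available via GL_GLEXT_PROTOTYPES:

#define GL_GLEXT_PROTOTYPES 1
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <GL/gl.h>
#include <GL/glext.h>

/* Compile a shader and print the driver's info log on failure. */
static int compile_shader(GLuint shader_id, const char *name, const char *source)
{
    GLint status = 0, log_len = 0;
    GLint len = (GLint)strlen(source);

    glShaderSource(shader_id, 1, &source, &len);
    glCompileShader(shader_id);
    glGetShaderiv(shader_id, GL_COMPILE_STATUS, &status);
    if (status == GL_TRUE) return 1;

    glGetShaderiv(shader_id, GL_INFO_LOG_LENGTH, &log_len);
    if (log_len > 1) {
        char *log = malloc((size_t)log_len);
        if (log) {
            glGetShaderInfoLog(shader_id, log_len, NULL, log);
            fprintf(stderr, "Failed to compile shader %s: %s\n", name, log);
            free(log);
        }
    }
    return 0;
}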
fread(shader_src, 1, size, src); - fclose(src); - if (size != (size_t) -1) { - shader_src[size]=0; - visual->glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); - visual_3d_compile_shader(visual->glsl_fragment, "fragment", shader_src); - } - gf_free(shader_src); - } + visual->autostereo_glsl_fragment = visual_3d_shader_from_source_file(sOpt, GL_FRAGMENT_SHADER); } } break; } - glAttachShader(visual->glsl_program, visual->glsl_vertex); - glAttachShader(visual->glsl_program, visual->glsl_fragment); - glLinkProgram(visual->glsl_program); + glAttachShader(visual->autostereo_glsl_program, visual->base_glsl_vertex); + glAttachShader(visual->autostereo_glsl_program, visual->autostereo_glsl_fragment); + glLinkProgram(visual->autostereo_glsl_program); } #define DEL_SHADER(_a) if (_a) { glDeleteShader(_a); _a = 0; } @@ -578,15 +611,15 @@ void visual_3d_init_yuv_shader(GF_VisualManager *visual) visual->yuv_glsl_program = glCreateProgram(); - if (!visual->glsl_vertex) { - visual->glsl_vertex = glCreateShader(GL_VERTEX_SHADER); - visual_3d_compile_shader(visual->glsl_vertex, "vertex", default_glsl_vertex); + if (!visual->base_glsl_vertex) { + visual->base_glsl_vertex = glCreateShader(GL_VERTEX_SHADER); + visual_3d_compile_shader(visual->base_glsl_vertex, "vertex", default_glsl_vertex); } visual->yuv_glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); - visual_3d_compile_shader(visual->yuv_glsl_fragment, "fragment", glsl_yuv_shader); + visual_3d_compile_shader(visual->yuv_glsl_fragment, "YUV fragment", glsl_yuv_shader); - glAttachShader(visual->yuv_glsl_program, visual->glsl_vertex); + glAttachShader(visual->yuv_glsl_program, visual->base_glsl_vertex); glAttachShader(visual->yuv_glsl_program, visual->yuv_glsl_fragment); glLinkProgram(visual->yuv_glsl_program); @@ -601,24 +634,31 @@ void visual_3d_init_yuv_shader(GF_VisualManager *visual) } glUniform1i(loc, i); } + glUseProgram(0); if (visual->compositor->gl_caps.rect_texture) { Bool res; const char *opt; visual->yuv_rect_glsl_program = glCreateProgram(); - opt = gf_cfg_get_key(visual->compositor->user->config, "Compositor", "YUVShader"); + opt = gf_cfg_get_key(visual->compositor->user->config, "Compositor", "YUVRectShader"); visual->yuv_rect_glsl_fragment = glCreateShader(GL_FRAGMENT_SHADER); if (opt && !strcmp(opt, "Relaxed")) { - res = visual_3d_compile_shader(visual->yuv_rect_glsl_fragment, "fragment", glsl_yuv_rect_shader_relaxed); + res = visual_3d_compile_shader(visual->yuv_rect_glsl_fragment, "YUV Rect fragment (relaxed syntax)", glsl_yuv_rect_shader_relaxed); } else { - res = visual_3d_compile_shader(visual->yuv_rect_glsl_fragment, "fragment", glsl_yuv_rect_shader_strict); + if (opt) { + visual->yuv_rect_glsl_fragment = visual_3d_shader_from_source_file(opt, GL_FRAGMENT_SHADER); + if (!visual->yuv_rect_glsl_fragment) res = GF_FALSE; + } + res = visual_3d_compile_shader(visual->yuv_rect_glsl_fragment, "YUV Rect fragment (strict syntax)", glsl_yuv_rect_shader_strict); if (!res) { - res = visual_3d_compile_shader(visual->yuv_rect_glsl_fragment, "fragment", glsl_yuv_rect_shader_relaxed); + res = visual_3d_compile_shader(visual->yuv_rect_glsl_fragment, "YUV Rect fragment (relaxed syntax)", glsl_yuv_rect_shader_relaxed); if (res) { - gf_cfg_set_key(visual->compositor->user->config, "Compositor", "YUVShader", "Relaxed"); - GF_LOG(GF_LOG_WARNING, GF_LOG_COMPOSE, ("[Compositor] Using relaxed syntaxed version of YUV shader\n")); + if (!opt) gf_cfg_set_key(visual->compositor->user->config, "Compositor", "YUVRectShader", "Relaxed"); + GF_LOG(GF_LOG_WARNING, 
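After linking the YUV programs, the code above resolves the y_plane/u_plane/v_plane sampler uniforms and points each at a fixed texture unit with glUniform1i (and, for the rectangle-texture variant, also checks the width/height uniforms). A compact sketch of that binding step, assuming a GL 2.0 context with prototypes available (GPAC loads these entry points dynamically):

#define GL_GLEXT_PROTOTYPES 1
#include <stdio.h>
#include <GL/gl.h>
#include <GL/glext.h>

/* Bind the three plane samplers of a linked YUV program to texture units 0..2. */
static void bind_yuv_samplers(GLuint program)
{
    const char *names[3] = { "y_plane", "u_plane", "v_plane" };
    glUseProgram(program);
    for (int i = 0; i < 3; i++) {
        GLint loc = glGetUniformLocation(program, names[i]);
        if (loc == -1) {
            fprintf(stderr, "uniform %s not found\n", names[i]);
            continue;
        }
        glUniform1i(loc, i);   /* sampler i reads from GL_TEXTURE0 + i */
    }
    glUseProgram(0);
}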
GF_LOG_COMPOSE, ("[Compositor] Using relaxed syntax version of YUV shader\n")); } + } else { + GF_LOG(GF_LOG_INFO, GF_LOG_COMPOSE, ("[Compositor] Using strict syntax version of YUV shader\n")); } } if (!res) { @@ -628,7 +668,7 @@ void visual_3d_init_yuv_shader(GF_VisualManager *visual) } if (visual->yuv_rect_glsl_program) { - glAttachShader(visual->yuv_rect_glsl_program, visual->glsl_vertex); + glAttachShader(visual->yuv_rect_glsl_program, visual->base_glsl_vertex); glAttachShader(visual->yuv_rect_glsl_program, visual->yuv_rect_glsl_fragment); glLinkProgram(visual->yuv_rect_glsl_program); @@ -643,10 +683,20 @@ void visual_3d_init_yuv_shader(GF_VisualManager *visual) } glUniform1i(loc, i); } + + loc = glGetUniformLocation(visual->yuv_rect_glsl_program, "width"); + if (loc == -1) { + GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to locate width in YUV shader\n")); + } + + loc = glGetUniformLocation(visual->yuv_rect_glsl_program, "height"); + if (loc == -1) { + GF_LOG(GF_LOG_ERROR, GF_LOG_COMPOSE, ("[Compositor] Failed to locate width in YUV shader\n")); + } + + glUseProgram(0); } } - - glUseProgram(0); } #endif // !defined(GPAC_USE_TINYGL) && !defined(GPAC_USE_OGL_ES) @@ -655,11 +705,11 @@ void visual_3d_reset_graphics(GF_VisualManager *visual) { #if !defined(GPAC_USE_TINYGL) && !defined(GPAC_USE_OGL_ES) - DEL_SHADER(visual->glsl_vertex); - DEL_SHADER(visual->glsl_fragment); + DEL_SHADER(visual->base_glsl_vertex); + DEL_SHADER(visual->autostereo_glsl_fragment); DEL_SHADER(visual->yuv_glsl_fragment); - DEL_PROGRAM(visual->glsl_program ); + DEL_PROGRAM(visual->autostereo_glsl_program ); DEL_PROGRAM(visual->yuv_glsl_program ); if (visual->gl_textures) { @@ -776,10 +826,10 @@ void visual_3d_end_auto_stereo_pass(GF_VisualManager *visual) glMatrixMode(GL_MODELVIEW); /*use our program*/ - glUseProgram(visual->glsl_program); + glUseProgram(visual->autostereo_glsl_program); /*push number of views if shader uses it*/ - loc = glGetUniformLocation(visual->glsl_program, "gfViewCount"); + loc = glGetUniformLocation(visual->autostereo_glsl_program, "gfViewCount"); if (loc != -1) glUniform1i(loc, visual->nb_views); glClientActiveTexture(GL_TEXTURE0); @@ -789,7 +839,7 @@ void visual_3d_end_auto_stereo_pass(GF_VisualManager *visual) /*bind all our textures*/ for (i=0; inb_views; i++) { sprintf(szTex, "gfView%d", i+1); - loc = glGetUniformLocation(visual->glsl_program, szTex); + loc = glGetUniformLocation(visual->autostereo_glsl_program, szTex); if (loc == -1) continue; glActiveTexture(GL_TEXTURE0 + i); @@ -2504,7 +2554,7 @@ void visual_3d_point_sprite(GF_VisualManager *visual, Drawable *stack, GF_Textur Float z; static GLfloat none[3] = { 1.0f, 0, 0 }; - data = gf_sc_texture_get_data(txh, &pixel_format); + data = (u8 *) gf_sc_texture_get_data(txh, &pixel_format); if (!data) return; if (pixel_format!=GF_PIXEL_RGBD) return; stride = txh->stride; @@ -2553,7 +2603,7 @@ void visual_3d_point_sprite(GF_VisualManager *visual, Drawable *stack, GF_Textur if (!delta) first_pass = 2; else first_pass = 1; - data = gf_sc_texture_get_data(txh, &pixel_format); + data = (u8 *) gf_sc_texture_get_data(txh, &pixel_format); if (!data) return; if (pixel_format!=GF_PIXEL_RGBD) return; stride = txh->stride; @@ -2577,7 +2627,7 @@ restart: in_strip = 0; for (h=0; hheight - 1; h++) { - char *src = data + h*stride; + u8 *src = data + h*stride; x = -1; x = gf_mulfix(x, INT2FIX(txh->width/2)); if (!tr_state->pixel_metrics) x = gf_divfix(x, tr_state->min_hsize); @@ -2689,13 +2739,13 @@ restart: Fixed f_scale = 
FLT2FIX(visual->compositor->depth_gl_scale); txh->needs_refresh = 0; - data = gf_sc_texture_get_data(txh, &pixel_format); + data = (u8 *) gf_sc_texture_get_data(txh, &pixel_format); if (!data) return; if (pixel_format!=GF_PIXEL_RGB_24_DEPTH) return; data += txh->height*txh->width*3; for (h=0; hheight; h++) { - char *src = data + h * txh->width; + u8 *src = data + h * txh->width; for (w=0; wwidth; w++) { u8 d = src[w]; Fixed z = INT2FIX(d); diff --git a/src/export.cpp b/src/export.cpp index 15d061f..1899362 100644 --- a/src/export.cpp +++ b/src/export.cpp @@ -47,6 +47,7 @@ #pragma comment (linker, EXPORT_SYMBOL(gf_rmdir) ) #pragma comment (linker, EXPORT_SYMBOL(gf_cleanup_dir) ) #pragma comment (linker, EXPORT_SYMBOL(gf_sys_clock) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_sys_clock_high_res) ) #pragma comment (linker, EXPORT_SYMBOL(gf_sys_get_rti) ) #pragma comment (linker, EXPORT_SYMBOL(gf_sys_get_battery_state) ) #pragma comment (linker, EXPORT_SYMBOL(gf_get_default_cache_directory) ) @@ -511,6 +512,7 @@ /*color.h exports*/ #pragma comment (linker, EXPORT_SYMBOL(gf_stretch_bits) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_color_write_yv12_10_to_yuv) ) #pragma comment (linker, EXPORT_SYMBOL(gf_cmx_init) ) #pragma comment (linker, EXPORT_SYMBOL(gf_cmx_set) ) #pragma comment (linker, EXPORT_SYMBOL(gf_cmx_set_all) ) @@ -790,6 +792,8 @@ #pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_last_producer_time_box) ) #pragma comment (linker, EXPORT_SYMBOL(gf_isom_set_sync_table) ) #pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_current_tfdt) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_isom_is_adobe_protection_media) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_adobe_protection_info) ) # ifndef GPAC_DISABLE_ISOM_DUMP #pragma comment (linker, EXPORT_SYMBOL(gf_isom_dump) ) @@ -1238,6 +1242,7 @@ #ifndef GPAC_DISABLE_ISOM_WRITE #pragma comment (linker, EXPORT_SYMBOL(gf_media_change_par) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_media_remove_non_rap) ) #endif #ifndef GPAC_DISABLE_AV_PARSERS @@ -1256,6 +1261,7 @@ #pragma comment (linker, EXPORT_SYMBOL(gf_media_hevc_read_pps) ) #pragma comment (linker, EXPORT_SYMBOL(gf_hevc_get_sps_info) ) #pragma comment (linker, EXPORT_SYMBOL(gf_hevc_get_profile_name) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_hevc_get_sps_info_with_state) ) #endif //GPAC_DISABLE_HEVC @@ -1518,7 +1524,7 @@ #pragma comment (linker, EXPORT_SYMBOL(gf_dom_listener_build) ) #pragma comment (linker, EXPORT_SYMBOL(gf_dom_listener_del) ) #pragma comment (linker, EXPORT_SYMBOL(gf_sg_handle_dom_event) ) -#pragma comment (linker, EXPORT_SYMBOL(sg_fire_dom_event) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_sg_fire_dom_event) ) #pragma comment (linker, EXPORT_SYMBOL(gf_mo_event_target_get) ) #pragma comment (linker, EXPORT_SYMBOL(gf_mo_event_target_remove_by_node) ) #pragma comment (linker, EXPORT_SYMBOL(gf_event_target_get_node) ) @@ -1892,6 +1898,8 @@ #pragma comment (linker, EXPORT_SYMBOL(gf_dash_get_group_udta) ) #pragma comment (linker, EXPORT_SYMBOL(gf_dash_set_group_udta) ) #pragma comment (linker, EXPORT_SYMBOL(gf_dash_is_group_selected) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_dash_is_group_selectable) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_dash_debug_group) ) #pragma comment (linker, EXPORT_SYMBOL(gf_dash_get_info) ) #pragma comment (linker, EXPORT_SYMBOL(gf_dash_switch_quality) ) #pragma comment (linker, EXPORT_SYMBOL(gf_dash_get_duration) ) @@ -1931,12 +1939,15 @@ #pragma comment (linker, EXPORT_SYMBOL(gf_dash_set_max_resolution) ) #pragma comment (linker, 
EXPORT_SYMBOL(gf_dash_set_min_timeout_between_404) ) #pragma comment (linker, EXPORT_SYMBOL(gf_dash_set_segment_expiration_threshold) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_dash_set_user_buffer) ) #endif #ifndef GPAC_DISABLE_VTT #pragma comment (linker, EXPORT_SYMBOL(gf_webvtt_js_addCue) ) #pragma comment (linker, EXPORT_SYMBOL(gf_webvtt_js_removeCues) ) #pragma comment (linker, EXPORT_SYMBOL(gf_webvtt_parse_cues_from_data) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_webvtt_parse_iso_cues) ) +#pragma comment (linker, EXPORT_SYMBOL(gf_webvtt_cue_del) ) #endif #ifndef GPAC_DISABLE_MSE diff --git a/src/ietf/rtp_depacketizer.c b/src/ietf/rtp_depacketizer.c index f0501a1..9fd8569 100644 --- a/src/ietf/rtp_depacketizer.c +++ b/src/ietf/rtp_depacketizer.c @@ -806,7 +806,7 @@ static void gf_rtp_parse_hevc(GF_RTPDepacketizer *rtp, GF_RTPHeader *hdr, char * u32 nal_size = (u8) payload[offset]; nal_size<<=8; nal_size |= (u8) payload[offset+1]; offset += 2; nal_type = (payload[offset] & 0x7E) >> 1; - if ((nal_type==GF_HEVC_NALU_SLICE_IDR_W_DLP) || (nal_type==GF_HEVC_NALU_SLICE_IDR_N_LP)) { + if ((nal_type>=GF_HEVC_NALU_SLICE_BLA_W_LP) && (nal_type<=GF_HEVC_NALU_SLICE_CRA)) { rtp->sl_hdr.randomAccessPointFlag = 1; } @@ -829,7 +829,7 @@ static void gf_rtp_parse_hevc(GF_RTPDepacketizer *rtp, GF_RTPHeader *hdr, char * if (is_start) gf_rtp_hevc_flush(rtp, hdr, 1); nal_type = payload[2] & 0x3F; - if ((nal_type==GF_HEVC_NALU_SLICE_IDR_W_DLP) || (nal_type==GF_HEVC_NALU_SLICE_IDR_N_LP)) { + if ((nal_type>=GF_HEVC_NALU_SLICE_BLA_W_LP) && (nal_type<=GF_HEVC_NALU_SLICE_CRA)) { rtp->sl_hdr.randomAccessPointFlag = 1; } @@ -1044,6 +1044,7 @@ static u32 gf_rtp_get_payload_type(GF_RTPMap *map, GF_SDPMedia *media) else if (!stricmp(map->payload_name, "ac3")) return GF_RTP_PAYT_AC3; else if (!stricmp(map->payload_name, "H264-SVC")) return GF_RTP_PAYT_H264_SVC; else if (!stricmp(map->payload_name, "H265")) return GF_RTP_PAYT_HEVC; + else if (!stricmp(map->payload_name, "H265-SHVC")) return GF_RTP_PAYT_SHVC; else return 0; } @@ -1521,6 +1522,7 @@ static GF_Err gf_rtp_payt_setup(GF_RTPDepacketizer *rtp, GF_RTPMap *map, GF_SDPM rtp->depacketize = gf_rtp_parse_h264; break; case GF_RTP_PAYT_HEVC: + case GF_RTP_PAYT_SHVC: #ifndef GPAC_DISABLE_HEVC { GF_SDP_FMTP *fmtp; diff --git a/src/ietf/rtp_packetizer.c b/src/ietf/rtp_packetizer.c index 3ff028a..22e175e 100644 --- a/src/ietf/rtp_packetizer.c +++ b/src/ietf/rtp_packetizer.c @@ -118,6 +118,7 @@ GF_Err gf_rtp_builder_process(GP_RTPPacketizer *builder, char *data, u32 data_si case GF_RTP_PAYT_AC3: return gp_rtp_builder_do_ac3(builder, data, data_size, IsAUEnd, FullAUSize); case GF_RTP_PAYT_HEVC: + case GF_RTP_PAYT_SHVC: return gp_rtp_builder_do_hevc(builder, data, data_size, IsAUEnd, FullAUSize); default: return GF_NOT_SUPPORTED; @@ -531,6 +532,10 @@ Bool gf_rtp_builder_get_payload_name(GP_RTPPacketizer *rtpb, char *szPayloadName strcpy(szMediaName, "video"); strcpy(szPayloadName, "H265"); return 1; + case GF_RTP_PAYT_SHVC: + strcpy(szMediaName, "video"); + strcpy(szPayloadName, "H265-SHVC"); + return 1; default: strcpy(szMediaName, ""); strcpy(szPayloadName, ""); diff --git a/src/ietf/rtp_pck_mpeg4.c b/src/ietf/rtp_pck_mpeg4.c index 0aac1a2..cfffc69 100644 --- a/src/ietf/rtp_pck_mpeg4.c +++ b/src/ietf/rtp_pck_mpeg4.c @@ -543,7 +543,7 @@ GF_Err gp_rtp_builder_do_hevc(GP_RTPPacketizer *builder, char *nalu, u32 nalu_si if (!nalu) do_flush = 1; else if (builder->sl_header.accessUnitStartFlag) do_flush = 1; /*we must NOT fragment a NALU*/ - else if (builder->bytesInPacket + 
nalu_size + 4 >= builder->Path_MTU) do_flush = 2; + else if (builder->bytesInPacket + nalu_size + 4 >= builder->Path_MTU) do_flush = 2; //2 bytes PayloadHdr for AP + 2 bytes NAL size /*aggregation is disabled*/ else if (! (builder->flags & GP_RTP_PCK_USE_MULTI) ) do_flush = 2; @@ -633,7 +633,7 @@ GF_Err gp_rtp_builder_do_hevc(GP_RTPPacketizer *builder, char *nalu, u32 nalu_si char payload_hdr[2]; char shdr; - assert(nalu_size>=builder->Path_MTU); + assert(nalu_size + 4 >=builder->Path_MTU); assert(!builder->bytesInPacket); /*FU payload doesn't have the NAL hdr (2 bytes*/ diff --git a/src/ietf/rtp_streamer.c b/src/ietf/rtp_streamer.c index e2e7a0c..4126278 100644 --- a/src/ietf/rtp_streamer.c +++ b/src/ietf/rtp_streamer.c @@ -307,6 +307,12 @@ GF_RTPStreamer *gf_rtp_streamer_new_extended(u32 streamType, u32 oti, u32 timeSc rtp_type = GF_RTP_PAYT_HEVC; PL_ID = 0x0F; break; + /*SHVC*/ + case GPAC_OTI_VIDEO_SHVC: + required_rate = 90000; /* "90 kHz clock rate MUST be used"*/ + rtp_type = GF_RTP_PAYT_SHVC; + PL_ID = 0x0F; + break; } break; @@ -617,7 +623,7 @@ GF_Err gf_rtp_streamer_append_sdp_extended(GF_RTPStreamer *rtp, u16 ESID, char * strcat(sdpLine, "\n"); } } - else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_HEVC) { + else if ((rtp->packetizer->rtp_payt == GF_RTP_PAYT_HEVC) || (rtp->packetizer->rtp_payt == GF_RTP_PAYT_SHVC)) { #ifndef GPAC_DISABLE_HEVC GF_HEVCConfig *hevcc = dsi ? gf_odf_hevc_cfg_read(dsi, dsi_len, 0) : NULL; if (hevcc) { diff --git a/src/isomedia/avc_ext.c b/src/isomedia/avc_ext.c index 98b29c8..cd0b263 100644 --- a/src/isomedia/avc_ext.c +++ b/src/isomedia/avc_ext.c @@ -76,29 +76,6 @@ static void rewrite_nalus_list(GF_List *nalus, GF_BitStream *bs, Bool rewrite_st } } -/* FIXME : unused function -static void merge_nalus_list(GF_List *src, GF_List *dst) -{ - u32 i, count = gf_list_count(src); - for (i=0; iavc_config) { - merge_nalus_list(entry->avc_config->config->sequenceParameterSets, sps); - merge_nalus_list(entry->avc_config->config->sequenceParameterSetExtensions, sps); - merge_nalus_list(entry->avc_config->config->pictureParameterSets, pps); - } - if (entry->svc_config) { - merge_nalus_list(entry->svc_config->config->sequenceParameterSets, sps); - merge_nalus_list(entry->svc_config->config->pictureParameterSets, pps); - } -}*/ static GF_Err process_extractor(GF_ISOFile *file, u32 sampleNumber, u32 nal_size, u16 nal_hdr, u32 nal_unit_size_field, Bool is_hevc, Bool rewrite_ps, Bool rewrite_start_codes, GF_BitStream *src_bs, GF_BitStream *dst_bs, u32 extractor_mode) { @@ -125,10 +102,12 @@ static GF_Err process_extractor(GF_ISOFile *file, u32 sampleNumber, u32 nal_size if (!ref_track_num) return GF_ISOM_INVALID_FILE; cur_extract_mode = gf_isom_get_nalu_extract_mode(file, ref_track_num); + + //we must be in inspect mode only otherwise the reference sample will not be the one stored on file (change in start codes, PS inserted or other NALUs inserted) + //and this will corrupt extraction (wrong data offsets) ref_extract_mode = GF_ISOM_NALU_EXTRACT_INSPECT; - if (rewrite_ps) - ref_extract_mode |= GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG; gf_isom_set_nalu_extract_mode(file, ref_track_num, ref_extract_mode); + ref_trak = gf_isom_get_track_from_file(file, ref_track_num); if (!ref_trak) return GF_ISOM_INVALID_FILE; @@ -139,6 +118,7 @@ static GF_Err process_extractor(GF_ISOFile *file, u32 sampleNumber, u32 nal_size //are there cases were this wouldn't be the case ? 
if (sample_offset < -sample_offset) sample_offset = 0; + e = Media_GetSample(ref_trak->Media, sampleNumber + sample_offset, &ref_samp, &di, 0, NULL); if (e) return e; @@ -208,7 +188,7 @@ static GF_Err process_extractor(GF_ISOFile *file, u32 sampleNumber, u32 nal_size return GF_OK; } -static Bool is_sample_idr(GF_ISOSample *sample, GF_MPEGVisualSampleEntryBox *entry) +static u8 is_sample_idr(GF_ISOSample *sample, GF_MPEGVisualSampleEntryBox *entry) { Bool is_hevc = 0; u32 nalu_size_field = 0; @@ -242,9 +222,11 @@ static Bool is_sample_idr(GF_ISOSample *sample, GF_MPEGVisualSampleEntryBox *ent case GF_HEVC_NALU_SLICE_BLA_W_LP: case GF_HEVC_NALU_SLICE_BLA_W_DLP: case GF_HEVC_NALU_SLICE_BLA_N_LP: + case GF_HEVC_NALU_SLICE_CRA: + gf_bs_del(bs); + return 3; case GF_HEVC_NALU_SLICE_IDR_W_DLP: case GF_HEVC_NALU_SLICE_IDR_N_LP: - case GF_HEVC_NALU_SLICE_CRA: gf_bs_del(bs); return 1; case GF_HEVC_NALU_ACCESS_UNIT: @@ -284,13 +266,46 @@ static Bool is_sample_idr(GF_ISOSample *sample, GF_MPEGVisualSampleEntryBox *ent return 0; } -/* Rewrite mode: - * mode = 0: playback - * mode = 1: streaming - */ +static void nalu_merge_ps(GF_BitStream *ps_bs, Bool rewrite_start_codes, u32 nal_unit_size_field, GF_MPEGVisualSampleEntryBox *entry, Bool is_hevc) +{ + u32 i, count; + if (is_hevc) { + if (entry->hevc_config) { + count = gf_list_count(entry->hevc_config->config->param_array); + for (i=0; ihevc_config->config->param_array, i); + rewrite_nalus_list(ar->nalus, ps_bs, rewrite_start_codes, nal_unit_size_field); + } + } + if (entry->shvc_config) { + count = gf_list_count(entry->shvc_config->config->param_array); + for (i=0; ishvc_config->config->param_array, i); + rewrite_nalus_list(ar->nalus, ps_bs, rewrite_start_codes, nal_unit_size_field); + } + } + } else { + if (entry->avc_config) { + rewrite_nalus_list(entry->avc_config->config->sequenceParameterSets, ps_bs, rewrite_start_codes, nal_unit_size_field); + rewrite_nalus_list(entry->avc_config->config->sequenceParameterSetExtensions, ps_bs, rewrite_start_codes, nal_unit_size_field); + rewrite_nalus_list(entry->avc_config->config->pictureParameterSets, ps_bs, rewrite_start_codes, nal_unit_size_field); + } + + /*add svc config */ + if (entry->svc_config) { + rewrite_nalus_list(entry->svc_config->config->sequenceParameterSets, ps_bs, rewrite_start_codes, nal_unit_size_field); + rewrite_nalus_list(entry->svc_config->config->pictureParameterSets, ps_bs, rewrite_start_codes, nal_unit_size_field); + } + } +} + + GF_Err gf_isom_nalu_sample_rewrite(GF_MediaBox *mdia, GF_ISOSample *sample, u32 sampleNumber, GF_MPEGVisualSampleEntryBox *entry) { Bool is_hevc = 0; + //if only one sync given in the sample sync table, insert sps/pps/vps before cra/bla in hevc + Bool check_cra_bla = (mdia->information->sampleTable->SyncSample && mdia->information->sampleTable->SyncSample->nb_entries>1) ? 0 : 1; + Bool insert_nalu_delim = 1; GF_Err e = GF_OK; GF_ISOSample *ref_samp; GF_BitStream *src_bs, *ref_bs, *dst_bs, *ps_bs; @@ -314,12 +329,16 @@ GF_Err gf_isom_nalu_sample_rewrite(GF_MediaBox *mdia, GF_ISOSample *sample, u32 rewrite_start_codes = (mdia->mediaTrack->extractor_mode & GF_ISOM_NALU_EXTRACT_ANNEXB_FLAG) ? 1 : 0; insert_vdrd_code = (mdia->mediaTrack->extractor_mode & GF_ISOM_NALU_EXTRACT_VDRD_FLAG) ? 
1 : 0; - if (!entry->svc_config) insert_vdrd_code = 0; + if (!entry->svc_config && !entry->shvc_config) insert_vdrd_code = 0; extractor_mode = mdia->mediaTrack->extractor_mode&0x0000FFFF; if (extractor_mode != GF_ISOM_NALU_EXTRACT_LAYER_ONLY) insert_vdrd_code = 0; + //this is a compatible HEVC, don't insert VDRD, insert NALU delim + if (entry->shvc_config && entry->hevc_config) + insert_vdrd_code = 0; + if (extractor_mode == GF_ISOM_NALU_EXTRACT_INSPECT) { if (!rewrite_ps && !rewrite_start_codes) return GF_OK; @@ -370,7 +389,7 @@ GF_Err gf_isom_nalu_sample_rewrite(GF_MediaBox *mdia, GF_ISOSample *sample, u32 } } //AVC/HEVC base, insert NALU delim - else { + else if (insert_nalu_delim) { gf_bs_write_int(dst_bs, 1, 32); if (is_hevc) { #ifndef GPAC_DISABLE_HEVC @@ -391,26 +410,30 @@ GF_Err gf_isom_nalu_sample_rewrite(GF_MediaBox *mdia, GF_ISOSample *sample, u32 } if (rewrite_ps) { - if (is_hevc) { - u32 i, count; - - if (entry->hevc_config) { - count = gf_list_count(entry->hevc_config->config->param_array); - for (i=0; ihevc_config->config->param_array, i); - rewrite_nalus_list(ar->nalus, ps_bs, rewrite_start_codes, nal_unit_size_field); - } - } - if (entry->shvc_config) { - count = gf_list_count(entry->shvc_config->config->param_array); - for (i=0; ishvc_config->config->param_array, i); - rewrite_nalus_list(ar->nalus, ps_bs, rewrite_start_codes, nal_unit_size_field); + //in inspect mode or single-layer mode just use the xPS from this layer + if (extractor_mode == GF_ISOM_NALU_EXTRACT_DEFAULT) { + u32 i; + GF_TrackReferenceTypeBox *scal = NULL; + Track_FindRef(mdia->mediaTrack, GF_4CC('s','c','a','l'), &scal); + + if (scal) { + for (i=0; itrackIDCount; i++) { + GF_TrackBox *a_track = GetTrackbyID(mdia->mediaTrack->moov, scal->trackIDs[i]); + GF_MPEGVisualSampleEntryBox *an_entry = NULL; + if (a_track && a_track->Media && a_track->Media->information && a_track->Media->information->sampleTable && a_track->Media->information->sampleTable->SampleDescription) + an_entry = gf_list_get(a_track->Media->information->sampleTable->SampleDescription->other_boxes, 0); + + if (an_entry) + nalu_merge_ps(ps_bs, rewrite_start_codes, nal_unit_size_field, an_entry, is_hevc); } } + } + nalu_merge_ps(ps_bs, rewrite_start_codes, nal_unit_size_field, entry, is_hevc); + + if (is_hevc) { /*little optimization if we are not asked to start codes: copy over the sample*/ - if (!rewrite_start_codes) { + if (!rewrite_start_codes && !entry->shvc_config) { if (ps_bs) { u8 nal_type = (sample->data[nal_unit_size_field] & 0x7E) >> 1; //temp fix - if we detect xPS in the begining of the sample do NOT copy the ps bitstream @@ -437,18 +460,6 @@ GF_Err gf_isom_nalu_sample_rewrite(GF_MediaBox *mdia, GF_ISOSample *sample, u32 gf_bs_del(dst_bs); return GF_OK; } - } else { - if (entry->avc_config) { - rewrite_nalus_list(entry->avc_config->config->sequenceParameterSets, ps_bs, rewrite_start_codes, nal_unit_size_field); - rewrite_nalus_list(entry->avc_config->config->sequenceParameterSetExtensions, ps_bs, rewrite_start_codes, nal_unit_size_field); - rewrite_nalus_list(entry->avc_config->config->pictureParameterSets, ps_bs, rewrite_start_codes, nal_unit_size_field); - } - - /*add svc config */ - if (entry->svc_config) { - rewrite_nalus_list(entry->svc_config->config->sequenceParameterSets, ps_bs, rewrite_start_codes, nal_unit_size_field); - rewrite_nalus_list(entry->svc_config->config->pictureParameterSets, ps_bs, rewrite_start_codes, nal_unit_size_field); - } } } @@ -511,6 +522,24 @@ GF_Err gf_isom_nalu_sample_rewrite(GF_MediaBox *mdia, 
GF_ISOSample *sample, u32 case GF_HEVC_NALU_SLICE_STSA_R: if (temporal_id < (nal_hdr & 0x7)) temporal_id = (nal_hdr & 0x7); + + case GF_HEVC_NALU_SLICE_BLA_W_LP: + case GF_HEVC_NALU_SLICE_BLA_W_DLP: + case GF_HEVC_NALU_SLICE_BLA_N_LP: + case GF_HEVC_NALU_SLICE_IDR_W_DLP: + case GF_HEVC_NALU_SLICE_IDR_N_LP: + case GF_HEVC_NALU_SLICE_CRA: + //insert xPS before CRA/BLS + if (check_cra_bla && !sample->IsRAP) { + if (ref_samp) gf_isom_sample_del(&ref_samp); + if (src_bs) gf_bs_del(src_bs); + if (ref_bs) gf_bs_del(ref_bs); + if (dst_bs) gf_bs_del(dst_bs); + if (buffer) gf_free(buffer); + + sample->IsRAP=3; + return gf_isom_nalu_sample_rewrite(mdia, sample, sampleNumber, entry); + } default: /*rewrite nal*/ gf_bs_read_data(src_bs, buffer, nal_size-2); @@ -656,8 +685,105 @@ static GF_AVCConfig *AVC_DuplicateConfig(GF_AVCConfig *cfg) return cfg_new; } +static void merge_avc_config(GF_AVCConfig *dst_cfg, GF_AVCConfig *src_cfg) +{ + GF_AVCConfig *cfg = AVC_DuplicateConfig(src_cfg); + while (gf_list_count(cfg->sequenceParameterSets)) { + GF_AVCConfigSlot *p = (GF_AVCConfigSlot*)gf_list_get(cfg->sequenceParameterSets, 0); + gf_list_rem(cfg->sequenceParameterSets, 0); + gf_list_insert(dst_cfg->sequenceParameterSets, p, 0); + } + while (gf_list_count(cfg->pictureParameterSets)) { + GF_AVCConfigSlot *p = (GF_AVCConfigSlot*)gf_list_get(cfg->pictureParameterSets, 0); + gf_list_rem(cfg->pictureParameterSets, 0); + gf_list_insert(dst_cfg->pictureParameterSets, p, 0); + } + gf_odf_avc_cfg_del(cfg); +} + +void merge_hevc_config(GF_HEVCConfig *dst_cfg, GF_HEVCConfig *src_cfg, Bool force_insert) +{ + GF_HEVCConfig *cfg = HEVC_DuplicateConfig(src_cfg); + //merge all xPS + u32 i, j, count = cfg->param_array ? gf_list_count(cfg->param_array) : 0; + for (i=0; iparam_array ? gf_list_count(dst_cfg->param_array) : 0; + GF_HEVCParamArray *ar = gf_list_get(cfg->param_array, i); + for (j=0; jparam_array, j); + if (ar_h->type==ar->type) { + break; + } + ar_h = NULL; + } + if (!ar_h) { + gf_list_add(dst_cfg->param_array, ar); + gf_list_rem(cfg->param_array, i); + count--; + i--; + } else { + while (gf_list_count(ar->nalus)) { + GF_AVCConfigSlot *p = (GF_AVCConfigSlot*)gf_list_get(ar->nalus, 0); + gf_list_rem(ar->nalus, 0); + if (force_insert) + gf_list_insert(ar_h->nalus, p, 0); + else + gf_list_add(ar_h->nalus, p); + } -void AVC_RewriteESDescriptor(GF_MPEGVisualSampleEntryBox *avc) + } + } + gf_odf_hevc_cfg_del(cfg); + +#define CHECK_CODE(__code) if (dst_cfg->__code < src_cfg->__code) dst_cfg->__code = src_cfg->__code; + + CHECK_CODE(configurationVersion) + CHECK_CODE(profile_idc) + CHECK_CODE(profile_space) + CHECK_CODE(tier_flag) + CHECK_CODE(general_profile_compatibility_flags) + CHECK_CODE(progressive_source_flag) + CHECK_CODE(interlaced_source_flag) + CHECK_CODE(constraint_indicator_flags) + CHECK_CODE(level_idc) + CHECK_CODE(min_spatial_segmentation_idc) + +} + +void merge_all_config(GF_AVCConfig *avc_cfg, GF_HEVCConfig *hevc_cfg, GF_MediaBox *mdia) +{ + u32 i; + GF_TrackReferenceTypeBox *scal = NULL; + Track_FindRef(mdia->mediaTrack, GF_4CC('s','c','a','l'), &scal); + + if (!scal) return; + + for (i=0; itrackIDCount; i++) { + GF_TrackBox *a_track = GetTrackbyID(mdia->mediaTrack->moov, scal->trackIDs[i]); + GF_MPEGVisualSampleEntryBox *an_entry = NULL; + if (a_track && a_track->Media && a_track->Media->information && a_track->Media->information->sampleTable && a_track->Media->information->sampleTable->SampleDescription) + an_entry = gf_list_get(a_track->Media->information->sampleTable->SampleDescription->other_boxes, 
0); + + if (!an_entry) continue; + + if (avc_cfg && an_entry->svc_config && an_entry->svc_config->config) + merge_avc_config(avc_cfg, an_entry->svc_config->config); + + if (avc_cfg && an_entry->avc_config && an_entry->avc_config->config) + merge_avc_config(avc_cfg, an_entry->avc_config->config); + + if (hevc_cfg && an_entry->shvc_config && an_entry->shvc_config->config) + merge_hevc_config(hevc_cfg, an_entry->shvc_config->config, GF_TRUE); + + if (hevc_cfg && an_entry->hevc_config && an_entry->hevc_config->config) + merge_hevc_config(hevc_cfg, an_entry->hevc_config->config, GF_TRUE); + } + + if (hevc_cfg) hevc_cfg->is_shvc = 0; +} + +void AVC_RewriteESDescriptorEx(GF_MPEGVisualSampleEntryBox *avc, GF_MediaBox *mdia) { GF_AVCConfig *avcc, *svcc; if (avc->emul_esd) gf_odf_desc_del((GF_Descriptor *)avc->emul_esd); @@ -689,31 +815,30 @@ void AVC_RewriteESDescriptor(GF_MPEGVisualSampleEntryBox *avc) avcc = avc->avc_config->config ? AVC_DuplicateConfig(avc->avc_config->config) : NULL; /*merge SVC config*/ if (avc->svc_config) { - svcc = AVC_DuplicateConfig(avc->svc_config->config); - while (gf_list_count(svcc->sequenceParameterSets)) { - GF_AVCConfigSlot *p = (GF_AVCConfigSlot*)gf_list_get(svcc->sequenceParameterSets, 0); - gf_list_rem(svcc->sequenceParameterSets, 0); - gf_list_add(avcc->sequenceParameterSets, p); - } - while (gf_list_count(svcc->pictureParameterSets)) { - GF_AVCConfigSlot *p = (GF_AVCConfigSlot*)gf_list_get(svcc->pictureParameterSets, 0); - gf_list_rem(svcc->pictureParameterSets, 0); - gf_list_add(avcc->pictureParameterSets, p); - } - gf_odf_avc_cfg_del(svcc); + merge_avc_config(avcc, avc->svc_config->config); } if (avcc) { + if (mdia) merge_all_config(avcc, NULL, mdia); + gf_odf_avc_cfg_write(avcc, &avc->emul_esd->decoderConfig->decoderSpecificInfo->data, &avc->emul_esd->decoderConfig->decoderSpecificInfo->dataLength); gf_odf_avc_cfg_del(avcc); } } else if (avc->svc_config) { svcc = AVC_DuplicateConfig(avc->svc_config->config); + + if (mdia) merge_all_config(svcc, NULL, mdia); + gf_odf_avc_cfg_write(svcc, &avc->emul_esd->decoderConfig->decoderSpecificInfo->data, &avc->emul_esd->decoderConfig->decoderSpecificInfo->dataLength); gf_odf_avc_cfg_del(svcc); } } -void HEVC_RewriteESDescriptor(GF_MPEGVisualSampleEntryBox *hevc) +void AVC_RewriteESDescriptor(GF_MPEGVisualSampleEntryBox *avc) +{ + AVC_RewriteESDescriptorEx(avc, NULL); +} + +void HEVC_RewriteESDescriptorEx(GF_MPEGVisualSampleEntryBox *hevc, GF_MediaBox *mdia) { if (hevc->emul_esd) gf_odf_desc_del((GF_Descriptor *)hevc->emul_esd); hevc->emul_esd = gf_odf_desc_esd_new(2); @@ -743,38 +868,23 @@ void HEVC_RewriteESDescriptor(GF_MPEGVisualSampleEntryBox *hevc) GF_HEVCConfig *hcfg = HEVC_DuplicateConfig(hevc->hevc_config ? hevc->hevc_config->config : hevc->shvc_config->config); if (hevc->hevc_config && hevc->shvc_config) { - u32 j; - GF_HEVCConfig *scfg = HEVC_DuplicateConfig(hevc->shvc_config->config); - //merge all xPS - u32 i, count = scfg->param_array ? gf_list_count(scfg->param_array) : 0; - for (i=0; iparam_array ? 
gf_list_count(hcfg->param_array) : 0; - GF_HEVCParamArray *ar = gf_list_get(scfg->param_array, i); - for (j=0; jparam_array, j); - if (ar_h->type==ar->type) { - break; - } - ar_h = NULL; - } - if (!ar_h) { - gf_list_add(hcfg->param_array, ar); - gf_list_rem(scfg->param_array, i); - count--; - i--; - } else { - gf_list_transfer(ar_h->nalus, ar->nalus); - } - } - gf_odf_hevc_cfg_del(scfg); + //merge SHVC config to HEVC conf, so we add entry rather than insert + merge_hevc_config(hcfg, hevc->shvc_config->config, GF_FALSE); } + + if (mdia) merge_all_config(NULL, hcfg, mdia); + if (hcfg) { + hcfg->is_shvc = GF_FALSE; gf_odf_hevc_cfg_write(hcfg, &hevc->emul_esd->decoderConfig->decoderSpecificInfo->data, &hevc->emul_esd->decoderConfig->decoderSpecificInfo->dataLength); gf_odf_hevc_cfg_del(hcfg); } } } +void HEVC_RewriteESDescriptor(GF_MPEGVisualSampleEntryBox *hevc) +{ + HEVC_RewriteESDescriptorEx(hevc, NULL); +} GF_Err AVC_HEVC_UpdateESD(GF_MPEGVisualSampleEntryBox *avc, GF_ESD *esd) { @@ -817,23 +927,22 @@ GF_Err AVC_HEVC_UpdateESD(GF_MPEGVisualSampleEntryBox *avc, GF_ESD *esd) } } - /*update GF_AVCConfig*/ - if (!avc->svc_config) { - if (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_HEVC) { - if (!avc->hevc_config) avc->hevc_config = (GF_HEVCConfigurationBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_HVCC); - if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { - if (avc->hevc_config->config) gf_odf_hevc_cfg_del(avc->hevc_config->config); - avc->hevc_config->config = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, 0); - } - } else { - if (!avc->avc_config) avc->avc_config = (GF_AVCConfigurationBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_AVCC); - if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { - if (avc->avc_config->config) gf_odf_avc_cfg_del(avc->avc_config->config); - avc->avc_config->config = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); - } - } + if (!avc->shvc_config && (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_HEVC)) { + if (!avc->hevc_config) avc->hevc_config = (GF_HEVCConfigurationBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_HVCC); + if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { + if (avc->hevc_config->config) gf_odf_hevc_cfg_del(avc->hevc_config->config); + avc->hevc_config->config = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, 0); + } } + else if (!avc->svc_config && (esd->decoderConfig->objectTypeIndication==GPAC_OTI_VIDEO_AVC)) { + if (!avc->avc_config) avc->avc_config = (GF_AVCConfigurationBox *)gf_isom_box_new(GF_ISOM_BOX_TYPE_AVCC); + if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { + if (avc->avc_config->config) gf_odf_avc_cfg_del(avc->avc_config->config); + avc->avc_config->config = gf_odf_avc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength); + } + } + gf_odf_desc_del((GF_Descriptor *)esd); if (avc->hevc_config) { HEVC_RewriteESDescriptor(avc); diff --git a/src/isomedia/box_code_base.c b/src/isomedia/box_code_base.c index 362b55c..0cc2863 100644 --- a/src/isomedia/box_code_base.c +++ b/src/isomedia/box_code_base.c @@ -8470,7 +8470,7 @@ static void sgpd_del_entry(u32 grouping_type, void *entry) } } -static 
void sgpd_write_entry(u32 grouping_type, void *entry, GF_BitStream *bs) +void sgpd_write_entry(u32 grouping_type, void *entry, GF_BitStream *bs) { switch (grouping_type) { case GF_4CC( 'r', 'o', 'l', 'l' ): @@ -8506,7 +8506,6 @@ static u32 sgpd_size_entry(u32 grouping_type, void *entry) } } - GF_Box *sgpd_New() { ISOM_DECL_BOX_ALLOC(GF_SampleGroupDescriptionBox, GF_ISOM_BOX_TYPE_SGPD); diff --git a/src/isomedia/box_code_drm.c b/src/isomedia/box_code_drm.c index d640cf3..faa820d 100644 --- a/src/isomedia/box_code_drm.c +++ b/src/isomedia/box_code_drm.c @@ -233,6 +233,7 @@ void schi_del(GF_Box *s) if (ptr->okms) gf_isom_box_del((GF_Box *)ptr->okms); if (ptr->tenc) gf_isom_box_del((GF_Box *)ptr->tenc); if (ptr->piff_tenc) gf_isom_box_del((GF_Box *)ptr->piff_tenc); + if (ptr->adkm) gf_isom_box_del((GF_Box *)ptr->adkm); gf_free(ptr); } @@ -256,6 +257,10 @@ GF_Err schi_AddBox(GF_Box *s, GF_Box *a) if (ptr->tenc) return GF_ISOM_INVALID_FILE; ptr->tenc = (GF_TrackEncryptionBox *)a; return GF_OK; + case GF_ISOM_BOX_TYPE_ADKM: + if (ptr->adkm) return GF_ISOM_INVALID_FILE; + ptr->adkm = (GF_AdobeDRMKeyManagementSystemBox *)a; + return GF_OK; case GF_ISOM_BOX_TYPE_UUID: if (((GF_UUIDBox*)a)->internal_4cc==GF_ISOM_BOX_UUID_TENC) { if (ptr->piff_tenc) return GF_ISOM_INVALID_FILE; @@ -299,6 +304,10 @@ GF_Err schi_Write(GF_Box *s, GF_BitStream *bs) e = gf_isom_box_write((GF_Box *) ptr->tenc, bs); if (e) return e; } + if (ptr->adkm) { + e = gf_isom_box_write((GF_Box *) ptr->adkm, bs); + if (e) return e; + } if (ptr->piff_tenc) { e = gf_isom_box_write((GF_Box *) ptr->piff_tenc, bs); if (e) return e; @@ -334,6 +343,11 @@ GF_Err schi_Size(GF_Box *s) if (e) return e; ptr->size += ptr->tenc->size; } + if (ptr->adkm) { + e = gf_isom_box_size((GF_Box *) ptr->adkm); + if (e) return e; + ptr->size += ptr->adkm->size; + } if (ptr->piff_tenc) { e = gf_isom_box_size((GF_Box *) ptr->tenc); if (e) return e; @@ -1169,7 +1183,7 @@ GF_Err piff_psec_Write(GF_Box *s, GF_BitStream *bs) for (i = 0; i < sample_count; i++) { GF_CENCSampleAuxInfo *sai = (GF_CENCSampleAuxInfo *)gf_list_get(ptr->samp_aux_info, i); if (! 
sai->IV_size) continue; - gf_bs_write_data(bs, (char *)sai->IV, 16); + gf_bs_write_data(bs, (char *)sai->IV, sai->IV_size); gf_bs_write_u16(bs, sai->subsample_count); for (j = 0; j < sai->subsample_count; j++) { gf_bs_write_u16(bs, sai->subsamples[j].bytes_clear_data); @@ -1392,4 +1406,475 @@ GF_Err senc_Size(GF_Box *s) } #endif //GPAC_DISABLE_ISOM_WRITE +GF_Box *adkm_New() +{ + ISOM_DECL_BOX_ALLOC(GF_AdobeDRMKeyManagementSystemBox, GF_ISOM_BOX_TYPE_ADKM); + tmp->version = 1; + tmp->flags = 0; + return (GF_Box *)tmp; +} + +void adkm_del(GF_Box *s) +{ + GF_AdobeDRMKeyManagementSystemBox *ptr = (GF_AdobeDRMKeyManagementSystemBox *)s; + if (!ptr) return; + if (ptr->header) gf_isom_box_del((GF_Box *)ptr->header); + if (ptr->au_format) gf_isom_box_del((GF_Box *)ptr->au_format); + gf_free(s); +} + +GF_Err adkm_AddBox(GF_Box *s, GF_Box *a) +{ + GF_AdobeDRMKeyManagementSystemBox *ptr = (GF_AdobeDRMKeyManagementSystemBox *)s; + switch (a->type) { + case GF_ISOM_BOX_TYPE_AHDR: + if (ptr->header) return GF_ISOM_INVALID_FILE; + ptr->header = (GF_AdobeDRMHeaderBox *)a; + break; + case GF_ISOM_BOX_TYPE_ADAF: + if (ptr->au_format) return GF_ISOM_INVALID_FILE; + ptr->au_format = (GF_AdobeDRMAUFormatBox *)a; + break; + + default: + return gf_isom_box_add_default(s, a); + } + return GF_OK; +} + +GF_Err adkm_Read(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + e = gf_isom_full_box_read(s, bs); + if (e) return e; + return gf_isom_read_box_list(s, bs, adkm_AddBox); +} + +#ifndef GPAC_DISABLE_ISOM_WRITE +GF_Err adkm_Write(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeDRMKeyManagementSystemBox *ptr = (GF_AdobeDRMKeyManagementSystemBox *)s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_write(s, bs); + if (e) return e; + //ahdr + e = gf_isom_box_write((GF_Box *) ptr->header, bs); + if (e) return e; + //adaf + e = gf_isom_box_write((GF_Box *) ptr->au_format, bs); + if (e) return e; + + return GF_OK; +} + +GF_Err adkm_Size(GF_Box *s) +{ + GF_Err e; + GF_AdobeDRMKeyManagementSystemBox *ptr = (GF_AdobeDRMKeyManagementSystemBox *)s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_get_size(s); + if (e) return e; + e = gf_isom_box_size((GF_Box *) ptr->header); + if (e) return e; + ptr->size += ptr->header->size; + e = gf_isom_box_size((GF_Box *) ptr->au_format); + if (e) return e; + ptr->size += ptr->au_format->size; + return GF_OK; +} +#endif //GPAC_DISABLE_ISOM_WRITE + +GF_Box *ahdr_New() +{ + ISOM_DECL_BOX_ALLOC(GF_AdobeDRMHeaderBox, GF_ISOM_BOX_TYPE_AHDR); + tmp->version = 2; + tmp->flags = 0; + return (GF_Box *)tmp; +} + +void ahdr_del(GF_Box *s) +{ + GF_AdobeDRMHeaderBox *ptr = (GF_AdobeDRMHeaderBox *)s; + if (!ptr) return; + if (ptr->std_enc_params) gf_isom_box_del((GF_Box *)ptr->std_enc_params); + gf_free(s); +} + + +GF_Err ahdr_AddBox(GF_Box *s, GF_Box *a) +{ + GF_AdobeDRMHeaderBox *ptr = (GF_AdobeDRMHeaderBox *)s; + switch (a->type) { + case GF_ISOM_BOX_TYPE_APRM: + if (ptr->std_enc_params) return GF_ISOM_INVALID_FILE; + ptr->std_enc_params = (GF_AdobeStdEncryptionParamsBox *)a; + break; + + default: + return gf_isom_box_add_default(s, a); + } + return GF_OK; +} + +GF_Err ahdr_Read(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + e = gf_isom_full_box_read(s, bs); + if (e) return e; + return gf_isom_read_box_list(s, bs, ahdr_AddBox); +} + +#ifndef GPAC_DISABLE_ISOM_WRITE +GF_Err ahdr_Write(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeDRMHeaderBox *ptr = (GF_AdobeDRMHeaderBox *)s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_write(s, bs); + if (e) return e; + e = 
gf_isom_box_write((GF_Box *) ptr->std_enc_params, bs); + if (e) return e; + + return GF_OK; +} + +GF_Err ahdr_Size(GF_Box *s) +{ + GF_Err e; + GF_AdobeDRMHeaderBox *ptr = (GF_AdobeDRMHeaderBox *)s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_get_size(s); + if (e) return e; + e = gf_isom_box_size((GF_Box *) ptr->std_enc_params); + if (e) return e; + ptr->size += ptr->std_enc_params->size; + return GF_OK; +} +#endif //GPAC_DISABLE_ISOM_WRITE + +GF_Box *aprm_New() +{ + ISOM_DECL_BOX_ALLOC(GF_AdobeStdEncryptionParamsBox, GF_ISOM_BOX_TYPE_APRM); + tmp->version = 1; + tmp->flags = 0; + return (GF_Box *)tmp; +} + +void aprm_del(GF_Box *s) +{ + GF_AdobeStdEncryptionParamsBox *ptr = (GF_AdobeStdEncryptionParamsBox *)s; + if (!ptr) return; + if (ptr->enc_info) gf_isom_box_del((GF_Box *)ptr->enc_info); + if (ptr->key_info) gf_isom_box_del((GF_Box *)ptr->key_info); + gf_free(s); +} + +GF_Err aprm_AddBox(GF_Box *s, GF_Box *a) +{ + GF_AdobeStdEncryptionParamsBox *ptr = (GF_AdobeStdEncryptionParamsBox *)s; + switch (a->type) { + case GF_ISOM_BOX_TYPE_AHDR: + if (ptr->enc_info) return GF_ISOM_INVALID_FILE; + ptr->enc_info = (GF_AdobeEncryptionInfoBox *)a; + break; + case GF_ISOM_BOX_TYPE_ADAF: + if (ptr->key_info) return GF_ISOM_INVALID_FILE; + ptr->key_info = (GF_AdobeKeyInfoBox *)a; + break; + + default: + return gf_isom_box_add_default(s, a); + } + return GF_OK; +} + +GF_Err aprm_Read(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + e = gf_isom_full_box_read(s, bs); + if (e) return e; + return gf_isom_read_box_list(s, bs, aprm_AddBox); +} + +#ifndef GPAC_DISABLE_ISOM_WRITE +GF_Err aprm_Write(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeStdEncryptionParamsBox *ptr = (GF_AdobeStdEncryptionParamsBox *)s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_write(s, bs); + if (e) return e; + //ahdr + e = gf_isom_box_write((GF_Box *) ptr->enc_info, bs); + if (e) return e; + //adaf + e = gf_isom_box_write((GF_Box *) ptr->key_info, bs); + if (e) return e; + + return GF_OK; +} + +GF_Err aprm_Size(GF_Box *s) +{ + GF_Err e; + GF_AdobeStdEncryptionParamsBox *ptr = (GF_AdobeStdEncryptionParamsBox *)s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_get_size(s); + if (e) return e; + e = gf_isom_box_size((GF_Box *) ptr->enc_info); + if (e) return e; + ptr->size += ptr->enc_info->size; + e = gf_isom_box_size((GF_Box *) ptr->key_info); + if (e) return e; + ptr->size += ptr->key_info->size; + return GF_OK; +} +#endif //GPAC_DISABLE_ISOM_WRITE + +GF_Box *aeib_New() +{ + ISOM_DECL_BOX_ALLOC(GF_AdobeEncryptionInfoBox, GF_ISOM_BOX_TYPE_AEIB); + tmp->version = 1; + tmp->flags = 0; + return (GF_Box *)tmp; +} + +void aeib_del(GF_Box *s) +{ + GF_AdobeEncryptionInfoBox *ptr = (GF_AdobeEncryptionInfoBox*)s; + if (!ptr) return; + if (ptr->enc_algo) gf_free(ptr->enc_algo); + gf_free(ptr); +} + +GF_Err aeib_Read(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeEncryptionInfoBox *ptr = (GF_AdobeEncryptionInfoBox*)s; + u32 len; + + e = gf_isom_full_box_read(s, bs); + if (e) return e; + + len = (u32) ptr->size - 1; + if (len) { + if (ptr->enc_algo) return GF_ISOM_INVALID_FILE; + ptr->enc_algo = (char *)gf_malloc(len*sizeof(char)); + gf_bs_read_data(bs, ptr->enc_algo, len); + } + ptr->key_length = gf_bs_read_u8(bs); + ptr->size = 0; + return GF_OK; +} + +#ifndef GPAC_DISABLE_ISOM_WRITE + +GF_Err aeib_Write(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeEncryptionInfoBox *ptr = (GF_AdobeEncryptionInfoBox *) s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_write(s, bs); + if (e) return e; + if 
(ptr->enc_algo) { + gf_bs_write_data(bs, (char *) ptr->enc_algo, (u32) strlen(ptr->enc_algo)); + gf_bs_write_u8(bs, 0); //string end + } + gf_bs_write_u8(bs, ptr->key_length); + return GF_OK; +} + +GF_Err aeib_Size(GF_Box *s) +{ + GF_Err e; + GF_AdobeEncryptionInfoBox *ptr = (GF_AdobeEncryptionInfoBox*)s; + e = gf_isom_full_box_get_size(s); + if (e) return e; + if (ptr->enc_algo) + ptr->size += strlen(ptr->enc_algo) + 1; + ptr->size += 1; //KeyLength + return GF_OK; +} +#endif //GPAC_DISABLE_ISOM_WRITE + +GF_Box *akey_New() +{ + ISOM_DECL_BOX_ALLOC(GF_AdobeKeyInfoBox, GF_ISOM_BOX_TYPE_AKEY); + tmp->version = 1; + tmp->flags = 0; + return (GF_Box *)tmp; +} + +void akey_del(GF_Box *s) +{ + GF_AdobeKeyInfoBox *ptr = (GF_AdobeKeyInfoBox *)s; + if (!ptr) return; + if (ptr->params) gf_isom_box_del((GF_Box *)ptr->params); + gf_free(s); +} + +GF_Err akey_AddBox(GF_Box *s, GF_Box *a) +{ + GF_AdobeKeyInfoBox *ptr = (GF_AdobeKeyInfoBox *)s; + switch (a->type) { + case GF_ISOM_BOX_TYPE_FLXS: + if (ptr->params) return GF_ISOM_INVALID_FILE; + ptr->params = (GF_AdobeFlashAccessParamsBox *)a; + break; + default: + return gf_isom_box_add_default(s, a); + } + return GF_OK; +} + +GF_Err akey_Read(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + e = gf_isom_full_box_read(s, bs); + if (e) return e; + + return gf_isom_read_box_list(s, bs, akey_AddBox); +} + +#ifndef GPAC_DISABLE_ISOM_WRITE +GF_Err akey_Write(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeKeyInfoBox *ptr = (GF_AdobeKeyInfoBox *)s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_write(s, bs); + if (e) return e; + e = gf_isom_box_write((GF_Box *) ptr->params, bs); + if (e) return e; + + return GF_OK; +} + +GF_Err akey_Size(GF_Box *s) +{ + GF_Err e; + GF_AdobeKeyInfoBox *ptr = (GF_AdobeKeyInfoBox *)s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_get_size(s); + if (e) return e; + e = gf_isom_box_size((GF_Box *) ptr->params); + if (e) return e; + ptr->size += ptr->params->size; + e = gf_isom_box_size((GF_Box *) ptr->params); + return GF_OK; +} +#endif //GPAC_DISABLE_ISOM_WRITE + +GF_Box *flxs_New() +{ + ISOM_DECL_BOX_ALLOC(GF_AdobeFlashAccessParamsBox, GF_ISOM_BOX_TYPE_FLXS); + return (GF_Box *)tmp; +} + +void flxs_del(GF_Box *s) +{ + GF_AdobeFlashAccessParamsBox *ptr = (GF_AdobeFlashAccessParamsBox*)s; + if (!ptr) return; + if (ptr->metadata) + gf_free(ptr->metadata); + gf_free(ptr); +} + +GF_Err flxs_Read(GF_Box *s, GF_BitStream *bs) +{ + GF_AdobeFlashAccessParamsBox *ptr = (GF_AdobeFlashAccessParamsBox*)s; + u32 len; + + len = (u32) ptr->size; + if (len) { + if (ptr->metadata) return GF_ISOM_INVALID_FILE; + ptr->metadata = (char *)gf_malloc(len*sizeof(char)); + gf_bs_read_data(bs, ptr->metadata, len); + } + return GF_OK; +} + +#ifndef GPAC_DISABLE_ISOM_WRITE + +GF_Err flxs_Write(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeFlashAccessParamsBox *ptr = (GF_AdobeFlashAccessParamsBox *) s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_box_write_header(s, bs); + if (e) return e; + if (ptr->metadata) { + gf_bs_write_data(bs, ptr->metadata, (u32) strlen(ptr->metadata)); + gf_bs_write_u8(bs, 0); //string end + } + return GF_OK; +} + +GF_Err flxs_Size(GF_Box *s) +{ + GF_Err e; + GF_AdobeFlashAccessParamsBox *ptr = (GF_AdobeFlashAccessParamsBox*)s; + e = gf_isom_box_get_size(s); + if (e) return e; + if (ptr->metadata) + ptr->size += strlen(ptr->metadata) + 1; + return GF_OK; +} +#endif //GPAC_DISABLE_ISOM_WRITE + +GF_Box *adaf_New() +{ + ISOM_DECL_BOX_ALLOC(GF_AdobeDRMAUFormatBox, GF_ISOM_BOX_TYPE_ADAF); + return (GF_Box *)tmp; +} 
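For orientation, the Adobe protection boxes introduced above nest as adkm > ahdr > aprm > (aeib, akey > flxs), with adaf sitting next to ahdr inside adkm. The sketch below condenses how gf_isom_set_adobe_protection (further down in this patch) wires that hierarchy together; it assumes the GPAC internal header exposes these box types and the *_New() constructors defined above, and it is illustrative only, not an applicable hunk:

    #include <gpac/internal/isomedia_dev.h>   /* internal GPAC box types and *_New() constructors (assumed) */

    /* Build the Adobe DRM key-management hierarchy inside a protection scheme info ('sinf'/'schi') box,
       mirroring what gf_isom_set_adobe_protection does in this patch. */
    static void build_adobe_adkm(GF_ProtectionInfoBox *sinf)
    {
        GF_AdobeDRMKeyManagementSystemBox *adkm = (GF_AdobeDRMKeyManagementSystemBox *) adkm_New();
        sinf->info->adkm = adkm;

        /* 'ahdr' carries the standard encryption parameters ('aprm') */
        adkm->header = (GF_AdobeDRMHeaderBox *) ahdr_New();
        adkm->header->std_enc_params = (GF_AdobeStdEncryptionParamsBox *) aprm_New();

        /* 'aeib' holds algorithm name and key length; 'akey' wraps the Flash Access metadata box 'flxs' */
        adkm->header->std_enc_params->enc_info = (GF_AdobeEncryptionInfoBox *) aeib_New();
        adkm->header->std_enc_params->key_info = (GF_AdobeKeyInfoBox *) akey_New();
        adkm->header->std_enc_params->key_info->params = (GF_AdobeFlashAccessParamsBox *) flxs_New();

        /* 'adaf' describes the per-AU format: selective encryption flag and IV length */
        adkm->au_format = (GF_AdobeDRMAUFormatBox *) adaf_New();
        adkm->au_format->IV_length = 16;
    }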
+ +void adaf_del(GF_Box *s) +{ + gf_free(s); +} + +GF_Err adaf_Read(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeDRMAUFormatBox *ptr = (GF_AdobeDRMAUFormatBox*)s; + + e = gf_isom_full_box_read(s, bs); + if (e) return e; + + ptr->selective_enc = gf_bs_read_u8(bs); + gf_bs_read_u8(bs);//resersed + ptr->IV_length = gf_bs_read_u8(bs); + ptr->size -= 3; + return GF_OK; +} + +#ifndef GPAC_DISABLE_ISOM_WRITE + +GF_Err adaf_Write(GF_Box *s, GF_BitStream *bs) +{ + GF_Err e; + GF_AdobeDRMAUFormatBox *ptr = (GF_AdobeDRMAUFormatBox *) s; + if (!s) return GF_BAD_PARAM; + e = gf_isom_full_box_write(s, bs); + if (e) return e; + + gf_bs_write_u8(bs, ptr->selective_enc); + gf_bs_write_u8(bs, 0x0); + gf_bs_write_u8(bs, ptr->IV_length); + return GF_OK; +} + +GF_Err adaf_Size(GF_Box *s) +{ + GF_Err e; + GF_AdobeDRMAUFormatBox *ptr = (GF_AdobeDRMAUFormatBox*)s; + e = gf_isom_full_box_get_size(s); + if (e) return e; + ptr->size += 3; + return GF_OK; +} +#endif //GPAC_DISABLE_ISOM_WRITE + + #endif /*GPAC_DISABLE_ISOM*/ diff --git a/src/isomedia/box_dump.c b/src/isomedia/box_dump.c index 13be522..669c693 100644 --- a/src/isomedia/box_dump.c +++ b/src/isomedia/box_dump.c @@ -514,6 +514,22 @@ GF_Err gf_box_dump(void *ptr, FILE * trace) case GF_ISOM_BOX_TYPE_SBTT: return metx_dump(a, trace); #endif + + /*Adobe's protection boxes*/ + case GF_ISOM_BOX_TYPE_ADKM: + return adkm_dump(a, trace); + case GF_ISOM_BOX_TYPE_AHDR: + return ahdr_dump(a, trace); + case GF_ISOM_BOX_TYPE_ADAF: + return adaf_dump(a, trace); + case GF_ISOM_BOX_TYPE_APRM: + return aprm_dump(a, trace); + case GF_ISOM_BOX_TYPE_AEIB: + return aeib_dump(a, trace); + case GF_ISOM_BOX_TYPE_AKEY: + return akey_dump(a, trace); + case GF_ISOM_BOX_TYPE_FLXS: + return flxs_dump(a, trace); default: return defa_dump(a, trace); @@ -3309,6 +3325,7 @@ GF_Err schi_dump(GF_Box *a, FILE * trace) if (p->isfm) gf_box_dump(p->isfm, trace); if (p->okms) gf_box_dump(p->okms, trace); if (p->tenc) gf_box_dump(p->tenc, trace); + if (p->adkm) gf_box_dump(p->adkm, trace); gf_box_dump_done("SchemeInformationBox", a, trace); return GF_OK; } @@ -4258,4 +4275,88 @@ GF_Err prft_dump(GF_Box *a, FILE * trace) return GF_OK; } +GF_Err adkm_dump(GF_Box *a, FILE * trace) +{ + GF_AdobeDRMKeyManagementSystemBox *ptr = (GF_AdobeDRMKeyManagementSystemBox *)a; + if (!a) return GF_BAD_PARAM; + fprintf(trace, "\n"); + DumpBox(a, trace); + gf_full_box_dump((GF_Box *)a, trace); + if (ptr->header) gf_box_dump((GF_Box *)ptr->header, trace); + if (ptr->au_format) gf_box_dump((GF_Box *)ptr->au_format, trace); + gf_box_dump_done("GF_AdobeDRMKeyManagementSystemBox", a, trace); + return GF_OK; +} + +GF_Err ahdr_dump(GF_Box *a, FILE * trace) +{ + GF_AdobeDRMHeaderBox *ptr = (GF_AdobeDRMHeaderBox *)a; + if (!a) return GF_BAD_PARAM; + fprintf(trace, "\n"); + DumpBox(a, trace); + gf_full_box_dump((GF_Box *)a, trace); + if (ptr->std_enc_params) gf_box_dump((GF_Box *)ptr->std_enc_params, trace); + gf_box_dump_done("GF_AdobeDRMHeaderBox", a, trace); + return GF_OK; +} + +GF_Err aprm_dump(GF_Box *a, FILE * trace) +{ + GF_AdobeStdEncryptionParamsBox *ptr = (GF_AdobeStdEncryptionParamsBox *)a; + if (!a) return GF_BAD_PARAM; + fprintf(trace, "\n"); + DumpBox(a, trace); + gf_full_box_dump((GF_Box *)a, trace); + if (ptr->enc_info) gf_box_dump((GF_Box *)ptr->enc_info, trace); + if (ptr->key_info) gf_box_dump((GF_Box *)ptr->key_info, trace); + gf_box_dump_done("GF_AdobeStdEncryptionParamsBox", a, trace); + return GF_OK; +} + +GF_Err aeib_dump(GF_Box *a, FILE * trace) +{ + GF_AdobeEncryptionInfoBox *ptr = 
(GF_AdobeEncryptionInfoBox *)a; + if (!a) return GF_BAD_PARAM; + fprintf(trace, "\n", ptr->enc_algo, ptr->key_length); + DumpBox(a, trace); + gf_full_box_dump((GF_Box *)a, trace); + gf_box_dump_done("GF_AdobeEncryptionInfoBox", a, trace); + return GF_OK; +} + +GF_Err akey_dump(GF_Box *a, FILE * trace) +{ + GF_AdobeKeyInfoBox *ptr = (GF_AdobeKeyInfoBox *)a; + if (!a) return GF_BAD_PARAM; + fprintf(trace, "\n"); + DumpBox(a, trace); + gf_full_box_dump((GF_Box *)a, trace); + if (ptr->params) gf_box_dump((GF_Box *)ptr->params, trace); + gf_box_dump_done("GF_AdobeKeyInfoBox", a, trace); + return GF_OK; +} + +GF_Err flxs_dump(GF_Box *a, FILE * trace) +{ + GF_AdobeFlashAccessParamsBox *ptr = (GF_AdobeFlashAccessParamsBox *)a; + if (!a) return GF_BAD_PARAM; + fprintf(trace, "\n"); + DumpBox(a, trace); + if (ptr->metadata) + fprintf(trace, "\n", ptr->metadata); + gf_box_dump_done("GF_AdobeFlashAccessParamsBox", a, trace); + return GF_OK; +} + +GF_Err adaf_dump(GF_Box *a, FILE * trace) +{ + GF_AdobeDRMAUFormatBox *ptr = (GF_AdobeDRMAUFormatBox *)a; + if (!a) return GF_BAD_PARAM; + fprintf(trace, "\n", ptr->selective_enc ? 1 : 0, ptr->IV_length); + DumpBox(a, trace); + gf_full_box_dump((GF_Box *)a, trace); + gf_box_dump_done("GF_AdobeDRMAUFormatBox", a, trace); + return GF_OK; +} + #endif /*GPAC_DISABLE_ISOM_DUMP*/ diff --git a/src/isomedia/box_funcs.c b/src/isomedia/box_funcs.c index 394b6fa..bfa0128 100644 --- a/src/isomedia/box_funcs.c +++ b/src/isomedia/box_funcs.c @@ -237,6 +237,7 @@ GF_Err gf_isom_read_box_list_ex(GF_Box *parent, GF_BitStream *bs, GF_Err (*add_b { GF_Err e; GF_Box *a = NULL; + while (parent->size) { e = gf_isom_parse_box_ex(&a, bs, parent_type); if (e) { @@ -691,6 +692,14 @@ GF_Box *gf_isom_box_new(u32 boxType) case GF_ISOM_BOX_TYPE_SBTT: return metx_New(GF_ISOM_BOX_TYPE_SBTT); #endif //GPAC_DISABLE_TTXT + case GF_ISOM_BOX_TYPE_ADKM: return adkm_New(); + case GF_ISOM_BOX_TYPE_AHDR: return ahdr_New(); + case GF_ISOM_BOX_TYPE_APRM: return aprm_New(); + case GF_ISOM_BOX_TYPE_AEIB: return aeib_New(); + case GF_ISOM_BOX_TYPE_AKEY: return akey_New(); + case GF_ISOM_BOX_TYPE_FLXS: return flxs_New(); + case GF_ISOM_BOX_TYPE_ADAF: return adaf_New(); + default: a = defa_New(); if (a) a->type = boxType; @@ -1022,6 +1031,14 @@ void gf_isom_box_del(GF_Box *a) #endif // GPAC_DISABLE_TTXT + case GF_ISOM_BOX_TYPE_ADKM: adkm_del(a); return; + case GF_ISOM_BOX_TYPE_AHDR: ahdr_del(a); return; + case GF_ISOM_BOX_TYPE_APRM: aprm_del(a); return; + case GF_ISOM_BOX_TYPE_AEIB: aeib_del(a); return; + case GF_ISOM_BOX_TYPE_AKEY: akey_del(a); return; + case GF_ISOM_BOX_TYPE_FLXS: flxs_del(a); return; + case GF_ISOM_BOX_TYPE_ADAF: adaf_del(a); return; + default: defa_del(a); return; @@ -1308,6 +1325,14 @@ GF_Err gf_isom_box_read(GF_Box *a, GF_BitStream *bs) #endif // GPAC_DISABLE_TTXT + case GF_ISOM_BOX_TYPE_ADKM: return adkm_Read(a, bs); + case GF_ISOM_BOX_TYPE_AHDR: return ahdr_Read(a, bs); + case GF_ISOM_BOX_TYPE_APRM: return aprm_Read(a, bs); + case GF_ISOM_BOX_TYPE_AEIB: return aeib_Read(a, bs); + case GF_ISOM_BOX_TYPE_AKEY: return akey_Read(a, bs); + case GF_ISOM_BOX_TYPE_FLXS: return flxs_Read(a, bs); + case GF_ISOM_BOX_TYPE_ADAF: return adaf_Read(a, bs); + default: return defa_Read(a, bs); } @@ -1597,6 +1622,14 @@ GF_Err gf_isom_box_write_listing(GF_Box *a, GF_BitStream *bs) case GF_ISOM_BOX_TYPE_SBTT: return metx_Write(a, bs); #endif//GPAC_DISABLE_TTXT + case GF_ISOM_BOX_TYPE_ADKM: return adkm_Write(a, bs); + case GF_ISOM_BOX_TYPE_AHDR: return ahdr_Write(a, bs); + case GF_ISOM_BOX_TYPE_APRM: 
return aprm_Write(a, bs); + case GF_ISOM_BOX_TYPE_AEIB: return aeib_Write(a, bs); + case GF_ISOM_BOX_TYPE_AKEY: return akey_Write(a, bs); + case GF_ISOM_BOX_TYPE_FLXS: return flxs_Write(a, bs); + case GF_ISOM_BOX_TYPE_ADAF: return adaf_Write(a, bs); + default: return defa_Write(a, bs); } @@ -1892,6 +1925,14 @@ static GF_Err gf_isom_box_size_listing(GF_Box *a) case GF_ISOM_BOX_TYPE_SBTT: return metx_Size(a); #endif // GPAC_DISABLE_TTXT + case GF_ISOM_BOX_TYPE_ADKM: return adkm_Size(a); + case GF_ISOM_BOX_TYPE_AHDR: return ahdr_Size(a); + case GF_ISOM_BOX_TYPE_APRM: return aprm_Size(a); + case GF_ISOM_BOX_TYPE_AEIB: return aeib_Size(a); + case GF_ISOM_BOX_TYPE_AKEY: return akey_Size(a); + case GF_ISOM_BOX_TYPE_FLXS: return flxs_Size(a); + case GF_ISOM_BOX_TYPE_ADAF: return adaf_Size(a); + default: return defa_Size(a); } } diff --git a/src/isomedia/drm_sample.c b/src/isomedia/drm_sample.c index 8511d9b..42588be 100644 --- a/src/isomedia/drm_sample.c +++ b/src/isomedia/drm_sample.c @@ -334,6 +334,7 @@ GF_Err gf_isom_remove_track_protection(GF_ISOFile *the_file, u32 trackNumber, u3 if (!sinf) sinf = gf_isom_get_sinf_entry(trak, sampleDescriptionIndex, GF_ISOM_CBC_SCHEME, &sea); if (!sinf) sinf = gf_isom_get_sinf_entry(trak, sampleDescriptionIndex, GF_ISOM_ISMACRYP_SCHEME, &sea); if (!sinf) sinf = gf_isom_get_sinf_entry(trak, sampleDescriptionIndex, GF_ISOM_OMADRM_SCHEME, &sea); + if (!sinf) sinf = gf_isom_get_sinf_entry(trak, sampleDescriptionIndex, GF_ISOM_ADOBE_SCHEME, &sea); if (!sinf) return GF_OK; sea->type = sinf->original_format->data_format; @@ -374,7 +375,8 @@ GF_Err gf_isom_change_ismacryp_protection(GF_ISOFile *the_file, u32 trackNumber, } -static GF_Err gf_isom_set_protected_entry(GF_ISOFile *the_file, u32 trackNumber, u32 desc_index, u32 scheme_type, u32 scheme_version, Bool is_isma, GF_ProtectionInfoBox **out_sinf) +static GF_Err gf_isom_set_protected_entry(GF_ISOFile *the_file, u32 trackNumber, u32 desc_index, u8 version, u32 flags, + u32 scheme_type, u32 scheme_version, char *scheme_uri, Bool is_isma, GF_ProtectionInfoBox **out_sinf) { u32 original_format; GF_Err e; @@ -439,8 +441,14 @@ static GF_Err gf_isom_set_protected_entry(GF_ISOFile *the_file, u32 trackNumber, gf_list_add(sea->protections, sinf); sinf->scheme_type = (GF_SchemeTypeBox *)schm_New(); + sinf->scheme_type->version = version; + sinf->scheme_type->flags = flags; sinf->scheme_type->scheme_type = scheme_type; sinf->scheme_type->scheme_version = scheme_version; + if (sinf->scheme_type->flags == 1) { + sinf->scheme_type->URI = (char *)gf_malloc(sizeof(char)*strlen(scheme_uri)); + memmove(sinf->scheme_type->URI, scheme_uri, strlen(scheme_uri)); + } sinf->original_format = (GF_OriginalFormatBox *)frma_New(); sinf->original_format->data_format = original_format; @@ -460,7 +468,7 @@ GF_Err gf_isom_set_ismacryp_protection(GF_ISOFile *the_file, u32 trackNumber, u3 GF_ProtectionInfoBox *sinf; //setup generic protection - e = gf_isom_set_protected_entry(the_file, trackNumber, desc_index, scheme_type, scheme_version, GF_TRUE, &sinf); + e = gf_isom_set_protected_entry(the_file, trackNumber, desc_index, 0, 0, scheme_type, scheme_version, NULL, GF_TRUE, &sinf); if (e) return e; if (scheme_uri) { @@ -486,7 +494,7 @@ GF_Err gf_isom_set_oma_protection(GF_ISOFile *the_file, u32 trackNumber, u32 des GF_Err e; //setup generic protection - e = gf_isom_set_protected_entry(the_file, trackNumber, desc_index, GF_ISOM_OMADRM_SCHEME, 0x00000200, GF_FALSE, &sinf); + e = gf_isom_set_protected_entry(the_file, trackNumber, desc_index, 0, 0, 
GF_ISOM_OMADRM_SCHEME, 0x00000200, NULL, GF_FALSE, &sinf); if (e) return e; sinf->info->okms = (GF_OMADRMKMSBox *)odkm_New(); @@ -591,7 +599,7 @@ GF_Err gf_isom_set_cenc_protection(GF_ISOFile *the_file, u32 trackNumber, u32 de GF_ProtectionInfoBox *sinf; //setup generic protection - e = gf_isom_set_protected_entry(the_file, trackNumber, desc_index, scheme_type, scheme_version, GF_FALSE, &sinf); + e = gf_isom_set_protected_entry(the_file, trackNumber, desc_index, 0, 0, scheme_type, scheme_version, NULL, GF_FALSE, &sinf); if (e) return e; sinf->info->tenc = (GF_TrackEncryptionBox *)tenc_New(); @@ -1072,7 +1080,7 @@ static GF_Err gf_isom_cenc_get_sai_by_saiz_saio(GF_MediaBox *mdia, u32 sampleNum offset += (nb_saio == 1) ? prev_sai_size : 0; cur_position = gf_bs_get_position(mdia->information->dataHandler->bs); gf_bs_seek(mdia->information->dataHandler->bs, offset); - buffer = (char *)malloc(size); + buffer = (char *)gf_malloc(size); gf_bs_read_data(mdia->information->dataHandler->bs, buffer, size); gf_bs_seek(mdia->information->dataHandler->bs, cur_position); @@ -1080,7 +1088,7 @@ static GF_Err gf_isom_cenc_get_sai_by_saiz_saio(GF_MediaBox *mdia, u32 sampleNum memset(*sai, 0, sizeof(GF_CENCSampleAuxInfo)); bs = gf_bs_new(buffer, size, GF_BITSTREAM_READ); gf_bs_read_data(bs, (char *)(*sai)->IV, IV_size); - if (size > 16) { + if (size > IV_size) { (*sai)->subsample_count = gf_bs_read_u16(bs); (*sai)->subsamples = (GF_CENCSubSampleEntry *)gf_malloc(sizeof(GF_CENCSubSampleEntry)*(*sai)->subsample_count); for (i = 0; i < (*sai)->subsample_count; i++) { @@ -1132,8 +1140,10 @@ GF_Err gf_isom_cenc_get_sample_aux_info(GF_ISOFile *the_file, u32 trackNumber, u } gf_isom_get_sample_cenc_info_ex(trak, NULL, sampleNumber, NULL, &IV_size, NULL); - if (!IV_size) + if (!IV_size) { + GF_SAFEALLOC( (*sai), GF_CENCSampleAuxInfo); return GF_OK; + } /*get sample auxiliary information by saiz/saio rather than by parsing senc box*/ if (gf_isom_cenc_has_saiz_saio(stbl, NULL)) { @@ -1192,6 +1202,93 @@ void gf_isom_cenc_get_default_info(GF_ISOFile *the_file, u32 trackNumber, u32 sa gf_isom_cenc_get_default_info_ex(trak, sampleDescriptionIndex, default_IsEncrypted, default_IV_size, default_KID); } +/* + Adobe'protection scheme +*/ +GF_Err gf_isom_set_adobe_protection(GF_ISOFile *the_file, u32 trackNumber, u32 desc_index, u32 scheme_type, u32 scheme_version, Bool is_selective_enc, char *metadata, u32 len) +{ + GF_Err e; + GF_ProtectionInfoBox *sinf; + + //setup generic protection + e = gf_isom_set_protected_entry(the_file, trackNumber, desc_index, 1, 0, scheme_type, scheme_version, NULL, GF_FALSE, &sinf); + if (e) return e; + + sinf->info->adkm = (GF_AdobeDRMKeyManagementSystemBox *)adkm_New(); + + sinf->info->adkm->header = (GF_AdobeDRMHeaderBox *)ahdr_New(); + + sinf->info->adkm->header->std_enc_params = (GF_AdobeStdEncryptionParamsBox *)aprm_New(); + + sinf->info->adkm->header->std_enc_params->enc_info = (GF_AdobeEncryptionInfoBox *)aeib_New(); + if (sinf->info->adkm->header->std_enc_params->enc_info->enc_algo) + gf_free(sinf->info->adkm->header->std_enc_params->enc_info->enc_algo); + sinf->info->adkm->header->std_enc_params->enc_info->enc_algo = (char *)gf_malloc(8*sizeof(char)); + strncpy(sinf->info->adkm->header->std_enc_params->enc_info->enc_algo, "AES-CBC", 7); + sinf->info->adkm->header->std_enc_params->enc_info->enc_algo[7] = 0; + sinf->info->adkm->header->std_enc_params->enc_info->key_length = 16; + + sinf->info->adkm->header->std_enc_params->key_info = (GF_AdobeKeyInfoBox *)akey_New(); + + 
sinf->info->adkm->header->std_enc_params->key_info->params = (GF_AdobeFlashAccessParamsBox *)flxs_New(); + if (metadata && len) { + if (sinf->info->adkm->header->std_enc_params->key_info->params->metadata) + gf_free(sinf->info->adkm->header->std_enc_params->key_info->params->metadata); + sinf->info->adkm->header->std_enc_params->key_info->params->metadata = (char *)gf_malloc((len+1)*sizeof(char)); + strncpy(sinf->info->adkm->header->std_enc_params->key_info->params->metadata, metadata, len); + sinf->info->adkm->header->std_enc_params->key_info->params->metadata[len] = 0; + } + + sinf->info->adkm->au_format = (GF_AdobeDRMAUFormatBox *)adaf_New(); + sinf->info->adkm->au_format->selective_enc = is_selective_enc ? 0x10 : 0x00; + sinf->info->adkm->au_format->IV_length = 16; + + return GF_OK; +} + +GF_EXPORT +Bool gf_isom_is_adobe_protection_media(GF_ISOFile *the_file, u32 trackNumber, u32 sampleDescriptionIndex) +{ + GF_TrackBox *trak; + GF_ProtectionInfoBox *sinf; + + trak = gf_isom_get_track_from_file(the_file, trackNumber); + if (!trak) return GF_FALSE; + + sinf = gf_isom_get_sinf_entry(trak, sampleDescriptionIndex, GF_ISOM_ADOBE_SCHEME, NULL); + + if (!sinf) return GF_FALSE; + + /*non-encrypted or non-ADOBE*/ + if (!sinf->info || !sinf->info->adkm) + return GF_FALSE; + + return GF_TRUE; +} + +GF_EXPORT +GF_Err gf_isom_get_adobe_protection_info(GF_ISOFile *the_file, u32 trackNumber, u32 sampleDescriptionIndex, u32 *outOriginalFormat, u32 *outSchemeType, u32 *outSchemeVersion) +{ + GF_TrackBox *trak; + GF_ProtectionInfoBox *sinf; + + trak = gf_isom_get_track_from_file(the_file, trackNumber); + if (!trak) return GF_BAD_PARAM; + + sinf = gf_isom_get_sinf_entry(trak, sampleDescriptionIndex, GF_ISOM_ADOBE_SCHEME, NULL); + + if (!sinf) return GF_BAD_PARAM; + + if (outOriginalFormat) { + *outOriginalFormat = sinf->original_format->data_format; + if (IsMP4Description(sinf->original_format->data_format)) *outOriginalFormat = GF_ISOM_SUBTYPE_MPEG4; + } + if (outSchemeType) *outSchemeType = sinf->scheme_type->scheme_type; + if (outSchemeVersion) *outSchemeVersion = sinf->scheme_type->scheme_version; + + return GF_OK; +} + #endif // GPAC_DISABLE_ISOM_FRAGMENTS diff --git a/src/isomedia/isom_read.c b/src/isomedia/isom_read.c index 400fb3e..382155b 100644 --- a/src/isomedia/isom_read.c +++ b/src/isomedia/isom_read.c @@ -1244,18 +1244,17 @@ GF_ISOSample *gf_isom_get_sample(GF_ISOFile *the_file, u32 trackNumber, u32 samp GF_EXPORT u32 gf_isom_get_sample_duration(GF_ISOFile *the_file, u32 trackNumber, u32 sampleNumber) { - u64 dur; + u32 dur; u64 dts; GF_TrackBox *trak = gf_isom_get_track_from_file(the_file, trackNumber); if (!trak || !sampleNumber) return 0; +#ifndef GPAC_DISABLE_ISOM_FRAGMENTS + if (sampleNumber<=trak->sample_count_at_seg_start) return 0; + sampleNumber -= trak->sample_count_at_seg_start; +#endif - stbl_GetSampleDTS(trak->Media->information->sampleTable->TimeToSample, sampleNumber, &dur); - if (sampleNumber == trak->Media->information->sampleTable->SampleSize->sampleCount) { - return (u32) (trak->Media->mediaHeader->duration - dur); - } - - stbl_GetSampleDTS(trak->Media->information->sampleTable->TimeToSample, sampleNumber+1, &dts); - return (u32) (dts - dur); + stbl_GetSampleDTS_and_Duration(trak->Media->information->sampleTable->TimeToSample, sampleNumber, &dts, &dur); + return dur; } @@ -1265,7 +1264,10 @@ u32 gf_isom_get_sample_size(GF_ISOFile *the_file, u32 trackNumber, u32 sampleNum u32 size = 0; GF_TrackBox *trak = gf_isom_get_track_from_file(the_file, trackNumber); if (!trak || 
!sampleNumber) return 0; - +#ifndef GPAC_DISABLE_ISOM_FRAGMENTS + if (sampleNumber<=trak->sample_count_at_seg_start) return 0; + sampleNumber -= trak->sample_count_at_seg_start; +#endif stbl_GetSampleSize(trak->Media->information->sampleTable->SampleSize, sampleNumber, &size); return size; } @@ -1279,6 +1281,10 @@ u8 gf_isom_get_sample_sync(GF_ISOFile *the_file, u32 trackNumber, u32 sampleNumb if (!trak || !sampleNumber) return 0; if (! trak->Media->information->sampleTable->SyncSample) return 1; +#ifndef GPAC_DISABLE_ISOM_FRAGMENTS + if (sampleNumber<=trak->sample_count_at_seg_start) return 0; + sampleNumber -= trak->sample_count_at_seg_start; +#endif e = stbl_GetSampleRAP(trak->Media->information->sampleTable->SyncSample, sampleNumber, &is_rap, NULL, NULL); if (e) return 0; return is_rap; @@ -1295,6 +1301,10 @@ GF_ISOSample *gf_isom_get_sample_info(GF_ISOFile *the_file, u32 trackNumber, u32 if (!trak) return NULL; if (!sampleNumber) return NULL; +#ifndef GPAC_DISABLE_ISOM_FRAGMENTS + if (sampleNumber<=trak->sample_count_at_seg_start) return NULL; + sampleNumber -= trak->sample_count_at_seg_start; +#endif samp = gf_isom_sample_new(); if (!samp) return NULL; e = Media_GetSample(trak->Media, sampleNumber, &samp, sampleDescriptionIndex, 1, data_offset); @@ -1319,6 +1329,10 @@ u64 gf_isom_get_sample_dts(GF_ISOFile *the_file, u32 trackNumber, u32 sampleNumb if (!trak) return 0; if (!sampleNumber) return 0; +#ifndef GPAC_DISABLE_ISOM_FRAGMENTS + if (sampleNumber<=trak->sample_count_at_seg_start) return 0; + sampleNumber -= trak->sample_count_at_seg_start; +#endif if (stbl_GetSampleDTS(trak->Media->information->sampleTable->TimeToSample, sampleNumber, &dts) != GF_OK) return 0; return dts; } @@ -2245,10 +2259,24 @@ GF_Err gf_isom_release_segment(GF_ISOFile *movie, Bool reset_tables) if (reset_tables) { u32 type, dur; u64 dts; + Bool scalable = has_scalable; GF_SampleTableBox *stbl = trak->Media->information->sampleTable; - if (has_scalable && !gf_isom_get_reference_count(movie, i+1, GF_ISOM_REF_SCAL)) + + if (scalable) { + //check if the base reference is in the file - if not, do not consider the track is scalable. + if (gf_isom_get_reference_count(movie, i+1, GF_ISOM_REF_BASE) > 0) { + u32 on_track=0; + GF_TrackBox *base; + gf_isom_get_reference(movie, i+1, GF_ISOM_REF_BASE, 1, &on_track); + + base = gf_isom_get_track_from_file(movie, on_track); + if (!base) scalable = GF_FALSE; + } + } + + if (scalable && !gf_isom_get_reference_count(movie, i+1, GF_ISOM_REF_SCAL)) base_track_sample_count = stbl->SampleSize->sampleCount; - trak->sample_count_at_seg_start += has_scalable ? base_track_sample_count : stbl->SampleSize->sampleCount; + trak->sample_count_at_seg_start += scalable ? 
base_track_sample_count : stbl->SampleSize->sampleCount; if (trak->sample_count_at_seg_start) { GF_Err e; e = stbl_GetSampleDTS_and_Duration(stbl->TimeToSample, stbl->SampleSize->sampleCount, &dts, &dur); @@ -3316,8 +3344,9 @@ Bool gf_isom_has_scalable_layer(GF_ISOFile *file) return GF_FALSE; count = gf_isom_get_track_count(file); for (i = 0; i < count; i++) { - if (gf_isom_get_reference_count(file, i+1, GF_ISOM_REF_SCAL) > 0) + if (gf_isom_get_reference_count(file, i+1, GF_ISOM_REF_SCAL) > 0) { return GF_TRUE; + } } return GF_FALSE; } diff --git a/src/isomedia/isom_write.c b/src/isomedia/isom_write.c index f8dae39..1bdb4a0 100644 --- a/src/isomedia/isom_write.c +++ b/src/isomedia/isom_write.c @@ -4288,8 +4288,9 @@ GF_Err gf_isom_set_rvc_config(GF_ISOFile *movie, u32 track, u32 sampleDescriptio return GF_OK; } - -GF_Err gf_isom_add_sample_group_entry(GF_List *sampleGroups, u32 sample_number, u32 grouping_type, u32 sampleGroupDescriptionIndex) +/*for now not exported*/ +/*expands sampleGroup table for the given grouping type and sample_number. If sample_number is 0, just appends an entry at the end of the table*/ +static GF_Err gf_isom_add_sample_group_entry(GF_List *sampleGroups, u32 sample_number, u32 grouping_type, u32 sampleGroupDescriptionIndex) { GF_SampleGroupBox *sgroup = NULL; u32 i, count, last_sample_in_entry; @@ -4368,27 +4369,27 @@ GF_Err gf_isom_add_sample_group_entry(GF_List *sampleGroups, u32 sample_number, return GF_OK; } -/*for now not exported*/ -static GF_Err gf_isom_set_sample_group_info(GF_ISOFile *movie, u32 track, u32 sample_number, u32 grouping_type, void *udta, void *(*sg_create_entry)(void *udta), Bool (*sg_compare_entry)(void *udta, void *entry)) + +static GF_Err gf_isom_set_sample_group_info_ex(GF_SampleTableBox *stbl, GF_TrackFragmentBox *traf, u32 sample_number, u32 grouping_type, void *udta, void *(*sg_create_entry)(void *udta), Bool (*sg_compare_entry)(void *udta, void *entry)) { - GF_Err e; - GF_TrackBox *trak; GF_List *groupList; void *entry; GF_SampleGroupDescriptionBox *sgdesc = NULL; u32 i, count, entry_idx; - e = CanAccessMovie(movie, GF_ISOM_OPEN_WRITE); - if (e) return e; - - trak = gf_isom_get_track_from_file(movie, track); - if (!trak) return GF_BAD_PARAM; + if (!stbl && !traf) return GF_BAD_PARAM; - /*look in stbl for sample sampleGroupsDescription*/ - if (!trak->Media->information->sampleTable->sampleGroupsDescription) - trak->Media->information->sampleTable->sampleGroupsDescription = gf_list_new(); + /*look in stbl or traf for sample sampleGroupsDescription*/ + if (traf) { + if (!traf->sampleGroupsDescription) + traf->sampleGroupsDescription = gf_list_new(); + groupList = traf->sampleGroupsDescription; + } else { + if (!stbl->sampleGroupsDescription) + stbl->sampleGroupsDescription = gf_list_new(); + groupList = stbl->sampleGroupsDescription; + } - groupList = trak->Media->information->sampleTable->sampleGroupsDescription; count = gf_list_count(groupList); for (i=0; igroup_descriptions, entry); - /*look in stbl for sample sampleGroups*/ - if (!trak->Media->information->sampleTable->sampleGroups) - trak->Media->information->sampleTable->sampleGroups = gf_list_new(); + /*look in stbl or traf for sample sampleGroups*/ + if (traf) { + if (!traf->sampleGroups) + traf->sampleGroups = gf_list_new(); + groupList = traf->sampleGroups; + entry_idx |= 0x10000; + } else { + if (!stbl->sampleGroups) + stbl->sampleGroups = gf_list_new(); + groupList = stbl->sampleGroups; + } + + return gf_isom_add_sample_group_entry(groupList, sample_number, grouping_type, 
entry_idx); +} + +/*for now not exported*/ +static GF_Err gf_isom_set_sample_group_info(GF_ISOFile *movie, u32 track, u32 sample_number, u32 grouping_type, void *udta, void *(*sg_create_entry)(void *udta), Bool (*sg_compare_entry)(void *udta, void *entry)) +{ + GF_Err e; + GF_TrackBox *trak; + + e = CanAccessMovie(movie, GF_ISOM_OPEN_WRITE); + if (e) return e; - groupList = trak->Media->information->sampleTable->sampleGroups; + trak = gf_isom_get_track_from_file(movie, track); + if (!trak) return GF_BAD_PARAM; - return gf_isom_add_sample_group_entry(trak->Media->information->sampleTable->sampleGroups, sample_number, grouping_type, entry_idx); + return gf_isom_set_sample_group_info_ex(trak->Media->information->sampleTable, NULL, sample_number, grouping_type, udta, sg_create_entry, sg_compare_entry); } void *sg_rap_create_entry(void *udta) @@ -4498,6 +4520,62 @@ Bool sg_encryption_compare_entry(void *udta, void *entry) return 0; } +GF_Err gf_isom_copy_sample_group_entry_to_traf(GF_TrackFragmentBox *traf, GF_SampleTableBox *stbl, u32 grouping_type, u32 sampleGroupDescriptionIndex, Bool sgpd_in_traf) +{ + if (sgpd_in_traf) { + void *entry = NULL; + u32 i, count; + GF_SampleGroupDescriptionBox *sgdesc = NULL; + GF_BitStream *bs; + + count = gf_list_count(stbl->sampleGroupsDescription); + for (i = 0; i < count; i++) { + sgdesc = (GF_SampleGroupDescriptionBox *)gf_list_get(stbl->sampleGroupsDescription, i); + if (sgdesc->grouping_type == grouping_type) + break; + sgdesc = NULL; + } + if (!sgdesc) + return GF_BAD_PARAM; + + entry = gf_list_get(sgdesc->group_descriptions, sampleGroupDescriptionIndex-1); + if (!entry) + return GF_BAD_PARAM; + + switch (grouping_type) { + case GF_4CC( 'r', 'a', 'p', ' ' ): + { + char udta[2]; + bs = gf_bs_new(udta, 2*sizeof(char), GF_BITSTREAM_WRITE); + gf_bs_write_u8(bs, ((GF_VisualRandomAccessEntry *)entry)->num_leading_samples_known); + gf_bs_write_u8(bs, ((GF_VisualRandomAccessEntry *)entry)->num_leading_samples); + return gf_isom_set_sample_group_info_ex(NULL, traf, 0, grouping_type, udta, sg_rap_create_entry, sg_rap_compare_entry); + } + case GF_4CC( 'r', 'o', 'l', 'l' ): + { + char udta[2]; + bs = gf_bs_new(udta, 2*sizeof(char), GF_BITSTREAM_WRITE); + gf_bs_write_u16(bs, ((GF_RollRecoveryEntry *)entry)->roll_distance); + return gf_isom_set_sample_group_info_ex(NULL, traf, 0, grouping_type, udta, sg_roll_create_entry, sg_roll_compare_entry); + } + case GF_4CC( 's', 'e', 'i', 'g' ): + { + char udta[20]; + bs = gf_bs_new(udta, 20*sizeof(char), GF_BITSTREAM_WRITE); + gf_bs_write_u24(bs, ((GF_CENCSampleEncryptionGroupEntry *)entry)->IsEncrypted); + gf_bs_write_u8(bs, ((GF_CENCSampleEncryptionGroupEntry *)entry)->IV_size); + gf_bs_write_data(bs, (char *) ((GF_CENCSampleEncryptionGroupEntry *)entry)->KID, 16); + gf_bs_del(bs); + return gf_isom_set_sample_group_info_ex(NULL, traf, 0, grouping_type, udta, sg_encryption_create_entry, sg_encryption_compare_entry); + } + default: + return GF_BAD_PARAM; + } + } + + return gf_isom_add_sample_group_entry(traf->sampleGroups, 0, grouping_type, sampleGroupDescriptionIndex); +} + /*sample encryption information group can be in stbl or traf*/ GF_EXPORT GF_Err gf_isom_set_sample_cenc_group(GF_ISOFile *movie, u32 track, u32 sample_number, Bool isEncrypted, u8 IV_size, bin128 KeyID) diff --git a/src/isomedia/media.c b/src/isomedia/media.c index 036431c..31209a8 100644 --- a/src/isomedia/media.c +++ b/src/isomedia/media.c @@ -175,13 +175,25 @@ GF_Err Media_GetESD(GF_MediaBox *mdia, u32 sampleDescIndex, GF_ESD **out_esd, Bo case 
GF_ISOM_BOX_TYPE_AVC2: case GF_ISOM_BOX_TYPE_AVC3: case GF_ISOM_BOX_TYPE_AVC4: - case GF_ISOM_BOX_TYPE_SVC1: case GF_ISOM_BOX_TYPE_HVC1: case GF_ISOM_BOX_TYPE_HEV1: case GF_ISOM_BOX_TYPE_HVC2: case GF_ISOM_BOX_TYPE_HEV2: + esd = ((GF_MPEGVisualSampleEntryBox*) entry)->emul_esd; + break; + case GF_ISOM_BOX_TYPE_SVC1: + if ((mdia->mediaTrack->extractor_mode & 0x0000FFFF) != GF_ISOM_NALU_EXTRACT_INSPECT) + AVC_RewriteESDescriptorEx((GF_MPEGVisualSampleEntryBox*) entry, mdia); + else + AVC_RewriteESDescriptorEx((GF_MPEGVisualSampleEntryBox*) entry, NULL); + esd = ((GF_MPEGVisualSampleEntryBox*) entry)->emul_esd; + break; case GF_ISOM_BOX_TYPE_SHC1: case GF_ISOM_BOX_TYPE_SHV1: + if ((mdia->mediaTrack->extractor_mode & 0x0000FFFF) != GF_ISOM_NALU_EXTRACT_INSPECT) + HEVC_RewriteESDescriptorEx((GF_MPEGVisualSampleEntryBox*) entry, mdia); + else + HEVC_RewriteESDescriptorEx((GF_MPEGVisualSampleEntryBox*) entry, NULL); esd = ((GF_MPEGVisualSampleEntryBox*) entry)->emul_esd; break; case GF_ISOM_BOX_TYPE_MP4A: @@ -748,8 +760,8 @@ GF_Err Media_AddSample(GF_MediaBox *mdia, u64 data_offset, GF_ISOSample *sample, //The first non sync sample we see must create a syncTable if (sample->IsRAP) { - //insert it only if we have a sync table - if (stbl->SyncSample) { + //insert it only if we have a sync table and if we have an IDR slice + if (stbl->SyncSample && (sample->IsRAP == 1)){ e = stbl_AddRAP(stbl->SyncSample, sampleNumber); if (e) return e; } diff --git a/src/isomedia/movie_fragments.c b/src/isomedia/movie_fragments.c index 21b9cf4..7f49186 100644 --- a/src/isomedia/movie_fragments.c +++ b/src/isomedia/movie_fragments.c @@ -1774,12 +1774,16 @@ GF_Err gf_isom_fragment_add_sai(GF_ISOFile *output, GF_ISOFile *input, u32 Track GF_TrackBox *src_trak = gf_isom_get_track_from_file(input, TrackID); u32 boxType; GF_SampleEncryptionBox *senc; + u8 IV_size; + u32 IsEncrypted; if (!traf) return GF_BAD_PARAM; sai = NULL; + gf_isom_get_sample_cenc_info(input, trackNum, SampleNum, &IsEncrypted, &IV_size, NULL); e = gf_isom_cenc_get_sample_aux_info(input, trackNum, SampleNum, &sai, &boxType); if (e) return e; + sai->IV_size = IV_size; switch (boxType) { case GF_ISOM_BOX_UUID_PSEC: @@ -1813,7 +1817,7 @@ GF_Err gf_isom_fragment_add_sai(GF_ISOFile *output, GF_ISOFile *input, u32 Track gf_list_add(senc->samp_aux_info, sai); if (sai->subsample_count) senc->flags = 0x00000002; - gf_isom_cenc_set_saiz_saio(senc, NULL, traf, 18+6*sai->subsample_count); + gf_isom_cenc_set_saiz_saio(senc, NULL, traf, IsEncrypted ? 
IV_size+2+6*sai->subsample_count : 0); } @@ -1883,7 +1887,7 @@ GF_Err gf_isom_fragment_add_subsample(GF_ISOFile *movie, u32 TrackID, u32 subSam return gf_isom_add_subsample_info(traf->subs, last_sample, subSampleSize, priority, reserved, discardable); } -GF_Err gf_isom_fragment_copy_subsample(GF_ISOFile *dest, u32 TrackID, GF_ISOFile *orig, u32 track, u32 sampleNumber) +GF_Err gf_isom_fragment_copy_subsample(GF_ISOFile *dest, u32 TrackID, GF_ISOFile *orig, u32 track, u32 sampleNumber, Bool sgpd_in_traf) { u32 i, count, last_sample; GF_SubSampleInfoEntry *sub_sample; @@ -1961,11 +1965,12 @@ GF_Err gf_isom_fragment_copy_subsample(GF_ISOFile *dest, u32 TrackID, GF_ISOFile first_sample_in_entry = last_sample_in_entry+1; continue; } - /*found our sample, add it to trak->sampleGroups*/ + if (!traf->sampleGroups) traf->sampleGroups = gf_list_new(); - e = gf_isom_add_sample_group_entry(traf->sampleGroups, 0, sg->grouping_type, sg->sample_entries[j].group_description_index); + /*found our sample, add it to trak->sampleGroups*/ + e = gf_isom_copy_sample_group_entry_to_traf(traf, trak->Media->information->sampleTable, sg->grouping_type, sg->sample_entries[j].group_description_index, sgpd_in_traf); break; } } diff --git a/src/isomedia/stbl_read.c b/src/isomedia/stbl_read.c index 621feeb..fb5f8ce 100644 --- a/src/isomedia/stbl_read.c +++ b/src/isomedia/stbl_read.c @@ -175,6 +175,9 @@ GF_Err stbl_GetSampleDTS_and_Duration(GF_TimeToSampleBox *stts, u32 SampleNumber GF_SttsEntry *ent; (*DTS) = 0; + if (duration) { + *duration = 0; + } if (!stts || !SampleNumber) return GF_BAD_PARAM; ent = NULL; diff --git a/src/isomedia/stbl_write.c b/src/isomedia/stbl_write.c index 04c022a..dec328f 100644 --- a/src/isomedia/stbl_write.c +++ b/src/isomedia/stbl_write.c @@ -870,7 +870,7 @@ GF_Err stbl_RemoveDTS(GF_SampleTableBox *stbl, u32 sampleNumber, u32 LastAUDefDu } else { stts->entries[0].sampleDelta = (u32) DTSs[1] /*- DTS[0]==0 */; } - for (i=0; iSampleSize->sampleCount-1; i++) { + for (i=1; iSampleSize->sampleCount-1; i++) { if (i+1 == stbl->SampleSize->sampleCount-1) { //and by default, our last sample has the same delta as the prev // stts->entries[j].sampleCount++; @@ -879,6 +879,10 @@ GF_Err stbl_RemoveDTS(GF_SampleTableBox *stbl, u32 sampleNumber, u32 LastAUDefDu } else { j++; stts->nb_entries++; + if (j+1==stts->alloc_size) { + stts->alloc_size++; + stts->entries = gf_realloc(stts->entries, sizeof(GF_SttsEntry) * stts->alloc_size); + } stts->entries[j].sampleCount = 1; stts->entries[j].sampleDelta = (u32) (DTSs[i+1] - DTSs[i]); } @@ -1031,20 +1035,19 @@ GF_Err stbl_RemoveRAP(GF_SampleTableBox *stbl, u32 sampleNumber) stss->alloc_size = stss->nb_entries = 0; return GF_OK; } - //the real pain is that we may actually not have to change anything.. - for (i=0; inb_entries; i++) { - if (sampleNumber == stss->sampleNumbers[i]) goto found; - } - //nothing to do - return GF_OK; -found: - //a small opt: the sample numbers are in order... 
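/* Editor's worked example (illustration only, not part of the imported patch):
 * gf_isom_cenc_set_saiz_saio() in gf_isom_fragment_add_sai above now sizes the
 * per-sample auxiliary information as IV_size + 2 (subsample_count) + 6 bytes per
 * subsample range, instead of the previous hard-coded 18 + 6*count (i.e. IV_size == 16).
 * For an AES-CTR sample with a 16-byte IV split into 3 clear/protected ranges:
 *     16 + 2 + 6*3 = 36 bytes
 * with an 8-byte IV the same sample would need 8 + 2 + 18 = 28 bytes, and a
 * non-encrypted sample contributes 0. The figures are made up for the example. */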
- i++; - for (;inb_entries; i++) { - stss->sampleNumbers[i-1] = stss->sampleNumbers[i]; + for (i=0; inb_entries; i++) { + //found the sample + if (sampleNumber == stss->sampleNumbers[i]) { + memmove(&stss->sampleNumbers[i], &stss->sampleNumbers[i+1], sizeof(u32)* (stss->nb_entries-i-1) ); + stss->nb_entries--; + } + + if (sampleNumber < stss->sampleNumbers[i]) { + assert(stss->sampleNumbers[i]); + stss->sampleNumbers[i]--; + } } - stss->nb_entries -= 1; return GF_OK; } diff --git a/src/isomedia/track.c b/src/isomedia/track.c index 6a7cfd5..53e2162 100644 --- a/src/isomedia/track.c +++ b/src/isomedia/track.c @@ -396,6 +396,44 @@ GF_Err SetTrackDuration(GF_TrackBox *trak) #ifndef GPAC_DISABLE_ISOM_FRAGMENTS +Bool gf_isom_is_identical_sgpd(void *ptr1, void *ptr2, u32 grouping_type) +{ + GF_BitStream *bs1, *bs2; + char *buf1, *buf2; + u32 len1, len2; + Bool res = GF_FALSE; + + if (!ptr1 || !ptr2) + return GF_FALSE; + + bs1 = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); + if (grouping_type) { + sgpd_write_entry(grouping_type, ptr1, bs1); + } else { + sgpd_Write((GF_Box *)ptr1, bs1); + } + gf_bs_get_content(bs1, &buf1, &len1); + gf_bs_del(bs1); + + bs2 = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); + if (grouping_type) { + sgpd_write_entry(grouping_type, ptr2, bs2); + } else { + sgpd_Write((GF_Box *)ptr2, bs2); + } + gf_bs_get_content(bs2, &buf2, &len2); + gf_bs_del(bs2); + + + if ((len1==len2) && !memcmp(buf1, buf2, len1)) + res = GF_TRUE; + + gf_free(buf1); + gf_free(buf2); + + return res; +} + GF_Err MergeTrack(GF_TrackBox *trak, GF_TrackFragmentBox *traf, u64 moof_offset, Bool is_first_merge) { u32 i, j, chunk_size; @@ -512,6 +550,9 @@ GF_Err MergeTrack(GF_TrackBox *trak, GF_TrackFragmentBox *traf, u64 moof_offset, if (traf->sampleGroups) { GF_List *groups; GF_List *groupDescs; + Bool is_identical_sgpd = GF_TRUE; + u32 *new_idx = NULL; + if (!trak->Media->information->sampleTable->sampleGroups) trak->Media->information->sampleTable->sampleGroups = gf_list_new(); @@ -535,14 +576,36 @@ GF_Err MergeTrack(GF_TrackBox *trak, GF_TrackFragmentBox *traf, u64 moof_offset, } /*merge descriptions*/ else { - u32 idx = gf_list_count(new_sgdesc->group_descriptions); - for (j=idx; jgroup_descriptions); j++) { - void *ptr = gf_list_get(sgdesc->group_descriptions, j); - if (ptr) { - gf_list_add(new_sgdesc->group_descriptions, ptr); - gf_list_rem(sgdesc->group_descriptions, j); - j--; + u32 count; + + is_identical_sgpd = gf_isom_is_identical_sgpd(new_sgdesc, sgdesc, 0); + if (is_identical_sgpd) + continue; + + new_idx = (u32 *)gf_malloc(gf_list_count(sgdesc->group_descriptions)*sizeof(u32)); + count = 0; + while (gf_list_count(sgdesc->group_descriptions)) { + void *sgpd_entry = gf_list_get(sgdesc->group_descriptions, 0); + Bool new_entry = GF_TRUE; + + for (j = 0; j < gf_list_count(new_sgdesc->group_descriptions); j++) { + void *ptr = gf_list_get(new_sgdesc->group_descriptions, j); + if (gf_isom_is_identical_sgpd(sgpd_entry, ptr, new_sgdesc->grouping_type)) { + new_idx[count] = j + 1; + count ++; + new_entry = GF_FALSE; + gf_free(sgpd_entry); + break; + } + } + + if (new_entry) { + gf_list_add(new_sgdesc->group_descriptions, sgpd_entry); + new_idx[count] = gf_list_count(new_sgdesc->group_descriptions); + count ++; } + + gf_list_rem(sgdesc->group_descriptions, 0); } } } @@ -566,21 +629,36 @@ GF_Err MergeTrack(GF_TrackBox *trak, GF_TrackFragmentBox *traf, u64 moof_offset, stbl_group->version = frag_group->version; gf_list_add(groups, stbl_group); } - if (frag_group->entry_count && stbl_group->entry_count && - 
(frag_group->sample_entries[0].group_description_index==stbl_group->sample_entries[stbl_group->entry_count-1].group_description_index) - ) { - stbl_group->sample_entries[stbl_group->entry_count - 1].sample_count += frag_group->sample_entries[0].sample_count; - if (frag_group->entry_count>1) { - stbl_group->sample_entries = gf_realloc(stbl_group->sample_entries, sizeof(GF_SampleGroupEntry) * (stbl_group->entry_count + frag_group->entry_count - 1)); - memcpy(&stbl_group->sample_entries[stbl_group->entry_count], &frag_group->sample_entries[1], sizeof(GF_SampleGroupEntry) * (frag_group->entry_count - 1)); - stbl_group->entry_count += frag_group->entry_count - 1; + + if (is_identical_sgpd) { + //adjust sgpd index: in traf index start at 0x1001 + for (j = 0; j < frag_group->entry_count; j++) + frag_group->sample_entries[j].group_description_index &= 0x0FFFF; + if (frag_group->entry_count && stbl_group->entry_count && + (frag_group->sample_entries[0].group_description_index==stbl_group->sample_entries[stbl_group->entry_count-1].group_description_index) + ) { + stbl_group->sample_entries[stbl_group->entry_count - 1].sample_count += frag_group->sample_entries[0].sample_count; + if (frag_group->entry_count>1) { + stbl_group->sample_entries = gf_realloc(stbl_group->sample_entries, sizeof(GF_SampleGroupEntry) * (stbl_group->entry_count + frag_group->entry_count - 1)); + memcpy(&stbl_group->sample_entries[stbl_group->entry_count], &frag_group->sample_entries[1], sizeof(GF_SampleGroupEntry) * (frag_group->entry_count - 1)); + stbl_group->entry_count += frag_group->entry_count - 1; + } + } else { + stbl_group->sample_entries = gf_realloc(stbl_group->sample_entries, sizeof(GF_SampleGroupEntry) * (stbl_group->entry_count + frag_group->entry_count)); + memcpy(&stbl_group->sample_entries[stbl_group->entry_count], &frag_group->sample_entries[0], sizeof(GF_SampleGroupEntry) * frag_group->entry_count); + stbl_group->entry_count += frag_group->entry_count; } } else { stbl_group->sample_entries = gf_realloc(stbl_group->sample_entries, sizeof(GF_SampleGroupEntry) * (stbl_group->entry_count + frag_group->entry_count)); + //adjust sgpd index + for (j = 0; j < frag_group->entry_count; j++) + frag_group->sample_entries[j].group_description_index = new_idx[j]; memcpy(&stbl_group->sample_entries[stbl_group->entry_count], &frag_group->sample_entries[0], sizeof(GF_SampleGroupEntry) * frag_group->entry_count); stbl_group->entry_count += frag_group->entry_count; } } + + if (new_idx) gf_free(new_idx); } if (gf_isom_is_cenc_media(trak->moov->mov, gf_isom_get_tracknum_from_id(trak->moov, trak->Header->trackID), 1)) { @@ -655,7 +733,7 @@ GF_Err MergeTrack(GF_TrackBox *trak, GF_TrackFragmentBox *traf, u64 moof_offset, /*cur_position = gf_bs_get_position(trak->moov->mov->movieFileMap->bs); gf_bs_seek(trak->moov->mov->movieFileMap->bs, offset); - buffer = (char *)malloc(size); + buffer = (char *)gf_malloc(size); gf_bs_read_data(trak->moov->mov->movieFileMap->bs, buffer, size); gf_bs_seek(trak->moov->mov->movieFileMap->bs, cur_position); diff --git a/src/mcrypt/cbc.c b/src/mcrypt/cbc.c index 7f25977..eec3e9a 100644 --- a/src/mcrypt/cbc.c +++ b/src/mcrypt/cbc.c @@ -30,8 +30,9 @@ typedef struct cbc_buf { /* CBC MODE */ -static GF_Err _init_mcrypt( CBC_BUFFER* buf,void *key, int lenofkey, void *IV, int size) +static GF_Err _init_mcrypt( void* _buf,void *key, int lenofkey, void *IV, int size) { + CBC_BUFFER* buf = (CBC_BUFFER* )_buf; /* For cbc */ buf->previous_ciphertext = buf->previous_cipher = NULL; @@ -58,9 +59,10 @@ static GF_Err 
_init_mcrypt( CBC_BUFFER* buf,void *key, int lenofkey, void *IV, i return GF_OUT_OF_MEM; } -static GF_Err _mcrypt_set_state( CBC_BUFFER* buf, void *IV, int size) +static GF_Err _mcrypt_set_state( void* _buf, void *IV, int size) { /* For cbc */ + CBC_BUFFER* buf = (CBC_BUFFER* )_buf; memcpy(buf->previous_ciphertext, IV, size); memcpy(buf->previous_cipher, IV, size); @@ -68,8 +70,9 @@ static GF_Err _mcrypt_set_state( CBC_BUFFER* buf, void *IV, int size) return GF_OK; } -static GF_Err _mcrypt_get_state( CBC_BUFFER* buf, void *IV, int *size) +static GF_Err _mcrypt_get_state( void* _buf, void *IV, int *size) { + CBC_BUFFER* buf = (CBC_BUFFER* )_buf; if (*size < buf->blocksize) { *size = buf->blocksize; return GF_BAD_PARAM; @@ -82,13 +85,15 @@ static GF_Err _mcrypt_get_state( CBC_BUFFER* buf, void *IV, int *size) } -static void _end_mcrypt( CBC_BUFFER* buf) { +static void _end_mcrypt( void* _buf) { + CBC_BUFFER* buf = (CBC_BUFFER* )_buf; gf_free(buf->previous_ciphertext); gf_free(buf->previous_cipher); } -static GF_Err _mcrypt( CBC_BUFFER* buf, void *plaintext, int len, int blocksize, void* akey, void (*func)(void*,void*), void (*func2)(void*,void*)) +static GF_Err _mcrypt( void* _buf, void *plaintext, int len, int blocksize, void* akey, void (*func)(void*,void*), void (*func2)(void*,void*)) { + CBC_BUFFER* buf = (CBC_BUFFER* )_buf; u32 *fplain = plaintext; u32 *plain; int dblock, dlen, i, j; @@ -117,8 +122,9 @@ static GF_Err _mcrypt( CBC_BUFFER* buf, void *plaintext, int len, int blocksize, -static GF_Err _mdecrypt( CBC_BUFFER* buf, void *ciphertext, int len, int blocksize,void* akey, void (*func)(void*,void*), void (*func2)(void*,void*)) +static GF_Err _mdecrypt( void* _buf, void *ciphertext, int len, int blocksize,void* akey, void (*func)(void*,void*), void (*func2)(void*,void*)) { + CBC_BUFFER* buf = (CBC_BUFFER* )_buf; u32 *cipher; u32 *fcipher = ciphertext; int i, j, dlen, dblock; diff --git a/src/media_tools/av_parsers.c b/src/media_tools/av_parsers.c index 8885d03..83b1817 100644 --- a/src/media_tools/av_parsers.c +++ b/src/media_tools/av_parsers.c @@ -3298,28 +3298,192 @@ void profile_tier_level(GF_BitStream *bs, Bool ProfilePresentFlag, u8 MaxNumSubL } } -void bit_rate_pic_rate_info(GF_BitStream *bs, u8 level_low, u8 level_high, HEVC_VPS *vps) +static u32 scalability_type_to_idx(HEVC_VPS *vps, u32 scalability_type) { - u8 i; - for (i=level_low; i<=level_high; i++) { - Bool bit_rate_info_present_flag = gf_bs_read_int(bs, 1); - Bool pic_rate_info_present_flag = gf_bs_read_int(bs, 1); - if (bit_rate_info_present_flag) { - vps->rates[i].avg_bit_rate = gf_bs_read_int(bs, 16); - vps->rates[i].max_bit_rate = gf_bs_read_int(bs, 16); + u32 idx = 0, type; + for (type=0; type < scalability_type; type++) { + idx += (vps->scalability_mask[type] ? 
1 : 0 ); + } + return idx; +} + +#define SHVC_VIEW_ORDER_INDEX 1 +#define SHVC_SCALABILITY_INDEX 2 + +static u32 shvc_get_scalability_id(HEVC_VPS *vps, u32 layer_id_in_vps, u32 scalability_type ) +{ + u32 idx; + if (!vps->scalability_mask[scalability_type]) return 0; + idx = scalability_type_to_idx(vps, scalability_type); + return vps->dimension_id[layer_id_in_vps][idx]; +} + +static u32 shvc_get_view_index(HEVC_VPS *vps, u32 id) +{ + return shvc_get_scalability_id(vps, vps->layer_id_in_vps[id], SHVC_VIEW_ORDER_INDEX); +} + +static u32 shvc_get_num_views(HEVC_VPS *vps) +{ + u32 numViews = 1, i; + for (i=0; imax_layers; i++ ) { + u32 layer_id = vps->layer_id_in_nuh[i]; + if (i>0 && ( shvc_get_view_index( vps, layer_id) != shvc_get_scalability_id( vps, i-1, SHVC_VIEW_ORDER_INDEX) )) { + numViews++; + } + } + return numViews; +} + +static void shvc_parse_rep_format(HEVC_RepFormat *fmt, GF_BitStream *bs) +{ + u8 chroma_bitdepth_present_flag = gf_bs_read_int(bs, 1); + fmt->pic_width_luma_samples = gf_bs_read_int(bs, 16); + fmt->pic_height_luma_samples = gf_bs_read_int(bs, 16); + if (chroma_bitdepth_present_flag) { + fmt->chroma_format_idc = gf_bs_read_int(bs, 2); + + if (fmt->chroma_format_idc == 3) + fmt->separate_colour_plane_flag = gf_bs_read_int(bs, 1); + fmt->bit_depth_luma = 1 + gf_bs_read_int(bs, 4); + fmt->bit_depth_chroma = 1 + gf_bs_read_int(bs, 4); + } +} + +static void hevc_parse_vps_extension(HEVC_VPS *vps, GF_BitStream *bs) +{ + u8 splitting_flag, vps_nuh_layer_id_present_flag, view_id_len; + u32 i, j, NumScalabilityTypes, num_profile_tier_level, num_add_output_layer_sets, NumOutputLayerSets; + u8 dimension_id_len[62]; + u8 direct_dependency_flag[62][62]; + u8 /*avc_base_layer_flag, */vps_number_layer_sets, /*default_one_target_output_layer_flag, */rep_format_idx_present_flag; + + /*avc_base_layer_flag = */gf_bs_read_int(bs, 1); + splitting_flag = gf_bs_read_int(bs, 1); + NumScalabilityTypes =0; + for (i=0; i<16; i++) { + vps->scalability_mask[i] = gf_bs_read_int(bs, 1); + NumScalabilityTypes += vps->scalability_mask[i]; + } + dimension_id_len[0] = 0; + for (i=0; i<(NumScalabilityTypes - splitting_flag); i++) { + dimension_id_len[i] = 1 + gf_bs_read_int(bs, 3); + } + + vps->layer_id_in_nuh[0] = 0; + vps->layer_id_in_vps[0] = 0; + vps_nuh_layer_id_present_flag = gf_bs_read_int(bs, 1); + for (i=1; imax_layers; i++) { + if (vps_nuh_layer_id_present_flag) { + vps->layer_id_in_nuh[i] = gf_bs_read_int(bs, 6); + } else { + vps->layer_id_in_nuh[i] = i; + } + vps->layer_id_in_vps[vps->layer_id_in_nuh[i]] = i; + + if (!splitting_flag) { + for (j=0; jdimension_id[i][j] = gf_bs_read_int(bs, dimension_id_len[j]); + } + } + } + + view_id_len = gf_bs_read_int(bs, 4); + for( i = 0; i < shvc_get_num_views(vps); i++ ){ + /*m_viewIdVal[i] = */ gf_bs_read_int(bs, view_id_len + 1); + } + + for (i=1; imax_layers; i++) { + for (j=0; jmax_layers - 1; i++) { + /*sub_layers_vps_max_minus1[ i ]*/gf_bs_read_int(bs, 3); + } + } + + if (/*max_tid_ref_present_flag = */gf_bs_read_int(bs, 1)) { + for (i=0; imax_layers ; i++) { + for (j= i+1; j < vps->max_layers; j++) { + if (direct_dependency_flag[j][i]) + /*max_tid_il_ref_pics_plus1[ i ][ j ]*/gf_bs_read_int(bs, 3); + } } - if (pic_rate_info_present_flag) { - vps->rates[i].constand_pic_rate_idc = gf_bs_read_int(bs, 2); - vps->rates[i].avg_pic_rate = gf_bs_read_int(bs, 16); + } + /*all_ref_layers_active_flag*/gf_bs_read_int(bs, 1); + + vps_number_layer_sets = 1+gf_bs_read_int(bs, 10); + num_profile_tier_level = 1+gf_bs_read_int(bs, 6); + + for (i=1; i < 
num_profile_tier_level; i++) { + Bool vps_profile_present_flag = gf_bs_read_int(bs, 1); + if (!vps_profile_present_flag) { + /*vps->profile_ref[i] = */gf_bs_read_int(bs, 6); } + profile_tier_level(bs, vps_profile_present_flag, vps->max_sub_layers-1, &vps->ext_ptl[i-1] ); + } + NumOutputLayerSets = vps_number_layer_sets; + if (/*more_output_layer_sets_than_default_flag */gf_bs_read_int(bs, 1)) { + num_add_output_layer_sets = gf_bs_read_int(bs, 10)+1; + NumOutputLayerSets += num_add_output_layer_sets; } + + /*default_one_target_output_layer_flag = 0;*/ + if (NumOutputLayerSets > 1) { + /*default_one_target_output_layer_flag = */gf_bs_read_int(bs, 1); + } + vps->profile_level_tier_idx[0] = 0; + for (i=1; i vps->num_layer_sets - 1) { + GF_LOG(GF_LOG_ERROR, GF_LOG_CODING, ("[HEVC] VPS Extensions: not supported number of layers\n")); + } + nb_bits = 1; + while ((u32) (1 << nb_bits) < num_profile_tier_level) { + nb_bits++; + } + vps->profile_level_tier_idx[i] = gf_bs_read_int(bs, nb_bits); + } + + if (vps->max_layers - 1 > 0 ) + /*alt_output_layer_flag*/gf_bs_read_int(bs, 1); + + rep_format_idx_present_flag = gf_bs_read_int(bs, 1); + if (rep_format_idx_present_flag ) { + vps->num_rep_formats = 1 + gf_bs_read_int(bs, 8); + } else { + vps->num_rep_formats = vps->max_layers; + } + for (i=0; inum_rep_formats; i++) { + shvc_parse_rep_format(&vps->rep_formats[i], bs); + } + vps->rep_format_idx[0] = 0; + for (i=1; imax_layers; i++) { + if (rep_format_idx_present_flag) { + if (vps->num_rep_formats > 1) { + vps->rep_format_idx[i] = gf_bs_read_int(bs, 8); + } else { + vps->rep_format_idx[i] = 0; + } + } else { + vps->rep_format_idx[i] = i; + } + } + //TODO - we don't use the rest ... + } +GF_EXPORT s32 gf_media_hevc_read_vps(char *data, u32 size, HEVCState *hevc) { GF_BitStream *bs; + u8 vps_sub_layer_ordering_info_present_flag, vps_extension_flag; char *data_without_emulation_bytes = NULL; u32 data_without_emulation_bytes_size = 0; + u32 i, j; s32 vps_id = -1; HEVC_VPS *vps; @@ -3335,22 +3499,55 @@ s32 gf_media_hevc_read_vps(char *data, u32 size, HEVCState *hevc) if (vps_id>=16) goto exit; - vps = &hevc->vps[vps_id]; + vps = &hevc->vps[vps_id]; if (!vps->state) { vps->id = vps_id; vps->state = 1; } /* vps_reserved_three_2bits = */ gf_bs_read_int(bs, 2); - /* vps_reserved_zero_6bits = */ gf_bs_read_int(bs, 6); - vps->max_sub_layer = gf_bs_read_int(bs, 3) + 1; + vps->max_layers = 1 + gf_bs_read_int(bs, 6); + vps->max_sub_layers = gf_bs_read_int(bs, 3) + 1; vps->temporal_id_nesting = gf_bs_read_int(bs, 1); /* vps_reserved_ffff_16bits = */ gf_bs_read_int(bs, 16); - profile_tier_level(bs, 1, vps->max_sub_layer-1, &vps->ptl); - bit_rate_pic_rate_info(bs, 0, vps->max_sub_layer-1, vps); - + profile_tier_level(bs, 1, vps->max_sub_layers-1, &vps->ptl); + + vps_sub_layer_ordering_info_present_flag = gf_bs_read_int(bs, 1); + for (i=(vps_sub_layer_ordering_info_present_flag ? 
0 : vps->max_sub_layers - 1); i < vps->max_sub_layers; i++) { + /*vps_max_dec_pic_buffering_minus1[i] = */bs_get_ue(bs); + /*vps_max_num_reorder_pics[i] = */bs_get_ue(bs); + /*vps_max_latency_increase_plus1[i] = */bs_get_ue(bs); + } + vps->max_layer_id = gf_bs_read_int(bs, 6); + vps->num_layer_sets = bs_get_ue(bs) + 1; + for (i=1; i < vps->num_layer_sets; i++) { + for (j=0; j <= vps->max_layer_id; j++) { + /*layer_id_included_flag[ i ][ j ]*/gf_bs_read_int(bs, 1); + } + } + if (/*vps_timing_info_present_flag*/gf_bs_read_int(bs, 1)) { + u32 vps_num_hrd_parameters; + /*u32 vps_num_units_in_tick = */gf_bs_read_int(bs, 32); + /*u32 vps_time_scale = */gf_bs_read_int(bs, 32); + if (/*vps_poc_proportional_to_timing_flag*/gf_bs_read_int(bs, 1)) { + /*vps_num_ticks_poc_diff_one_minus1*/bs_get_ue(bs); + } + vps_num_hrd_parameters = bs_get_ue(bs); + for( i = 0; i < vps_num_hrd_parameters; i++ ) { + //Bool cprms_present_flag=1; + /*hrd_layer_set_idx[ i ] = */bs_get_ue(bs); + if (i>0) + /*cprms_present_flag = */gf_bs_read_int(bs, 1) ; + // hevc_parse_hrd_parameters(cprms_present_flag, vps->max_sub_layers - 1); + } + } + vps_extension_flag = gf_bs_read_int(bs, 1); + if (vps_extension_flag ) { + gf_bs_align(bs); + hevc_parse_vps_extension(vps, bs); + vps_extension_flag = gf_bs_read_int(bs, 1); + } - //and we don't care about the rest for now exit: gf_bs_del(bs); gf_free(data_without_emulation_bytes); @@ -3371,15 +3568,15 @@ static s32 gf_media_hevc_read_sps_ex(char *data, u32 size, HEVCState *hevc, u32 char *data_without_emulation_bytes = NULL; u32 data_without_emulation_bytes_size = 0; s32 vps_id, sps_id = -1; - u8 max_sub_layers_minus1, flag; - u8 layer_id/*, temporal_id*/; - Bool update_rep_format_flag; + u8 max_sub_layers_minus1, update_rep_format_flag, flag; + u8 layer_id/*, temporal_id, sps_rep_format_idx*/; + Bool scaling_list_enable_flag; u32 i, nb_CTUs, depth; u32 log2_diff_max_min_luma_coding_block_size; u32 log2_min_transform_block_size, log2_min_luma_coding_block_size; - Bool sps_sub_layer_ordering_info_present_flag; HEVC_SPS *sps; + HEVC_VPS *vps; HEVC_ProfileTierLevel ptl; if (vui_flag_pos) *vui_flag_pos = 0; @@ -3388,7 +3585,6 @@ static s32 gf_media_hevc_read_sps_ex(char *data, u32 size, HEVCState *hevc, u32 data_without_emulation_bytes = gf_malloc(size*sizeof(char)); data_without_emulation_bytes_size = avc_remove_emulation_bytes(data, data_without_emulation_bytes, size); bs = gf_bs_new(data_without_emulation_bytes, data_without_emulation_bytes_size, GF_BITSTREAM_READ); -// bs = gf_bs_new(data, size, GF_BITSTREAM_READ); if (!bs) goto exit; gf_bs_read_int(bs, 7); @@ -3401,10 +3597,8 @@ static s32 gf_media_hevc_read_sps_ex(char *data, u32 size, HEVCState *hevc, u32 } memset(&ptl, 0, sizeof(ptl)); - - //fixme with latest shvc syntax !! -// if (layer_id == 0) - { + max_sub_layers_minus1 = 0; + if (layer_id == 0) { max_sub_layers_minus1 = gf_bs_read_int(bs, 3); /*temporal_id_nesting_flag = */gf_bs_read_int(bs, 1); profile_tier_level(bs, 1, max_sub_layers_minus1, &ptl); @@ -3415,8 +3609,6 @@ static s32 gf_media_hevc_read_sps_ex(char *data, u32 size, HEVCState *hevc, u32 sps_id = -1; goto exit; } - //fixme with latest shvc syntax !! 
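/* Editor's sketch (illustration only, not part of the imported patch): both the VPS
 * and SPS readers above first run the NAL payload through avc_remove_emulation_bytes()
 * so that gf_bs_read_int()/bs_get_ue() operate on raw RBSP. The underlying idea is the
 * standard one: a 0x03 byte following two zero bytes is an emulation_prevention_three_byte
 * and is dropped. A simplified stand-in, assuming GPAC's u8/u32 typedefs;
 * strip_epb_sketch() is a hypothetical name, not a GPAC API: */
static u32 strip_epb_sketch(const u8 *in, u8 *out, u32 size)
{
    u32 i, n = 0, zeros = 0;
    for (i = 0; i < size; i++) {
        /* 0x00 0x00 0x03 followed by 0x00..0x03: drop the 0x03 */
        if ((zeros == 2) && (in[i] == 0x03) && (i + 1 < size) && (in[i + 1] <= 0x03)) {
            zeros = 0;
            continue;
        }
        zeros = (in[i] == 0x00) ? (zeros + 1) : 0;
        out[n++] = in[i];
    }
    return n;
}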
- if (layer_id) sps_id=1; sps = &hevc->sps[sps_id]; if (!sps->state) { @@ -3425,14 +3617,27 @@ static s32 gf_media_hevc_read_sps_ex(char *data, u32 size, HEVCState *hevc, u32 sps->vps_id = vps_id; } sps->ptl = ptl; + vps = &hevc->vps[vps_id]; + //sps_rep_format_idx = 0; + update_rep_format_flag = 0; if (layer_id > 0) { -// update_rep_format_flag = gf_bs_read_int(bs, 1); - update_rep_format_flag = 1; + update_rep_format_flag = gf_bs_read_int(bs, 1); + if (update_rep_format_flag) { + sps->rep_format_idx = gf_bs_read_int(bs, 8); + } else { + sps->rep_format_idx = vps->rep_format_idx[layer_id]; + } + sps->width = vps->rep_formats[sps->rep_format_idx].pic_width_luma_samples; + sps->height = vps->rep_formats[sps->rep_format_idx].pic_height_luma_samples; + sps->chroma_format_idc = vps->rep_formats[sps->rep_format_idx].chroma_format_idc; + sps->bit_depth_luma = vps->rep_formats[sps->rep_format_idx].bit_depth_luma; + sps->bit_depth_chroma = vps->rep_formats[sps->rep_format_idx].bit_depth_chroma; + sps->separate_colour_plane_flag = vps->rep_formats[sps->rep_format_idx].separate_colour_plane_flag; + + //TODO this is crude ... + sps->ptl = vps->ext_ptl[0]; } else { - update_rep_format_flag = 1; - } - if (update_rep_format_flag) { sps->chroma_format_idc = bs_get_ue(bs); if (sps->chroma_format_idc==3) sps->separate_colour_plane_flag = gf_bs_read_int(bs, 1); @@ -3446,18 +3651,20 @@ static s32 gf_media_hevc_read_sps_ex(char *data, u32 size, HEVCState *hevc, u32 sps->cw_top = bs_get_ue(bs); sps->cw_bottom = bs_get_ue(bs); } - if (update_rep_format_flag) { + if (layer_id == 0) { sps->bit_depth_luma = 8 + bs_get_ue(bs); sps->bit_depth_chroma = 8 + bs_get_ue(bs); } sps->log2_max_pic_order_cnt_lsb = 4 + bs_get_ue(bs); - sps_sub_layer_ordering_info_present_flag = gf_bs_read_int(bs, 1); - for(i=sps_sub_layer_ordering_info_present_flag ? 0 : max_sub_layers_minus1; i<=max_sub_layers_minus1; i++) { - /*max_dec_pic_buffering = */ bs_get_ue(bs); - /*num_reorder_pics = */ bs_get_ue(bs); - /*max_latency_increase = */ bs_get_ue(bs); + if (layer_id == 0) { + sps_sub_layer_ordering_info_present_flag = gf_bs_read_int(bs, 1); + for(i=sps_sub_layer_ordering_info_present_flag ? 
0 : max_sub_layers_minus1; i<=max_sub_layers_minus1; i++) { + /*max_dec_pic_buffering = */ bs_get_ue(bs); + /*num_reorder_pics = */ bs_get_ue(bs); + /*max_latency_increase = */ bs_get_ue(bs); + } } log2_min_luma_coding_block_size = 3 + bs_get_ue(bs); @@ -3483,9 +3690,20 @@ static s32 gf_media_hevc_read_sps_ex(char *data, u32 size, HEVCState *hevc, u32 sps->bitsSliceSegmentAddress++; } - if (/*scaling_list_enable_flag = */ gf_bs_read_int(bs, 1)) { - if (/*sps_scaling_list_data_present_flag=*/gf_bs_read_int(bs, 1) ) { - //scaling_list_data( ) + scaling_list_enable_flag = gf_bs_read_int(bs, 1); + if (scaling_list_enable_flag) { + Bool sps_infer_scaling_list_flag = 0; + /*u8 sps_scaling_list_ref_layer_id = 0;*/ + if (layer_id>0) { + sps_infer_scaling_list_flag = gf_bs_read_int(bs, 1); + } + + if (sps_infer_scaling_list_flag) { + /*sps_scaling_list_ref_layer_id = */gf_bs_read_int(bs, 6); + } else { + if (/*sps_scaling_list_data_present_flag=*/gf_bs_read_int(bs, 1) ) { + //scaling_list_data( ) + } } } /*asymmetric_motion_partitions_enabled_flag= */ gf_bs_read_int(bs, 1); @@ -3917,26 +4135,31 @@ GF_Err gf_media_hevc_change_par(GF_HEVCConfig *hvcc, s32 ar_n, s32 ar_d) } GF_EXPORT -GF_Err gf_hevc_get_sps_info(char *sps_data, u32 sps_size, u32 *sps_id, u32 *width, u32 *height, s32 *par_n, s32 *par_d) +GF_Err gf_hevc_get_sps_info_with_state(HEVCState *hevc, char *sps_data, u32 sps_size, u32 *sps_id, u32 *width, u32 *height, s32 *par_n, s32 *par_d) { - HEVCState hevc; s32 idx; - memset(&hevc, 0, sizeof(HEVCState)); - hevc.sps_active_idx = -1; - - idx = gf_media_hevc_read_sps(sps_data, sps_size, &hevc); + idx = gf_media_hevc_read_sps(sps_data, sps_size, hevc); if (idx<0) { return GF_NON_COMPLIANT_BITSTREAM; } if (sps_id) *sps_id = idx; - if (width) *width = hevc.sps[idx].width; - if (height) *height = hevc.sps[idx].height; - if (par_n) *par_n = hevc.sps[idx].aspect_ratio_info_present_flag ? hevc.sps[idx].sar_width : (u32) -1; - if (par_d) *par_d = hevc.sps[idx].aspect_ratio_info_present_flag ? hevc.sps[idx].sar_height : (u32) -1; + if (width) *width = hevc->sps[idx].width; + if (height) *height = hevc->sps[idx].height; + if (par_n) *par_n = hevc->sps[idx].aspect_ratio_info_present_flag ? hevc->sps[idx].sar_width : (u32) -1; + if (par_d) *par_d = hevc->sps[idx].aspect_ratio_info_present_flag ? 
hevc->sps[idx].sar_height : (u32) -1; return GF_OK; } +GF_EXPORT +GF_Err gf_hevc_get_sps_info(char *sps_data, u32 sps_size, u32 *sps_id, u32 *width, u32 *height, s32 *par_n, s32 *par_d) +{ + HEVCState hevc; + memset(&hevc, 0, sizeof(HEVCState)); + hevc.sps_active_idx = -1; + return gf_hevc_get_sps_info_with_state(&hevc, sps_data, sps_size, sps_id, width, height, par_n, par_d); +} + #endif //GPAC_DISABLE_HEVC static u32 AC3_FindSyncCode(u8 *buf, u32 buflen) diff --git a/src/media_tools/dash_client.c b/src/media_tools/dash_client.c index b23d86f..543e4e9 100644 --- a/src/media_tools/dash_client.c +++ b/src/media_tools/dash_client.c @@ -42,11 +42,6 @@ /*set to 1 if you want MPD to use SegmentTemplate if possible instead of SegmentList*/ #define M3U8_TO_MPD_USE_TEMPLATE 0 -/*uncomment to only play the first adaptation set*/ -//#define DEBUG_FIRST_SET_ONLY -/*uncomment to play all but the first adaptation set*/ -//#define DEBUG_SKIP_FIRST_SET - typedef enum { GF_DASH_STATE_STOPPED = 0, /*period setup and playback chain creation*/ @@ -73,6 +68,7 @@ struct __dash_client char *base_url; u32 max_cache_duration, max_width, max_height; + u8 max_bit_per_pixel; u32 auto_switch_count; Bool keep_files, disable_switching, allow_local_mpd_update, enable_buffering, estimate_utc_drift; Bool is_m3u8; @@ -124,7 +120,11 @@ struct __dash_client Bool force_mpd_update; + u32 user_buffer_ms; + u32 min_timeout_between_404, segment_lost_after_ms; + + s32 debug_group_index; }; static void gf_dash_seek_group(GF_DashClient *dash, GF_DASH_Group *group); @@ -239,6 +239,9 @@ struct __dash_group /* maximum representation index we want to download*/ u32 force_max_rep_index; + //start time of currently downloaded segment - for now only used for merging SegmentTimeline, but we should use this to resync across representations ... 
+ u64 current_start_time; + u32 current_timescale; void *udta; }; @@ -251,28 +254,29 @@ static const char *gf_dash_get_mime_type(GF_MPD_SubRepresentation *subrep, GF_MP return NULL; } -static void gf_dash_buffer_off(GF_DASH_Group *group, GF_DashClient *dash) +static void gf_dash_buffer_off(GF_DASH_Group *group) { - if (!dash->enable_buffering) return; + if (!group->dash->enable_buffering) return; if (group->buffering) { - assert(dash->nb_buffering); - dash->nb_buffering--; - if (!dash->nb_buffering) { - dash->dash_io->on_dash_event(dash->dash_io, GF_DASH_EVENT_BUFFER_DONE, -1, GF_OK); + assert(group->dash->nb_buffering); + group->dash->nb_buffering--; + if (!group->dash->nb_buffering) { + group->dash->dash_io->on_dash_event(group->dash->dash_io, GF_DASH_EVENT_BUFFER_DONE, -1, GF_OK); GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Session buffering done\n")); } group->buffering = 0; } } -static void gf_dash_buffer_on(GF_DASH_Group *group, GF_DashClient *dash) +static void gf_dash_buffer_on(GF_DASH_Group *group) { - if (!dash->enable_buffering) return; - if ((group->selection==GF_DASH_GROUP_SELECTED) && !group->buffering) { - if (!dash->nb_buffering) { + if (!group->dash->enable_buffering) return; + + if (!group->buffering) { + if (!group->dash->nb_buffering) { GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Starting session buffering\n")); } - dash->nb_buffering++; + group->dash->nb_buffering++; group->buffering = 1; } } @@ -346,7 +350,7 @@ static void gf_dash_update_buffering(GF_DASH_Group *group, GF_DashClient *dash) dash->dash_io->on_dash_event(dash->dash_io, GF_DASH_EVENT_BUFFERING, -1, GF_OK); if (group->cached[0].duration && group->nb_cached_segments>=group->max_buffer_segments) - gf_dash_buffer_off(group, dash); + gf_dash_buffer_off(group); } } @@ -374,7 +378,7 @@ static void gf_dash_group_timeline_setup(GF_MPD *mpd, GF_DASH_Group *group, u64 GF_MPD_SegmentTimeline *timeline = NULL; GF_MPD_Representation *rep = NULL; u32 shift, timescale; - u64 current_time, availabilityStartTime; + u64 current_time, current_time_no_timeshift, availabilityStartTime; u32 ast_diff, start_number; Double ast_offset = 0; @@ -405,14 +409,14 @@ static void gf_dash_group_timeline_setup(GF_MPD *mpd, GF_DASH_Group *group, u64 u64 utc; sscanf(val, LLU, &utc); group->dash->utc_drift_estimate = (s32) ((s64) fetch_time - (s64) utc); - GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Estimated UTC diff between client and server %d ms\n", group->dash->utc_drift_estimate)); + GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Estimated UTC diff between client and server %d ms (UTC fetch "LLU" - server UTC "LLU" - MPD AST "LLU" - MPD PublishTime "LLU"\n", group->dash->utc_drift_estimate, fetch_time, utc, group->dash->mpd->availabilityStartTime, group->dash->mpd->publishTime)); } else { val = group->dash->dash_io->get_header_value(group->dash->dash_io, group->dash->mpd_dnload, "Date"); if (val) { u64 utc = gf_net_parse_date(val); if (utc) { group->dash->utc_drift_estimate = (s32) ((s64) fetch_time - (s64) utc); - GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Estimated UTC diff between client and server %d ms\n", group->dash->utc_drift_estimate)); + GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Estimated UTC diff between client and server %d ms (UTC fetch "LLU" - server UTC "LLU" - MPD AST "LLU" - MPD PublishTime "LLU"\n", group->dash->utc_drift_estimate, fetch_time, utc, group->dash->mpd->availabilityStartTime, group->dash->mpd->publishTime)); } } } @@ -486,13 +490,22 @@ static void gf_dash_group_timeline_setup(GF_MPD *mpd, GF_DASH_Group *group, u64 } 
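/* Editor's worked example (illustration only, not part of the imported patch): the
 * initial live-edge offset computed just below now distinguishes two cases. With
 * mpd->time_shift_buffer_depth = 30000 ms:
 *   - initial_time_shift_percent = 50   ->  shift = 30000 * 50 / 100 = 15000 ms
 *   - otherwise, user_buffer_ms = 4000  ->  shift = MIN(4000, 30000)  =  4000 ms
 * and playback starts at current_time - shift, clamped to 0 when the shift exceeds
 * the current time. The figures are made up for the example. */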
#endif + current_time_no_timeshift = current_time; if ( ((s32) mpd->time_shift_buffer_depth>=0)) { - shift = mpd->time_shift_buffer_depth; - shift *= group->dash->initial_time_shift_percent; - shift /= 100; - if (current_time < shift) current_time = 0; - else current_time -= shift; + if (group->dash->initial_time_shift_percent) { + shift = mpd->time_shift_buffer_depth; + shift *= group->dash->initial_time_shift_percent; + shift /= 100; + + if (current_time < shift) current_time = 0; + else current_time -= shift; + } else if (group->dash->user_buffer_ms) { + shift = MIN(group->dash->user_buffer_ms, mpd->time_shift_buffer_depth); + + if (current_time < shift) current_time = 0; + else current_time -= shift; + } } timeline = NULL; @@ -541,23 +554,59 @@ static void gf_dash_group_timeline_setup(GF_MPD *mpd, GF_DASH_Group *group, u64 if (timeline) { u64 start_segtime = 0; u64 segtime = 0; + u64 current_time_rescale; + u64 timeline_duration = 0; + u32 count; u32 i, seg_idx = 0; - current_time /= 1000; - current_time *= timescale; - for (i=0; ientries); i++) { + + current_time_rescale = current_time; + current_time_rescale /= 1000; + current_time_rescale *= timescale; + + count = gf_list_count(timeline->entries); + for (i=0; ientries, i); + + if (!i && (current_time_rescale + ent->duration < ent->start_time)) { + current_time_rescale = current_time_no_timeshift * timescale / 1000; + } + timeline_duration += (1+ent->repeat_count)*ent->duration; + + if (i+1 == count) timeline_duration -= ent->duration; + } + + for (i=0; ientries, i); - if (!segtime) start_segtime = segtime = ent->start_time; + if (!segtime) { + start_segtime = segtime = ent->start_time; + + //if current time is before the start of the previous segement, consider our timing is broken + if (current_time_rescale + ent->duration < segtime) { + GF_LOG(GF_LOG_WARNING, GF_LOG_DASH, ("[DASH] current time "LLU" is before start time "LLU" of first segment in timeline (timescale %d) by %g sec - using first segment as starting point\n", current_time_rescale, segtime, timescale, (segtime-current_time_rescale)*1.0/timescale)); + group->download_segment_index = seg_idx; + group->nb_segments_in_rep = count; + group->start_playback_range = (segtime)*1.0/timescale; + group->ast_at_init = availabilityStartTime - (u32) (ast_offset*1000); + group->broken_timing = 1; + return; + } + } repeat = 1+ent->repeat_count; while (repeat) { - if ((current_time >= segtime) && (current_time < segtime + ent->duration)) { + if ((current_time_rescale >= segtime) && (current_time_rescale < segtime + ent->duration)) { + GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Found segment %d for current time "LLU" is in SegmentTimeline ["LLU"-"LLU"] (timecale %d - current index %d)\n", seg_idx, current_time_rescale, start_segtime, segtime + ent->duration, timescale, group->download_segment_index)); + group->download_segment_index = seg_idx; - group->nb_segments_in_rep = seg_idx + 10; - - GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Found segment %d for current time "LLU" is in SegmentTimeline ["LLU"-"LLU"] - cannot estimate current startNumber, default to 0 ...\n", current_time, segtime, segtime + ent->duration)); + group->nb_segments_in_rep = seg_idx + count - i; + group->start_playback_range = (current_time)/1000.0; + group->ast_at_init = availabilityStartTime - (u32) (ast_offset*1000); - group->start_playback_range = (current_time )/1000.0; + //to remove - this is a hack to speedup starting for some strange MPDs which announce the live point as the first segment but have already 
produced the complete timeline + if (group->dash->utc_drift_estimate<0) { + group->ast_at_init -= (timeline_duration - (segtime-start_segtime)) *1000/timescale; + } return; } segtime += ent->duration; @@ -566,7 +615,7 @@ static void gf_dash_group_timeline_setup(GF_MPD *mpd, GF_DASH_Group *group, u64 } } //NOT FOUND !! - GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[DASH] current time "LLU" is NOT in SegmentTimeline ["LLU"-"LLU"] - cannot estimate current startNumber, default to 0 ...\n", current_time, start_segtime, segtime)); + GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[DASH] current time "LLU" is NOT in SegmentTimeline ["LLU"-"LLU"] - cannot estimate current startNumber, default to 0 ...\n", current_time_rescale, start_segtime, segtime)); group->download_segment_index = 0; group->nb_segments_in_rep = 10; group->broken_timing = 1; @@ -783,7 +832,6 @@ GF_Err gf_dash_download_resource(GF_DASHFileIO *dash_io, GF_DASHFileIOSession *s if (! *sess) { *sess = dash_io->create(dash_io, persistent_mode ? 1 : 0, url, group_idx); if (!(*sess)){ - assert(0); GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[DASH] Cannot try to download %s... OUT of memory ?\n", url)); return GF_OUT_OF_MEM; } @@ -1034,12 +1082,12 @@ static u64 gf_dash_segment_timeline_start(GF_MPD_SegmentTimeline *timeline, u32 return start_time; } -static Double gf_dash_get_segment_start_time(GF_DASH_Group *group, Double *segment_duration) +static u64 gf_dash_get_segment_start_time_with_timescale(GF_DASH_Group *group, u64 *segment_duration, u32 *scale) { GF_MPD_Representation *rep; GF_MPD_AdaptationSet *set; GF_MPD_Period *period; - Double start_time; + u64 start_time; u32 timescale; s32 segment_index; u64 duration; @@ -1076,15 +1124,12 @@ static Double gf_dash_get_segment_start_time(GF_DASH_Group *group, Double *segme if (! 
timescale) timescale=1; if (timeline) { - start_time = (Double) gf_dash_segment_timeline_start(timeline, segment_index, &duration); + start_time = gf_dash_segment_timeline_start(timeline, segment_index, &duration); } else { - start_time = segment_index * (Double) duration; - } - start_time /= timescale; - if (segment_duration) { - *segment_duration = (Double) duration; - *segment_duration /= timescale; + start_time = segment_index * duration; } + if (segment_duration) *segment_duration = duration; + if (scale) *scale = timescale; return start_time; } @@ -1107,17 +1152,26 @@ static Double gf_dash_get_segment_start_time(GF_DASH_Group *group, Double *segme if (!timescale) timescale=1; if (timeline) { - start_time = (Double) gf_dash_segment_timeline_start(timeline, segment_index, &duration); + start_time = gf_dash_segment_timeline_start(timeline, segment_index, &duration); } else { - start_time = segment_index * (Double) duration; + start_time = segment_index * duration; } - start_time /= timescale; + if (segment_duration) *segment_duration = duration; + if (scale) *scale = timescale; + return start_time; +} + +static Double gf_dash_get_segment_start_time(GF_DASH_Group *group, Double *segment_duration) +{ + u64 start, dur; + u32 scale; + + start = gf_dash_get_segment_start_time_with_timescale(group, &dur, &scale); if (segment_duration) { - *segment_duration = (Double) duration; - *segment_duration /= timescale; + *segment_duration = (Double) dur; + *segment_duration /= scale; } - - return start_time; + return ((Double)start)/scale; } u64 gf_dash_get_segment_availability_start_time(GF_MPD *mpd, GF_DASH_Group *group, u32 segment_index, u32 *seg_dur_ms) @@ -1198,10 +1252,46 @@ static void gf_dash_resolve_duration(GF_MPD_Representation *rep, GF_MPD_Adaptati } } -static GF_Err gf_dash_merge_segment_timeline(GF_MPD_SegmentList *old_list, GF_MPD_SegmentTemplate *old_template, GF_MPD_SegmentList *new_list, GF_MPD_SegmentTemplate *new_template, Double min_start_time) + +static u32 gf_dash_get_index_in_timeline(GF_MPD_SegmentTimeline *timeline, u64 start, u64 start_timescale, u64 timescale) +{ + u64 start_time = 0; + u32 idx = 0; + u32 i, count, repeat; + count = gf_list_count(timeline->entries); + for (i=0; ientries, i); + + if (!i || ent->start_time) start_time = ent->start_time; + + repeat = ent->repeat_count+1; + while (repeat) { + if (start_timescale==timescale) { + if (start_time == start ) return idx; + } else { + if (start_time*start_timescale == start * timescale) return idx; + } + start_time+=ent->duration; + repeat--; + idx++; + } + } + //end of list in regular case: segment was the last one of the previous list and no changes happend + if (start_timescale==timescale) { + if (start_time == start ) return count; + } else { + if (start_time*start_timescale == start * timescale) return count; + } + + GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[DASH] Error: could not find previous segment start in current timeline ! 
seeking to end of timeline\n")); + return count; +} + + +static GF_Err gf_dash_merge_segment_timeline(GF_DASH_Group *group, GF_DashClient *dash, GF_MPD_SegmentList *old_list, GF_MPD_SegmentTemplate *old_template, GF_MPD_SegmentList *new_list, GF_MPD_SegmentTemplate *new_template, Double min_start_time) { GF_MPD_SegmentTimeline *old_timeline, *new_timeline; - u32 idx; + u32 i, idx, prev_count, timescale; u64 start_time; GF_MPD_SegmentTimelineEntry *first_new_entry; @@ -1213,6 +1303,7 @@ static GF_Err gf_dash_merge_segment_timeline(GF_MPD_SegmentList *old_list, GF_MP } old_timeline = old_list->segment_timeline; new_timeline = new_list->segment_timeline; + timescale = new_list->timescale; } else if (old_template && old_template->segment_timeline) { if (!new_template || !new_template->segment_timeline) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[DASH] Error - cannot update playlist: segment timeline not present in new MPD segmentTemplate\n")); @@ -1220,9 +1311,23 @@ static GF_Err gf_dash_merge_segment_timeline(GF_MPD_SegmentList *old_list, GF_MP } old_timeline = old_template->segment_timeline; new_timeline = new_template->segment_timeline; + timescale = new_template->timescale; } if (!old_timeline && !new_timeline) return GF_OK; + prev_count = gf_list_count(old_timeline->entries); + + + if (group) { + group->current_start_time = gf_dash_get_segment_start_time_with_timescale(group, NULL, &group->current_timescale); + } else { + for (i=0; igroups); i++) { + GF_DASH_Group *a_group = gf_list_get(dash->groups, i); + a_group->current_start_time = gf_dash_get_segment_start_time_with_timescale(a_group, NULL, &a_group->current_timescale); + } + } + + first_new_entry = gf_list_get(new_timeline->entries, 0); idx = 0; start_time=0; @@ -1248,8 +1353,34 @@ static GF_Err gf_dash_merge_segment_timeline(GF_MPD_SegmentList *old_list, GF_MP gf_list_insert(new_timeline->entries, ent, idx); idx ++; gf_list_rem(old_timeline->entries, 0); + } + if (group) { + group->nb_segments_in_rep = gf_list_count(new_timeline->entries); + group->download_segment_index = gf_dash_get_index_in_timeline(new_timeline, group->current_start_time, group->current_timescale, timescale); + } else { + u32 i; + for (i=0; igroups); i++) { + GF_DASH_Group *a_group = gf_list_get(dash->groups, i); + a_group->nb_segments_in_rep = gf_list_count(new_timeline->entries); + a_group->download_segment_index = gf_dash_get_index_in_timeline(new_timeline, a_group->current_start_time, a_group->current_timescale, timescale); + } } + + + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Updated SegmentTimeline: %d entries (%d previously)\n", gf_list_count(new_timeline->entries), prev_count)); +#if 0 + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("Dumping new merged timeline: \n")); + start_time=0; + for (idx=0; idxentries); idx++) { + GF_MPD_SegmentTimelineEntry *ent = gf_list_get(new_timeline->entries, idx); + if (!idx) start_time = ent->start_time; + assert(!ent->start_time || (ent->start_time >=start_time)); + start_time += ent->duration*(1+ent->repeat_count); + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("\tt="LLU" d=%d r=%d\n", ent->start_time, ent->duration, ent->repeat_count)); + } +#endif + return GF_OK; } @@ -1292,6 +1423,8 @@ static u32 gf_dash_purge_segment_timeline(GF_DASH_Group *group, Double min_start GF_MPD_SegmentList *segment_list; /*update next download index*/ group->download_segment_index -= nb_removed; + assert(group->nb_segments_in_rep >= nb_removed); + group->nb_segments_in_rep -= nb_removed; /*clean segmentList*/ segment_list = NULL; if (group->period && 
group->period->segment_list) segment_list = group->period->segment_list; @@ -1324,6 +1457,7 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) char * purl; Double timeline_start_time; GF_MPD *new_mpd; + Bool fetch_only = 0; if (!dash->mpd_dnload) { local_url = purl = NULL; @@ -1348,6 +1482,7 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) gf_free(dash->base_url); dash->base_url = gf_strdup(purl); } + fetch_only = 1; } } else { local_url = dash->dash_io->get_cache_name(dash->dash_io, dash->mpd_dnload); @@ -1448,6 +1583,8 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) /*TODO - check periods are the same !!*/ period = gf_list_get(dash->mpd->periods, dash->active_period_index); + if (fetch_only && !period) goto exit; + new_period = gf_list_get(new_mpd->periods, dash->active_period_index); if (!new_period) { GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[DASH] Error - cannot update playlist: missing period\n")); @@ -1471,8 +1608,10 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) for (group_idx=0; group_idxgroups); group_idx++) { GF_DASH_Group *group = gf_list_get(dash->groups, group_idx); - Double group_start = gf_dash_get_segment_start_time(group, NULL); - if (!group_idx || (timeline_start_time > group_start) ) timeline_start_time = group_start; + if (group->selection!=GF_DASH_GROUP_NOT_SELECTABLE) { + Double group_start = gf_dash_get_segment_start_time(group, NULL); + if (!group_idx || (timeline_start_time > group_start) ) timeline_start_time = group_start; + } } /*we can rewind our segments from timeshift*/ if (timeline_start_time > timeshift) timeline_start_time -= timeshift; @@ -1481,7 +1620,7 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) } /*update segmentTimeline at Period level*/ - e = gf_dash_merge_segment_timeline(period->segment_list, period->segment_template, new_period->segment_list, new_period->segment_template, timeline_start_time); + e = gf_dash_merge_segment_timeline(NULL, dash, period->segment_list, period->segment_template, new_period->segment_list, new_period->segment_template, timeline_start_time); if (e) { gf_mpd_del(new_mpd); return e; @@ -1494,7 +1633,8 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) GF_DASH_Group *group = gf_list_get(dash->groups, group_idx); /*update info even if the group is not selected !*/ - + if (group->selection==GF_DASH_GROUP_NOT_SELECTABLE) + continue; set = group->adaptation_set; new_set = gf_list_get(new_period->adaptation_sets, group_idx); @@ -1596,7 +1736,7 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) } } - e = gf_dash_merge_segment_timeline(rep->segment_list, rep->segment_template, new_rep->segment_list, new_rep->segment_template, timeline_start_time); + e = gf_dash_merge_segment_timeline(group, NULL, rep->segment_list, rep->segment_template, new_rep->segment_list, new_rep->segment_template, timeline_start_time); if (e) { gf_mpd_del(new_mpd); return e; @@ -1614,6 +1754,14 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) rep->mime_type = NULL; } } + + /*update segmentTimeline at AdaptationSet level before switching the set (old setup needed to compute current timing of each group) */ + e = gf_dash_merge_segment_timeline(group, NULL, set->segment_list, set->segment_template, new_set->segment_list, new_set->segment_template, timeline_start_time); + if (e) { + gf_mpd_del(new_mpd); + return e; + } + /*update group/period to new period*/ j = gf_list_find(group->period->adaptation_sets, group->adaptation_set); group->adaptation_set = 
gf_list_get(new_period->adaptation_sets, j); @@ -1622,13 +1770,6 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) j = gf_list_count(group->adaptation_set->representations); assert(j); - /*update segmentTimeline at AdaptationSet level*/ - e = gf_dash_merge_segment_timeline(set->segment_list, set->segment_template, new_set->segment_list, new_set->segment_template, timeline_start_time); - if (e) { - gf_mpd_del(new_mpd); - return e; - } - /*now that all possible SegmentXXX have been updated, purge them if needed: all segments ending before timeline_start_time will be removed from MPD*/ if (timeline_start_time) { @@ -1639,7 +1780,11 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) } if (new_mpd->availabilityStartTime != dash->mpd->availabilityStartTime) { - gf_dash_group_timeline_setup(new_mpd, group, fetch_time); + s64 diff = new_mpd->availabilityStartTime; + diff -= dash->mpd->availabilityStartTime; + if (diff < 0) diff = diff; + if (diff>3000) + gf_dash_group_timeline_setup(new_mpd, group, fetch_time); } group->maybe_end_of_stream = 0; @@ -1680,6 +1825,8 @@ static GF_Err gf_dash_update_manifest(GF_DashClient *dash) GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Updated AdaptationSet %d - %d segments\n", group_idx+1, group->nb_segments_in_rep)); } + +exit: /*swap representations - we don't need to update download_segment_index as it still points to the right entry in the merged list*/ if (dash->mpd) gf_mpd_del(dash->mpd); @@ -2225,13 +2372,9 @@ static GF_Err gf_dash_download_init_segment(GF_DashClient *dash, GF_DASH_Group * return e; } - /*no error and no init segment, go for media segment*/ + /*no error and no init segment, go for media segment - this is needed for TS so that the set of media streams can be + declared to the player */ if (!base_init_url) { - //if no init segment don't download first segment -#if 1 - gf_mx_v(dash->dl_mutex); - return GF_OK; -#else e = gf_dash_resolve_url(dash->mpd, rep, group, dash->base_url, GF_DASH_RESOLVE_URL_MEDIA, group->download_segment_index, &base_init_url, &start_range, &end_range, &group->current_downloaded_segment_duration, NULL); if (e) { gf_mx_v(dash->dl_mutex); @@ -2239,7 +2382,6 @@ static GF_Err gf_dash_download_init_segment(GF_DashClient *dash, GF_DASH_Group * return e; } nb_segment_read = 1; -#endif } else if (!group->bitstream_switching) { group->dont_delete_first_segment = 1; } @@ -2255,7 +2397,7 @@ static GF_Err gf_dash_download_init_segment(GF_DashClient *dash, GF_DASH_Group * /*do not erase local files*/ group->local_files = group->was_segment_base ? 
0 : 1; if (group->local_files) { - gf_dash_buffer_off(group, dash); + gf_dash_buffer_off(group); } group->download_segment_index += nb_segment_read; @@ -2468,6 +2610,33 @@ static void gf_dash_skip_disabled_representation(GF_DASH_Group *group, GF_MPD_Re gf_dash_set_group_representation(group, rep); } +static void gf_dash_group_reset(GF_DashClient *dash, GF_DASH_Group *group) +{ + if (group->buffering) { + gf_dash_buffer_off(group); + } + if (group->urlToDeleteNext) { + if (!dash->keep_files && !group->local_files) + dash->dash_io->delete_cache_file(dash->dash_io, group->segment_download, group->urlToDeleteNext); + + gf_free(group->urlToDeleteNext); + group->urlToDeleteNext = NULL; + } + if (group->segment_download) { + dash->dash_io->del(dash->dash_io, group->segment_download); + group->segment_download = NULL; + } + while (group->nb_cached_segments) { + group->nb_cached_segments --; + if (!dash->keep_files && !group->local_files) + gf_delete_file(group->cached[group->nb_cached_segments].cache); + + gf_free(group->cached[group->nb_cached_segments].cache); + gf_free(group->cached[group->nb_cached_segments].url); + } + group->timeline_setup = 0; +} + static void gf_dash_reset_groups(GF_DashClient *dash) { /*send playback destroy event*/ @@ -2477,27 +2646,9 @@ static void gf_dash_reset_groups(GF_DashClient *dash) GF_DASH_Group *group = gf_list_last(dash->groups); gf_list_rem_last(dash->groups); - if (group->urlToDeleteNext) { - if (!dash->keep_files && !group->local_files) - dash->dash_io->delete_cache_file(dash->dash_io, group->segment_download, group->urlToDeleteNext); - - gf_free(group->urlToDeleteNext); - group->urlToDeleteNext = NULL; - } - if (group->segment_download) { - dash->dash_io->del(dash->dash_io, group->segment_download); - group->segment_download = NULL; - } - while (group->nb_cached_segments) { - group->nb_cached_segments --; - if (!dash->keep_files && !group->local_files) - gf_delete_file(group->cached[group->nb_cached_segments].cache); + gf_dash_group_reset(dash, group); - gf_free(group->cached[group->nb_cached_segments].cache); - gf_free(group->cached[group->nb_cached_segments].url); - } gf_free(group->cached); - if (group->service_mime) gf_free(group->service_mime); gf_free(group); @@ -2575,6 +2726,36 @@ GF_Err gf_dash_setup_groups(GF_DashClient *dash) continue; } } + if (rep->codecs && dash->max_bit_per_pixel) { + char *vid_type = strstr(rep->codecs, "hvc"); + if (!vid_type) vid_type = strstr(rep->codecs, "hev"); + if (!vid_type) vid_type = strstr(rep->codecs, "avc"); + if (!vid_type) vid_type = strstr(rep->codecs, "svc"); + if (!vid_type) vid_type = strstr(rep->codecs, "mvc"); + //HEVC + if (vid_type && (!strnicmp(rep->codecs, "hvc", 3) || !strnicmp(rep->codecs, "hev", 3))) { + char *pidc = rep->codecs+5; + if ((pidc[0]=='A') || (pidc[0]=='B') || (pidc[0]=='C')) pidc++; + //Main 10 !! 
+ if (!strncmp(pidc, "2.", 2)) { + rep->playback.disabled = 1; + continue; + } + } + //AVC + if (vid_type && (!strnicmp(rep->codecs, "avc", 3) || !strnicmp(rep->codecs, "svc", 3) || !strnicmp(rep->codecs, "mvc", 3))) { + char prof_string[3]; + u8 prof; + strncpy(prof_string, vid_type+5, 2); + prof_string[2]=0; + prof = atoi(prof_string); + //Main 10 + if (prof==0x6E) { + rep->playback.disabled = 1; + continue; + } + } + } rep->playback.disabled = 0; if (rep->width>set->max_width) { @@ -2932,10 +3113,10 @@ static GF_Err gf_dash_setup_period(GF_DashClient *dash) u32 nb_rep_ok = 0; GF_DASH_Group *group = gf_list_get(dash->groups, group_i); -#ifdef DEBUG_SKIP_FIRST_SET - if (group_i==0) + if ((dash->debug_group_index>=0) && (group_i != (u32) dash->debug_group_index)) { + group->selection = GF_DASH_GROUP_NOT_SELECTABLE; continue; -#endif + } nb_rep = gf_list_count(group->adaptation_set->representations); @@ -3048,10 +3229,6 @@ static GF_Err gf_dash_setup_period(GF_DashClient *dash) group->selection = GF_DASH_GROUP_NOT_SELECTED; nb_groups_ok++; - -#ifdef DEBUG_FIRST_SET_ONLY - break; -#endif } period = gf_list_get(dash->mpd->periods, dash->active_period_index); @@ -3104,6 +3281,8 @@ static void dash_do_rate_adaptation(GF_DashClient *dash, GF_DASH_Group *group, G for (k=0; kadaptation_set->representations); k++) { GF_MPD_Representation *arep = gf_list_get(group->adaptation_set->representations, k); + if (arep->playback.disabled) continue; + if (dl_rate >= arep->bandwidth) { if (!new_rep) new_rep = arep; else if (go_up) { @@ -3165,9 +3344,12 @@ restart_period: group_count = gf_list_count(dash->groups); for (i=0; igroups, i); - if (group->selection != GF_DASH_GROUP_SELECTED) continue; + if (group->selection==GF_DASH_GROUP_NOT_SELECTABLE) + continue; + + //by default all groups are started (init seg download and buffering). 
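
The codec filtering added above keys off the client's max_bit_per_pixel setting to disable 10-bit Representations when the display is limited to 8 bits per component. As a hedged illustration of the underlying string parsing (this is not the GPAC code itself; codecs_profile_fits_display() is a hypothetical helper), RFC 6381 "avc1.PPCCLL" codecs strings carry profile_idc as two hexadecimal digits, where 0x6E (110) is High 10, while "hvc1"/"hev1" strings carry an optional profile-space letter followed by a decimal general_profile_idc, where 2 is Main 10:

    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical helper (sketch only): decide whether a representation's codec
     * profile can be rendered on a display limited to max_bpp bits per component.
     * String layouts follow RFC 6381 / ISO 14496-15 "codecs" parameters. */
    static int codecs_profile_fits_display(const char *codecs, unsigned max_bpp)
    {
        if (max_bpp > 8) return 1;                /* 10-bit capable display: accept all */
        if (!codecs || strlen(codecs) < 7) return 1;

        /* HEVC: "hvc1.2.4.L120.B0" - optional profile_space letter (A/B/C), then a
         * decimal general_profile_idc; value 2 is Main 10 */
        if (!strncmp(codecs, "hvc", 3) || !strncmp(codecs, "hev", 3)) {
            const char *p = codecs + 5;
            if ((*p == 'A') || (*p == 'B') || (*p == 'C')) p++;
            return (atoi(p) != 2);
        }

        /* AVC/SVC/MVC: "avc1.PPCCLL" - PP is profile_idc in hex; 0x6E (110) is High 10 */
        if (!strncmp(codecs, "avc", 3) || !strncmp(codecs, "svc", 3) || !strncmp(codecs, "mvc", 3)) {
            char pp[3] = { codecs[5], codecs[6], 0 };
            return (strtoul(pp, NULL, 16) != 0x6E);
        }

        return 1;                                 /* codec we do not filter on */
    }
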
They will be (de)selected by the user if (first_period_in_mpd) { - gf_dash_buffer_on(group, dash); + gf_dash_buffer_on(group); } e = gf_dash_download_init_segment(dash, group); if (e) break; @@ -3219,7 +3401,7 @@ restart_period: if (dash->force_mpd_update || (dash->mpd->minimum_update_period && (timer > dash->mpd->minimum_update_period))) { u32 diff = gf_sys_clock(); dash->force_mpd_update = 0; - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] At %d Time to update the playlist (%u ms elapsed since last refresh and min reoad rate is %u)\n", gf_sys_clock() , timer, dash->mpd->minimum_update_period)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] At %d Time to update the playlist (%u ms elapsed since last refresh and min reload rate is %u)\n", gf_sys_clock() , timer, dash->mpd->minimum_update_period)); e = gf_dash_update_manifest(dash); group_count = gf_list_count(dash->groups); diff = gf_sys_clock() - diff; @@ -3287,8 +3469,15 @@ restart_period: Bool use_byterange; u32 representation_index; u32 clock_time; + Bool empty_file = GF_FALSE; GF_DASH_Group *group = gf_list_get(dash->groups, i); - if (group->selection != GF_DASH_GROUP_SELECTED) continue; + + if (group->selection != GF_DASH_GROUP_SELECTED) { + if (group->nb_cached_segments) { + gf_dash_group_reset(dash, group); + } + continue; + } if (group->done) continue; if (group->nb_cached_segments>=group->max_cached_segments) { @@ -3388,7 +3577,7 @@ restart_period: /*if segment AST is greater than now, it is not yet available - we would need an estimate on how long the request takes to be sent to the server in order to be more reactive ...*/ if (to_wait > 1) { - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Set #%d At %d Next segment %d (AST "LLD") is not yet available on server - requesting later in %d ms\n", i+1, gf_sys_clock(), group->download_segment_index + start_number, segment_ast, to_wait)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Set #%d At %d Next segment %d (AST "LLD" - sec in period %g) is not yet available on server - requesting later in %d ms\n", i+1, gf_sys_clock(), group->download_segment_index + start_number, segment_ast, (segment_ast - group->period->start - group->ast_at_init)/1000.0, to_wait)); if (group->last_segment_time) { GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] %d ms elapsed since previous segment download\n", clock_time - group->last_segment_time)); } @@ -3397,7 +3586,7 @@ restart_period: min_wait = to_wait; continue; } else { - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Set #%d At %d Next segment %d (AST "LLD") should now be available on server since %d ms - requesting it\n", i+1, gf_sys_clock(), group->download_segment_index + start_number, segment_ast, -to_wait)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Set #%d At %d Next segment %d (AST "LLD" - sec in period %g) should now be available on server since %d ms - requesting it\n", i+1, gf_sys_clock(), group->download_segment_index + start_number, segment_ast, (segment_ast - group->period->start - group->ast_at_init)/1000.0, -to_wait)); if (group->last_segment_time) { GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] %d ms elapsed since previous segment download\n", clock_time - group->last_segment_time)); @@ -3431,7 +3620,7 @@ restart_period: e = GF_OK; /*do not erase local files*/ group->local_files = 1; - gf_dash_buffer_off(group, dash); + gf_dash_buffer_off(group); if (group->force_switch_bandwidth && !dash->auto_switch_count) { gf_dash_switch_group_representation(dash, group); /*restart*/ @@ -3527,7 +3716,10 @@ restart_period: if (group->segment_must_be_streamed) 
local_file_name = dash->dash_io->get_url(dash->dash_io, group->segment_download); else local_file_name = dash->dash_io->get_cache_name(dash->dash_io, group->segment_download); - + + if (dash->dash_io->get_total_size(dash->dash_io, group->segment_download)==0) { + empty_file = GF_TRUE; + } resource_name = dash->dash_io->get_url(dash->dash_io, group->segment_download); if (!dash->auto_switch_count) @@ -3538,22 +3730,26 @@ restart_period: gf_mx_p(dash->dl_mutex); assert(group->nb_cached_segmentsmax_cached_segments); assert( local_file_name ); - group->cached[group->nb_cached_segments].cache = gf_strdup(local_file_name); - group->cached[group->nb_cached_segments].url = gf_strdup( resource_name ); - group->cached[group->nb_cached_segments].start_range = 0; - group->cached[group->nb_cached_segments].end_range = 0; - group->cached[group->nb_cached_segments].representation_index = representation_index; - group->cached[group->nb_cached_segments].duration = (u32) group->current_downloaded_segment_duration; - group->cached[group->nb_cached_segments].loop_detected = group->loop_detected; - group->loop_detected = GF_FALSE; - - if (group->local_files && use_byterange) { - group->cached[group->nb_cached_segments].start_range = start_range; - group->cached[group->nb_cached_segments].end_range = end_range; + if (! empty_file) { + + group->cached[group->nb_cached_segments].cache = gf_strdup(local_file_name); + group->cached[group->nb_cached_segments].url = gf_strdup( resource_name ); + group->cached[group->nb_cached_segments].start_range = 0; + group->cached[group->nb_cached_segments].end_range = 0; + group->cached[group->nb_cached_segments].representation_index = representation_index; + group->cached[group->nb_cached_segments].duration = (u32) group->current_downloaded_segment_duration; + group->cached[group->nb_cached_segments].loop_detected = group->loop_detected; + group->loop_detected = GF_FALSE; + + if (group->local_files && use_byterange) { + group->cached[group->nb_cached_segments].start_range = start_range; + group->cached[group->nb_cached_segments].end_range = end_range; + } + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Added file to cache (%u/%u in cache): %s\n", group->nb_cached_segments+1, group->max_cached_segments, group->cached[group->nb_cached_segments].url)); + group->nb_cached_segments++; + gf_dash_update_buffering(group, dash); } - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Added file to cache (%u/%u in cache): %s\n", group->nb_cached_segments+1, group->max_cached_segments, group->cached[group->nb_cached_segments].url)); - group->nb_cached_segments++; - gf_dash_update_buffering(group, dash); + /* download enhancement representation of this segment*/ if ((representation_index != group->force_max_rep_index) && rep->enhancement_rep_index_plus_one) group->active_rep_index = rep->enhancement_rep_index_plus_one - 1; @@ -4077,6 +4273,14 @@ Bool gf_dash_is_group_selected(GF_DashClient *dash, u32 idx) return (group->selection == GF_DASH_GROUP_SELECTED) ? 1 : 0; } +GF_EXPORT +Bool gf_dash_is_group_selectable(GF_DashClient *dash, u32 idx) +{ + GF_DASH_Group *group = gf_list_get(dash->groups, idx); + if (!group) return 0; + return (group->selection == GF_DASH_GROUP_NOT_SELECTABLE) ? 
0 : 1; +} + GF_EXPORT void gf_dash_get_info(GF_DashClient *dash, const char **title, const char **source) { @@ -4284,11 +4488,14 @@ const char *gf_dash_group_get_segment_init_url(GF_DashClient *dash, u32 idx, u64 GF_EXPORT void gf_dash_group_select(GF_DashClient *dash, u32 idx, Bool select) { + Bool needs_resetup = 0; GF_DASH_Group *group = gf_list_get(dash->groups, idx); if (!group) return; if (group->selection == GF_DASH_GROUP_NOT_SELECTABLE) return; + if ((group->selection==GF_DASH_GROUP_NOT_SELECTED) && select) needs_resetup = 1; + group->selection = select ? GF_DASH_GROUP_SELECTED : GF_DASH_GROUP_NOT_SELECTED; /*this set is part of a group, make sure no all other sets from the indicated group are unselected*/ if (select && (group->adaptation_set->group>=0)) { @@ -4306,6 +4513,10 @@ void gf_dash_group_select(GF_DashClient *dash, u32 idx, Bool select) } } } + //TODO: recompute grop download index based on current playback ... + if (needs_resetup) { + + } } GF_EXPORT @@ -4860,15 +5071,29 @@ GF_Err gf_dash_resync_to_segment(GF_DashClient *dash, const char *latest_segment } GF_EXPORT -GF_Err gf_dash_set_max_resolution(GF_DashClient *dash, u32 width, u32 height) +GF_Err gf_dash_set_max_resolution(GF_DashClient *dash, u32 width, u32 height, u8 max_display_bpp) { if (dash) { dash->max_width = width; dash->max_height = height; + dash->max_bit_per_pixel = max_display_bpp; return GF_OK; } return GF_BAD_PARAM; } +GF_EXPORT +void gf_dash_debug_group(GF_DashClient *dash, s32 group_index) +{ + dash->debug_group_index = group_index; +} + +GF_EXPORT +void gf_dash_set_user_buffer(GF_DashClient *dash, u32 buffer_time_ms) +{ + if (dash) dash->user_buffer_ms = buffer_time_ms; +} + + #endif //GPAC_DISABLE_DASH_CLIENT diff --git a/src/media_tools/dash_segmenter.c b/src/media_tools/dash_segmenter.c index 7417fc6..b6a0b0b 100644 --- a/src/media_tools/dash_segmenter.c +++ b/src/media_tools/dash_segmenter.c @@ -53,7 +53,7 @@ struct _dash_component /*for audio*/ u32 sample_rate, channels; - /*for anything*/ + /*apply to any media. 
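
For context on the client entry points extended or added above (gf_dash_set_max_resolution gaining a max_display_bpp argument, gf_dash_debug_group and gf_dash_set_user_buffer), here is a minimal usage sketch from the embedding player's side; the configure_dash_client() wrapper and its arguments are assumptions for illustration, and only the three gf_dash_* calls come from this patch:

    #include <gpac/dash.h>

    /* Sketch: how a terminal could push its display constraints and buffering
     * policy to the DASH client after creating it (creation and IO callback
     * setup omitted; dashc and the display_* / buffer_ms values are assumed). */
    static void configure_dash_client(GF_DashClient *dashc,
                                      u32 display_width, u32 display_height,
                                      u8 display_bpp, u32 buffer_ms)
    {
        /* display size and bit depth; when a limited bit depth is signalled,
           10-bit profiles (HEVC Main 10, AVC High 10) are flagged
           playback.disabled at group setup time */
        gf_dash_set_max_resolution(dashc, display_width, display_height, display_bpp);

        /* player-side buffer duration in ms, stored in dash->user_buffer_ms */
        gf_dash_set_user_buffer(dashc, buffer_ms);

        /* debug aid: a value >= 0 makes all other adaptation sets
           NOT_SELECTABLE; -1 leaves every set selectable */
        gf_dash_debug_group(dashc, -1);
    }
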
We use 5 bytes because we may use copy data converted from gf_4cc_to_str which is 5 bytes*/ char szLang[5]; }; @@ -90,6 +90,7 @@ typedef struct u32 initial_moof_sn; u64 initial_tfdt; Bool no_fragments_defaults; + Bool samplegroups_in_traf; } GF_DASHSegmenterOptions; struct _dash_segment_input @@ -484,7 +485,7 @@ static GF_Err gf_media_isom_segment_file(GF_ISOFile *input, const char *output_f u64 MaxFragmentDuration, MaxSegmentDuration, SegmentDuration, maxFragDurationOverSegment; u32 presentationTimeOffset = 0; Double segment_start_time, file_duration, period_duration, max_segment_duration; - u32 nb_segments, width, height, sample_rate, nb_channels, sar_w, sar_h, fps_num, fps_denum, startNumber, startNumberRewind; + u32 nb_segments, width, height, sample_rate, nb_channels, sar_w, sar_h, fps_num, fps_denum, startNumber; char langCode[5]; u32 index_start_range, index_end_range; Bool force_switch_segment = GF_FALSE; @@ -549,7 +550,6 @@ static GF_Err gf_media_isom_segment_file(GF_ISOFile *input, const char *output_f file_duration = 0; startNumber = 1; - startNumberRewind = 0; //create output file /*need to precompute bandwidth*/ @@ -570,20 +570,6 @@ static GF_Err gf_media_isom_segment_file(GF_ISOFile *input, const char *output_f store_dash_params=GF_TRUE; gf_cfg_set_key(dash_cfg->dash_ctx, RepSecName, "ID", dash_input->representationID); } - //we no longer support start number changes -#if 0 - /*we are in time shift enabled mode so segments will get destroyed, set the start number to the current segment - and restore presentationTimeOffset (cf below)*/ - if (!store_dash_params && (dash_cfg->time_shift_depth >= 0)) { - opt = gf_cfg_get_key(dash_cfg->dash_ctx, RepSecName, "NextSegmentIndex"); - sscanf(opt, "%u", &startNumber); - - /*adjust the startNumber according to the timeShiftBuffer depth*/ - if ((dash_cfg->time_shift_depth>0) && (startNumber>(u32)dash_cfg->time_shift_depth) ) { - startNumberRewind = dash_cfg->time_shift_depth; - } - } -#endif } opt = dash_cfg->dash_ctx ? 
gf_cfg_get_key(dash_cfg->dash_ctx, RepSecName, "InitializationSegment") : NULL; @@ -700,6 +686,17 @@ static GF_Err gf_media_isom_segment_file(GF_ISOFile *input, const char *output_f if (gf_isom_is_track_in_root_od(input, i+1)) gf_isom_add_track_to_root_od(output, TrackNum); + /*remove sgpd in stbl; it wuold be in traf*/ + if (dash_cfg->samplegroups_in_traf) { + GF_TrackBox *trak = (GF_TrackBox *)gf_isom_get_track_from_file(output, TrackNum); + if (!trak) continue; + while (gf_list_count(trak->Media->information->sampleTable->sampleGroupsDescription)) { + GF_Box* box = (GF_Box*)gf_list_get(trak->Media->information->sampleTable->sampleGroupsDescription, 0); + gf_isom_box_del(box); + gf_list_rem(trak->Media->information->sampleTable->sampleGroupsDescription, 0); + } + } + // Commenting it the code for Timed Text tracks, it may happen that we have only one long sample, fragmentation is useful #if 0 //if only one sample, don't fragment track @@ -1258,7 +1255,7 @@ restart_fragmentation_pass: if (e) goto err_exit; /*copy subsample information*/ - e = gf_isom_fragment_copy_subsample(output, tf->TrackID, input, tf->OriginalTrack, tf->SampleNum + 1); + e = gf_isom_fragment_copy_subsample(output, tf->TrackID, input, tf->OriginalTrack, tf->SampleNum + 1, dash_cfg->samplegroups_in_traf); if (e) goto err_exit; @@ -1433,9 +1430,9 @@ restart_fragmentation_pass: max_segment_duration = (Double) (s64) SegmentDuration; max_segment_duration /= dash_cfg->dash_scale; } - force_switch_segment=GF_FALSE; - switch_segment=GF_TRUE; - SegmentDuration=GF_FALSE; + force_switch_segment = GF_FALSE; + switch_segment = GF_TRUE; + SegmentDuration = 0; split_at_rap = GF_FALSE; has_rap = GF_FALSE; /*restore fragment duration*/ @@ -1589,8 +1586,10 @@ restart_fragmentation_pass: if (!dash_cfg->variable_seg_rad_name && first_in_set) { const char *rad_name = gf_url_get_resource_name(seg_rad_name); gf_media_mpd_format_segment_name(GF_DASH_TEMPLATE_TEMPLATE, is_bs_switching, SegmentName, output_file, dash_input->representationID, rad_name, !stricmp(seg_ext, "null") ? NULL : seg_ext, 0, 0, 0, dash_cfg->use_segment_timeline); - fprintf(dash_cfg->mpd, " dash_scale : mpd_timescale, SegmentName, startNumber - startNumberRewind); + fprintf(dash_cfg->mpd, " dash_scale : mpd_timescale, SegmentName, startNumber); if (!mpd_timeline_bs) { + if (!max_segment_duration) + max_segment_duration = dash_cfg->segment_duration; fprintf(dash_cfg->mpd, " duration=\"%d\"", (u32) (max_segment_duration * mpd_timescale)); } /*in BS switching we share the same IS for all reps*/ @@ -1733,7 +1732,7 @@ restart_fragmentation_pass: if (dash_cfg->variable_seg_rad_name) { const char *rad_name = gf_url_get_resource_name(seg_rad_name); gf_media_mpd_format_segment_name(GF_DASH_TEMPLATE_TEMPLATE, is_bs_switching, SegmentName, output_file, dash_input->representationID, rad_name, !stricmp(seg_ext, "null") ? NULL : seg_ext, 0, bandwidth, 0, dash_cfg->use_segment_timeline); - fprintf(dash_cfg->mpd, " mpd, " representationID, rad_name, !stricmp(seg_ext, "null") ? 
NULL : "mp4", 0, 0, 0, dash_cfg->use_segment_timeline); fprintf(dash_cfg->mpd, " initialization=\"%s\"", SegmentName); @@ -3036,7 +3035,7 @@ static GF_Err dasher_mp2t_segment_file(GF_DashSegInput *dash_input, const char * char szSectionName[100], szRepURLsSecName[100]; char szCodecs[100]; const char *opt; - u32 i, startNumberRewind; + u32 i; GF_Err e; u64 start, pcr_shift, next_pcr_shift; Double cumulated_duration = 0; @@ -3057,7 +3056,6 @@ static GF_Err dasher_mp2t_segment_file(GF_DashSegInput *dash_input, const char * /*create bitstreams*/ segment_index = 1; - startNumberRewind = 0; ts_seg.index_file = NULL; ts_seg.index_bs = NULL; if (!dash_cfg->dash_ctx && (dash_cfg->use_url_template != 2)) { @@ -3169,12 +3167,6 @@ static GF_Err dasher_mp2t_segment_file(GF_DashSegInput *dash_input, const char * opt = gf_cfg_get_key(dash_cfg->dash_ctx, szSectionName, "StartIndex"); if (opt) sscanf(opt, "%u", &segment_index); - /*adjust the startNumber according to the timeShiftBuffer depth*/ - if ((dash_cfg->time_shift_depth>0) && (segment_index > (u32)dash_cfg->time_shift_depth) ) { - startNumberRewind = dash_cfg->time_shift_depth; - } - - opt = gf_cfg_get_key(dash_cfg->dash_ctx, szSectionName, "PCR90kOffset"); if (opt) sscanf(opt, LLU, &pcr_shift); @@ -3186,7 +3178,7 @@ static GF_Err dasher_mp2t_segment_file(GF_DashSegInput *dash_input, const char * /*write segment template for all representations*/ if (first_in_set && dash_cfg->seg_rad_name && dash_cfg->use_url_template && !dash_cfg->variable_seg_rad_name) { gf_media_mpd_format_segment_name(GF_DASH_TEMPLATE_TEMPLATE, 1, SegName, basename, dash_input->representationID, gf_url_get_resource_name(dash_cfg->seg_rad_name), "ts", 0, bandwidth, segment_index, dash_cfg->use_segment_timeline); - fprintf(dash_cfg->mpd, " segment_duration), segment_index - startNumberRewind, SegName); + fprintf(dash_cfg->mpd, " segment_duration), segment_index, SegName); if (!dash_cfg->dash_ctx) { gf_media_mpd_format_segment_name(GF_DASH_TEMPLATE_INITIALIZATION_TEMPLATE, 1, IdxName, basename, dash_input->representationID, gf_url_get_resource_name(dash_cfg->seg_rad_name), "six", 0, bandwidth, segment_index, dash_cfg->use_segment_timeline); fprintf(dash_cfg->mpd, " index=\"%s\"", IdxName); @@ -3631,12 +3623,11 @@ static GF_Err write_mpd_header(FILE *mpd, const char *mpd_name, GF_Config *dash_ /*TODO what should we put for minBufferTime */ fprintf(mpd, "isom, track->track_num, 1); + GF_DecoderConfig *dcd; + //use inspect mode so that we don't aggregate xPS from the base in the enhancement ESD + gf_isom_set_nalu_extract_mode(streamer->isom, track->track_num, GF_ISOM_NALU_EXTRACT_INSPECT); + dcd = gf_isom_get_decoder_config(streamer->isom, track->track_num, 1); if (dcd && dcd->decoderSpecificInfo) { dsi = dcd->decoderSpecificInfo->data; @@ -552,8 +555,9 @@ GF_ISOMRTPStreamer *gf_isom_streamer_new(const char *file_name, const char *ip_d case GF_ISOM_SUBTYPE_HEV1: case GF_ISOM_SUBTYPE_HVC2: case GF_ISOM_SUBTYPE_HEV2: + case GF_ISOM_SUBTYPE_SHC1: { - GF_HEVCConfig *hevcc = NULL; + GF_HEVCConfig *hevcc = NULL, *shvcc = NULL; hevcc = gf_isom_hevc_config_get(streamer->isom, track->track_num, 1); if (hevcc) { track->avc_nalu_size = hevcc->nal_unit_size; @@ -561,6 +565,13 @@ GF_ISOMRTPStreamer *gf_isom_streamer_new(const char *file_name, const char *ip_d streamType = GF_STREAM_VISUAL; oti = GPAC_OTI_VIDEO_HEVC; } + shvcc = gf_isom_shvc_config_get(streamer->isom, track->track_num, 1); + if (shvcc) { + track->avc_nalu_size = shvcc->nal_unit_size; + gf_odf_hevc_cfg_del(shvcc); + streamType = 
GF_STREAM_VISUAL; + oti = GPAC_OTI_VIDEO_SHVC; + } flags |= GP_RTP_PCK_USE_MULTI; break; } diff --git a/src/media_tools/html5_media.c b/src/media_tools/html5_media.c index 1f2bed0..27b3721 100644 --- a/src/media_tools/html5_media.c +++ b/src/media_tools/html5_media.c @@ -31,35 +31,243 @@ #include #include +GF_HTML_MediaTimeRanges *gf_html_timeranges_new(u32 timescale) +{ + GF_HTML_MediaTimeRanges *ranges; + GF_SAFEALLOC(ranges, GF_HTML_MediaTimeRanges); + ranges->times = gf_list_new(); + ranges->timescale = timescale; + return ranges; +} -GF_Err gf_media_time_ranges_add(GF_HTML_MediaTimeRanges *timeranges, double start, double end) +static GF_Err gf_html_timeranges_add_time(GF_HTML_MediaTimeRanges *timeranges, u64 time) { - double *d; + u64 *t; if (!timeranges) return GF_BAD_PARAM; - d = (double *)gf_malloc(sizeof(double)); - *d = start; - gf_list_add(timeranges->times, d); - d = (double *)gf_malloc(sizeof(double)); - *d = end; - gf_list_add(timeranges->times, d); + t = (u64 *)gf_malloc(sizeof(u64)); + *t = time; + gf_list_add(timeranges->times, t); return GF_OK; } -void gf_html_timeranges_reset(GF_HTML_MediaTimeRanges *range) +GF_Err gf_html_timeranges_add_start(GF_HTML_MediaTimeRanges *timeranges, u64 start) +{ + return gf_html_timeranges_add_time(timeranges, start); +} + +GF_Err gf_html_timeranges_add_end(GF_HTML_MediaTimeRanges *timeranges, u64 end) { - while (gf_list_count(range->times)) + return gf_html_timeranges_add_time(timeranges, end); +} + +void gf_html_timeranges_reset(GF_HTML_MediaTimeRanges *ranges) +{ + while (gf_list_count(ranges->times)) { - double *d = (double *)gf_list_get(range->times, 0); + u64 *d = (u64 *)gf_list_get(ranges->times, 0); gf_free(d); - gf_list_rem(range->times, 0); + gf_list_rem(ranges->times, 0); } } -void gf_html_timeranges_del(GF_HTML_MediaTimeRanges *range) +void gf_html_timeranges_del(GF_HTML_MediaTimeRanges *ranges) +{ + gf_html_timeranges_reset(ranges); + gf_list_del(ranges->times); + ranges->times = NULL; + gf_free(ranges); +} + +void gf_html_timeranges_merge(GF_HTML_MediaTimeRanges *ranges) { + u32 i, count; + u64 *start; + u64 *end; + u64 *prev_end; + GF_List *merged = gf_list_new(); + + prev_end = NULL; + count = gf_list_count(ranges->times); + for (i = 0; i < count; i+=2) { + start = (u64 *)gf_list_get(ranges->times, i); + end = (u64 *)gf_list_get(ranges->times, i+1); + if (prev_end == NULL || *start > *prev_end) { + if (prev_end) { + gf_list_add(merged, prev_end); + } + gf_list_add(merged, start); + } else if (*start == *prev_end) { + gf_free(start); + } + prev_end = end; + } + if (prev_end) { + gf_list_add(ranges->times, prev_end); + } + gf_list_del(ranges->times); + ranges->times = merged; +} + + +GF_HTML_MediaTimeRanges *gf_html_timeranges_union(GF_HTML_MediaTimeRanges *a, GF_HTML_MediaTimeRanges *b) +{ + GF_HTML_MediaTimeRanges *union_ranges; + u32 i, j, count_a, count_b; + union_ranges = gf_html_timeranges_new(a->timescale); + union_ranges->c = a->c; + union_ranges->_this = a->_this; + + count_a = gf_list_count(a->times); + if (b) { + count_b = gf_list_count(b->times); + } else { + count_b = 0; + } + if (count_a == 0 && count_b == 0) { + return NULL; + } else if (count_a == 0) { + GF_HTML_MediaTimeRanges *tmp = a; + a = b; + b = tmp; + count_a = count_b; + count_b = 0; + } + i = 0; + j = 0; + while (i < count_a) { + Bool add_a = GF_TRUE; + u64 *starta = (u64 *)gf_list_get(a->times, i); + u64 *enda = (u64 *)gf_list_get(a->times, i+1); + while (j < count_b) { + u64 *startb = (u64 *)gf_list_get(b->times, j); + u64 *endb = (u64 
*)gf_list_get(b->times, j+1); + if (*enda*b->timescale < *startb*a->timescale) { + /* a ends before b starts, there is no overlap, we can add a to the union */ + gf_list_add(union_ranges->times, starta); + gf_list_add(union_ranges->times, enda); + add_a = GF_FALSE; + /* force to get the next a */ + i+=2; + break; + } else if (*endb*a->timescale < *starta*b->timescale) { + /* b ends before a starts, there is no overlap, we can add b to the union */ + *startb = (u64)((*startb * a->timescale)*1.0 / b->timescale); + gf_list_add(union_ranges->times, startb); + *endb = (u64)((*endb * a->timescale)*1.0 / b->timescale); + gf_list_add(union_ranges->times, endb); + j+=2; + } else { /* there is some overlap */ + if (*starta*b->timescale <= *startb*a->timescale) { /* the overlap is at the end of a */ + if (*endb*a->timescale <= *enda*b->timescale) { /* b is contained in a */ + /* ignore b, move on to the next b */ + j+=2; + } else { /* *endb > *enda, the overlap is only at the start of b */ + /* update start of b */ + *startb = (u64)((*starta * b->timescale)*1.0 / a->timescale); + /* ignore a, move on to the next a */ + i+=2; + break; + } + } else { /* *starta > *startb, the overlap is at the end of b */ + if (*enda*b->timescale <= *endb*a->timescale) { /* a is contained in b */ + /* ignore a */ + add_a = GF_FALSE; + /* force to get the next a */ + i+=2; + break; + } else { /* *enda > *endb, the overlap is at the beginning of a */ + /* update start of a */ + *starta = (u64)((*startb * a->timescale)*1.0 / b->timescale); + /* ignore b, move on to the next b */ + j+=2; + } + } + } + } + /* we've processed all b, but a has not been added */ + /* first check if the next a is not contiguous */ + if (add_a == GF_TRUE && i+2 < count_a) { + u64 *next_starta = (u64 *)gf_list_get(a->times, i+2); + //u64 *next_enda = (u64 *)gf_list_get(a->times, i+3); + if (*enda == *next_starta) { + *next_starta = *starta; + } + add_a = GF_FALSE; + } + if (add_a) { + gf_list_add(union_ranges->times, starta); + gf_list_add(union_ranges->times, enda); + } + i+=2; + } + gf_html_timeranges_merge(union_ranges); + return union_ranges; +} + +GF_HTML_MediaTimeRanges *gf_html_timeranges_intersection(GF_HTML_MediaTimeRanges *a, GF_HTML_MediaTimeRanges *b) { - gf_html_timeranges_reset(range); - gf_list_del(range->times); - range->times = NULL; + GF_HTML_MediaTimeRanges *intersection_ranges; + u32 i, j, count_a, count_b; + intersection_ranges = gf_html_timeranges_new(a->timescale); + intersection_ranges->c = a->c; + intersection_ranges->_this = a->_this; + count_a = 0; + count_b = 0; + if (a) count_a = gf_list_count(a->times); + if (b) count_b = gf_list_count(b->times); + if (count_a != 0 && count_b != 0) { + i = 0; + j = 0; + while (i < count_a) { + u64 *starta = (u64 *)gf_list_get(a->times, i); + u64 *enda = (u64 *)gf_list_get(a->times, i+1); + while (j < count_b) { + u64 *startb = (u64 *)gf_list_get(b->times, j); + u64 *endb = (u64 *)gf_list_get(b->times, j+1); + if (*enda*b->timescale < *startb*a->timescale) { + /* this is no intersection with this a */ + /* force to get the next a */ + i+=2; + break; + } else if (*endb*a->timescale < *starta*b->timescale) { + /* this is no intersection with this b */ + j+=2; + } else { /* there is an intersection */ + if (*starta*b->timescale <= *startb*a->timescale) { /* the intersection starts at the beginning of b */ + gf_list_add(intersection_ranges->times, startb); + if (*endb*a->timescale <= *enda*b->timescale) { /* b is contained in a */ + gf_list_add(intersection_ranges->times, endb); + *starta 
= (u64)((*endb * a->timescale)*1.0 / b->timescale); + /* move on to the next b */ + j+=2; + } else { /* *endb > *enda, the intersection ends at the end of a */ + gf_list_add(intersection_ranges->times, enda); + /* update start of b */ + *startb = (u64)((*enda * b->timescale)*1.0 / a->timescale); + /* move on to the next a */ + i+=2; + break; + } + } else { /* *starta > *startb, the intersection starts at the beginning of a */ + gf_list_add(intersection_ranges->times, startb); + if (*enda*b->timescale <= *endb*a->timescale) { /* a is contained in b */ + gf_list_add(intersection_ranges->times, enda); + *startb = (u64)((*enda * b->timescale)*1.0 / a->timescale); + /* move on to the next a */ + i+=2; + break; + } else { /* *enda > *endb, the intersection ends at the end of b */ + gf_list_add(intersection_ranges->times, endb); + /* update start of a */ + *starta = (u64)((*endb * a->timescale)*1.0 / b->timescale); + /* move on to the next b */ + j+=2; + } + } + } + } + } + } + return intersection_ranges; } GF_HTML_Track *html_media_add_new_track_to_list(GF_HTML_TrackList *tracklist, @@ -165,9 +373,9 @@ GF_HTML_MediaElement *gf_html_media_element_new(GF_Node *media_node, GF_HTML_Med me->audioTracks.tracks = gf_list_new(); me->videoTracks.tracks = gf_list_new(); me->textTracks.tracks = gf_list_new(); - me->buffered.times = gf_list_new(); - me->played.times = gf_list_new(); - me->seekable.times = gf_list_new(); + me->buffered = gf_html_timeranges_new(1); + me->played = gf_html_timeranges_new(1); + me->seekable = gf_html_timeranges_new(1); return me; } @@ -178,9 +386,9 @@ void gf_html_media_element_del(GF_HTML_MediaElement *me) gf_html_tracklist_del(&me->audioTracks); gf_html_tracklist_del(&me->videoTracks); gf_html_tracklist_del(&me->textTracks); - gf_html_timeranges_del(&me->buffered); - gf_html_timeranges_del(&me->seekable); - gf_html_timeranges_del(&me->played); + gf_html_timeranges_del(me->buffered); + gf_html_timeranges_del(me->seekable); + gf_html_timeranges_del(me->played); gf_free(me); } @@ -188,9 +396,9 @@ GF_HTML_MediaController *gf_html_media_controller_new() { GF_HTML_MediaController *mc; GF_SAFEALLOC(mc, GF_HTML_MediaController); - mc->buffered.times = gf_list_new(); - mc->played.times = gf_list_new(); - mc->seekable.times = gf_list_new(); + mc->buffered = gf_html_timeranges_new(1); + mc->played = gf_html_timeranges_new(1); + mc->seekable = gf_html_timeranges_new(1); return mc; } @@ -203,27 +411,11 @@ void gf_html_mediacontroller_del(GF_HTML_MediaController *mc) GF_HTML_MediaElement *me = (GF_HTML_MediaElement *)gf_list_get(mc->media_elements, i); me->controller = NULL; } - gf_html_timeranges_del(&mc->buffered); - gf_html_timeranges_del(&mc->seekable); - gf_html_timeranges_del(&mc->played); + gf_html_timeranges_del(mc->buffered); + gf_html_timeranges_del(mc->seekable); + gf_html_timeranges_del(mc->played); gf_free(mc); } #endif -GF_DOMEventTarget *gf_html_media_get_event_target_from_node(GF_Node *n) { - GF_DOMEventTarget *target = NULL; - //GF_HTML_MediaElement *me = html_media_element_get_from_node(c, n); - //*target = me->evt_target; -#ifndef GPAC_DISABLE_SVG - if (!n->sgprivate->interact) { - GF_SAFEALLOC(n->sgprivate->interact, struct _node_interactive_ext); - } - if (!n->sgprivate->interact->dom_evt) { - n->sgprivate->interact->dom_evt = gf_dom_event_target_new(GF_DOM_EVENT_TARGET_HTML_MEDIA, n); - } - target = n->sgprivate->interact->dom_evt; -#endif - return target; -} - diff --git a/src/media_tools/html5_mse.c b/src/media_tools/html5_mse.c index ab8190b..d4e3d44 100644 --- 
a/src/media_tools/html5_mse.c +++ b/src/media_tools/html5_mse.c @@ -40,6 +40,7 @@ GF_HTML_MediaSource *gf_mse_media_source_new() ms->activeSourceBuffers.evt_target = gf_dom_event_target_new(GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFERLIST, &ms->activeSourceBuffers); ms->reference_count = 1; ms->evt_target = gf_dom_event_target_new(GF_DOM_EVENT_TARGET_MSE_MEDIASOURCE, ms); + ms->durationType = DURATION_NAN; return ms; } @@ -76,7 +77,7 @@ void gf_mse_mediasource_del(GF_HTML_MediaSource *ms, Bool del_js) } } -static void gf_mse_fire_event(GF_DOMEventTarget *target, GF_EventType event_type) +void gf_mse_fire_event(GF_DOMEventTarget *target, GF_EventType event_type) { GF_SceneGraph *sg = NULL; GF_DOM_Event mse_event; @@ -107,7 +108,7 @@ static void gf_mse_fire_event(GF_DOMEventTarget *target, GF_EventType event_type break; } assert(sg); - sg_fire_dom_event(target, &mse_event, sg, NULL); + gf_sg_fire_dom_event(target, &mse_event, sg, NULL); } GF_EXPORT @@ -148,24 +149,42 @@ GF_HTML_SourceBuffer *gf_mse_source_buffer_new(GF_HTML_MediaSource *mediasource) GF_SAFEALLOC(source, GF_HTML_SourceBuffer); sprintf(name, "SourceBuffer_Thread_%p", source); source->mediasource = mediasource; - source->buffered.times = gf_list_new(); + source->buffered = gf_html_timeranges_new(1); source->input_buffer = gf_list_new(); source->tracks = gf_list_new(); + source->threads = gf_list_new(); source->parser_thread = gf_th_new(name); source->remove_thread = gf_th_new(name); source->append_mode = MEDIA_SOURCE_APPEND_MODE_SEGMENTS; source->appendWindowStart = 0; - source->appendWindowEnd = GF_MAX_DOUBLE; + source->appendWindowEnd = GF_MAX_DOUBLE; source->evt_target = gf_dom_event_target_new(GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFER, source); + source->timescale = 1; return source; } -void gf_mse_add_source_buffer(GF_HTML_MediaSource *ms, GF_HTML_SourceBuffer *sb) +void gf_mse_mediasource_add_source_buffer(GF_HTML_MediaSource *ms, GF_HTML_SourceBuffer *sb) { gf_list_add(ms->sourceBuffers.list, sb); gf_mse_fire_event(ms->sourceBuffers.evt_target, GF_EVENT_HTML_MSE_ADD_SOURCE_BUFFER); } +/* Not yet used +void gf_mse_add_active_source_buffer(GF_HTML_MediaSource *ms, GF_HTML_SourceBuffer *sb) +{ + gf_list_add(ms->activeSourceBuffers.list, sb); + gf_mse_fire_event(ms->activeSourceBuffers.evt_target, GF_EVENT_HTML_MSE_ADD_SOURCE_BUFFER); +} */ + +void gf_mse_remove_active_source_buffer(GF_HTML_MediaSource *ms, GF_HTML_SourceBuffer *sb) { + s32 activePos; + activePos = gf_list_find(ms->activeSourceBuffers.list, sb); + if (activePos >= 0) { + gf_list_rem(ms->activeSourceBuffers.list, activePos); + gf_mse_fire_event(ms->activeSourceBuffers.evt_target, GF_EVENT_HTML_MSE_REMOVE_SOURCE_BUFFER); + } +} + static void gf_mse_reset_input_buffer(GF_List *input_buffer) { while (gf_list_count(input_buffer)) { @@ -175,10 +194,74 @@ static void gf_mse_reset_input_buffer(GF_List *input_buffer) } } +/* Deletes all unparsed data buffers from all tracks in the source buffer */ +static void gf_mse_source_buffer_reset_parser(GF_HTML_SourceBuffer *sb) +{ + u32 i, track_count; + track_count = gf_list_count(sb->tracks); + + /* wait until all remaining entire AU are parsed and then flush the remaining bytes in the parser */ + + for (i = 0; i < track_count; i++) + { + GF_HTML_Track *track = (GF_HTML_Track *)gf_list_get(sb->tracks, i); + track->last_dts_set = GF_FALSE; + track->highest_pts_set = GF_FALSE; + track->needs_rap = GF_TRUE; + } + sb->group_end_timestamp_set = GF_FALSE; + gf_mse_reset_input_buffer(sb->input_buffer); + sb->append_state = 
MEDIA_SOURCE_APPEND_STATE_WAITING_FOR_SEGMENT; +} + +GF_Err gf_mse_source_buffer_abort(GF_HTML_SourceBuffer *sb) +{ + if (sb->updating) { + /* setting to false should stop the parsing thread */ + sb->updating = GF_FALSE; + gf_mse_fire_event(sb->evt_target, GF_EVENT_HTML_MSE_UPDATE_ABORT); + gf_mse_fire_event(sb->evt_target, GF_EVENT_HTML_MSE_UPDATE_END); + } + gf_mse_source_buffer_reset_parser(sb); + sb->appendWindowStart = 0; + sb->appendWindowEnd = GF_MAX_DOUBLE; + return GF_OK; +} + +GF_Err gf_mse_remove_source_buffer(GF_HTML_MediaSource *ms, GF_HTML_SourceBuffer *sb) { + s32 pos; + pos = gf_list_find(ms->sourceBuffers.list, sb); + if (pos < 0) { + return GF_NOT_FOUND; + } else { + gf_mse_source_buffer_abort(sb); + /* TODO: update the audio/video/text tracks */ + gf_mse_remove_active_source_buffer(ms, sb); + gf_list_rem(ms->sourceBuffers.list, pos); + gf_mse_fire_event(ms->sourceBuffers.evt_target, GF_EVENT_HTML_MSE_REMOVE_SOURCE_BUFFER); + gf_mse_source_buffer_del(sb); + } + return GF_OK; +} + +/* TODO: not yet used +void gf_mse_detach(GF_HTML_MediaSource *ms) { + u32 count; + u32 i; + GF_HTML_SourceBuffer *sb; + ms->readyState = MEDIA_SOURCE_READYSTATE_CLOSED; + ms->durationType = DURATION_NAN; + count = gf_list_count(ms->sourceBuffers.list); + for (i = 0; i < count; i++) { + sb = (GF_HTML_SourceBuffer *)gf_list_get(ms->sourceBuffers.list, i); + gf_mse_remove_source_buffer(ms, sb); + } +} */ + void gf_mse_source_buffer_del(GF_HTML_SourceBuffer *sb) { GF_HTML_TrackList tlist; - gf_html_timeranges_del(&sb->buffered); + gf_html_timeranges_del(sb->buffered); gf_mse_reset_input_buffer(sb->input_buffer); gf_list_del(sb->input_buffer); @@ -186,6 +269,15 @@ void gf_mse_source_buffer_del(GF_HTML_SourceBuffer *sb) tlist.tracks = sb->tracks; gf_html_tracklist_del(&tlist); + { + u32 i, count; + count = gf_list_count(sb->threads); + for(i = 0; i < count; i++) { + GF_Thread *t = (GF_Thread *)gf_list_get(sb->threads, i); + gf_th_del(t); + } + gf_list_del(sb->threads); + } gf_th_del(sb->parser_thread); gf_th_del(sb->remove_thread); @@ -326,113 +418,97 @@ static GF_Err gf_mse_source_buffer_store_track_desc(GF_HTML_SourceBuffer *sb, GF return GF_OK; } +#define SECONDS_TO_TIMESCALE(s) ((s)*track->timescale) +#define TIMESCALE_TO_SECONDS(u) ((u)*1.0/track->timescale) + + +GF_HTML_MediaTimeRanges *gf_mse_timeranges_from_track_packets(GF_HTML_Track *track) { + u32 i, count; + GF_HTML_MediaTimeRanges *ranges; + u64 start; + u64 end=0; + Bool end_set = GF_FALSE; + GF_MSE_Packet *packet; + + ranges = gf_html_timeranges_new(track->timescale); + count = gf_list_count(track->buffer); + for (i = 0; i < count; i++) { + packet = (GF_MSE_Packet *)gf_list_get(track->buffer, i); + if (end_set == GF_FALSE|| packet->sl_header.compositionTimeStamp > end) { + if (end_set == GF_TRUE) { + gf_html_timeranges_add_end(ranges, end); + } + start = packet->sl_header.compositionTimeStamp; + gf_html_timeranges_add_start(ranges, start); + end = packet->sl_header.compositionTimeStamp + packet->sl_header.au_duration; + end_set = GF_TRUE; + } else if (packet->sl_header.compositionTimeStamp == end) { + end = packet->sl_header.compositionTimeStamp + packet->sl_header.au_duration; + } + } + if (end_set == GF_TRUE) { + gf_html_timeranges_add_end(ranges, end); + } + return ranges; +} + /* Traverses the list of Access Units already demuxed & parsed to update the buffered status */ void gf_mse_source_buffer_update_buffered(GF_HTML_SourceBuffer *sb) { u32 i; u32 track_count; - double start= 0; - double end = 0; - Bool start_set = GF_FALSE; - 
Bool end_set = GF_FALSE; - u64 au_dur = 0; - double packet_start; - double packet_end; - - /* cleaning the current list */ - gf_html_timeranges_reset(&(sb->buffered)); - - /* merging the start and end for all tracks */ + track_count = gf_list_count(sb->tracks); + gf_html_timeranges_reset(sb->buffered); for (i = 0; i < track_count; i++) { - u32 j; - u32 packet_count; + GF_HTML_MediaTimeRanges *track_ranges; GF_HTML_Track *track = (GF_HTML_Track *)gf_list_get(sb->tracks, i); gf_mx_p(track->buffer_mutex); - packet_count = gf_list_count(track->buffer); - au_dur = 0; - for (j = 0; j < packet_count; j++) { - GF_MSE_Packet *packet = (GF_MSE_Packet *)gf_list_get(track->buffer, j); - if (packet) { - packet_start = (packet->sl_header.compositionTimeStamp * 1.0 )/ track->timescale; - if (packet->sl_header.au_duration) { - au_dur = packet->sl_header.au_duration; - } else { - if (j > 0) { - GF_MSE_Packet *prev = (GF_MSE_Packet *)gf_list_get(track->buffer, j-1); - au_dur = packet->sl_header.decodingTimeStamp - prev->sl_header.decodingTimeStamp; - } - } - packet_end = ((packet->sl_header.compositionTimeStamp + au_dur) * 1.0) / track->timescale; - if (!start_set) { - start = packet_start; - start_set = GF_TRUE; - } else { - if (start > packet_start) { - start = packet_start; - } - } - if (!end_set) { - end = packet_end; - end_set = GF_TRUE; - } else { - if (end < packet_end) { - end = packet_end; - } - } - } - } + track_ranges = gf_mse_timeranges_from_track_packets(track); + if (i != 0) { + GF_HTML_MediaTimeRanges *tmp; + tmp = gf_html_timeranges_intersection(sb->buffered, track_ranges); + gf_html_timeranges_del(track_ranges); + gf_list_del(sb->buffered->times); + sb->buffered->times = tmp->times; + sb->buffered->timescale = tmp->timescale; + gf_free(tmp); + } else { + gf_list_del(sb->buffered->times); + sb->buffered->times = track_ranges->times; + sb->buffered->timescale = track_ranges->timescale; + gf_free(track_ranges); + } gf_mx_v(track->buffer_mutex); } - - /* Creating only one range for now */ - if (start_set && end_set) { - gf_media_time_ranges_add(&sb->buffered, start, end); - } } -/* Deletes all unparsed data buffers from all tracks in the source buffer */ -static void gf_mse_source_buffer_reset_parser(GF_HTML_SourceBuffer *sb) -{ - u32 i, track_count; - track_count = gf_list_count(sb->tracks); - - /* wait until all remaining entire AU are parsed and then flush the remaining bytes in the parser */ - - for (i = 0; i < track_count; i++) - { - GF_HTML_Track *track = (GF_HTML_Track *)gf_list_get(sb->tracks, i); - track->last_dts_set = GF_FALSE; - track->highest_pts_set = GF_FALSE; - track->needs_rap = GF_TRUE; - } - sb->highest_end_timestamp_set = GF_FALSE; - gf_mse_reset_input_buffer(sb->input_buffer); - sb->append_state = MEDIA_SOURCE_APPEND_STATE_WAITING_FOR_SEGMENT; +void gf_mse_source_buffer_set_timestampOffset(GF_HTML_SourceBuffer *sb, double d) { + u32 i; + sb->timestampOffset = (s64)(d*sb->timescale); + if (sb->append_mode == MEDIA_SOURCE_APPEND_MODE_SEQUENCE) { + sb->group_start_timestamp_flag = GF_TRUE; + sb->group_start_timestamp = sb->timestampOffset; + } + for (i = 0; i < gf_list_count(sb->tracks); i++) { + GF_HTML_Track *track = (GF_HTML_Track *)gf_list_get(sb->tracks, i); + track->timestampOffset = sb->timestampOffset*track->timescale; + } } -GF_Err gf_mse_source_buffer_abort(GF_HTML_SourceBuffer *sb) -{ -/* -if (sb->continuation_timestamp_flag == GF_FALSE) - { - if (sb->abort_mode == MEDIA_SOURCE_ABORT_MODE_CONTINUATION && !sb->highest_end_timestamp_set) - { - return GF_BAD_PARAM; - } 
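
The reworked buffered computation above (gf_mse_timeranges_from_track_packets feeding gf_html_timeranges_intersection in gf_mse_source_buffer_update_buffered) boils down to coalescing the presentation intervals of back-to-back access units into ranges, then intersecting the per-track results. A minimal, self-contained sketch of the coalescing step, independent of the GPAC structures (au_t and print_buffered_ranges are illustrative names; times are in the track timescale):

    #include <stdio.h>

    typedef struct { unsigned long long cts, dur; } au_t;   /* presentation time + duration */

    /* Sketch: coalesce the [cts, cts+dur) intervals of consecutive access units
     * into buffered ranges. A range is extended while each AU starts exactly
     * where the previous one ended; a gap closes the current range and opens a
     * new one; AUs falling inside the current range are ignored. */
    static void print_buffered_ranges(const au_t *au, size_t count, unsigned timescale)
    {
        unsigned long long start = 0, end = 0;
        int open = 0;
        size_t i;

        for (i = 0; i < count; i++) {
            if (!open) {
                start = au[i].cts;
                end   = au[i].cts + au[i].dur;
                open  = 1;
            } else if (au[i].cts == end) {
                end = au[i].cts + au[i].dur;      /* contiguous: extend the range */
            } else if (au[i].cts > end) {
                printf("[%g, %g)\n", (double)start / timescale, (double)end / timescale);
                start = au[i].cts;                /* gap: start a new range */
                end   = au[i].cts + au[i].dur;
            }
        }
        if (open)
            printf("[%g, %g)\n", (double)start / timescale, (double)end / timescale);
    }

For a 90 kHz track with AUs at 0, 3600, 7200 and then 180000 ticks, each 3600 ticks long, this prints [0, 0.12) and [2, 2.04).
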
+void gf_mse_source_buffer_set_timescale(GF_HTML_SourceBuffer *sb, u32 new_timescale) { + u32 old_timescale = sb->timescale; + if (old_timescale == new_timescale) return; + sb->timescale = new_timescale; + sb->timestampOffset = (s64)((sb->timestampOffset * new_timescale * 1.0)/old_timescale); + if (sb->group_start_timestamp_flag) { + sb->group_start_timestamp = (u64)((sb->group_start_timestamp * new_timescale * 1.0)/old_timescale); + } + if (sb->group_end_timestamp_set) { + sb->group_end_timestamp = (u64)((sb->group_end_timestamp * new_timescale * 1.0)/old_timescale); + } + sb->remove_start = (u64)((sb->remove_start * new_timescale * 1.0)/old_timescale); + sb->remove_end = (u64)((sb->remove_end * new_timescale * 1.0)/old_timescale); - if (sb->highest_end_timestamp_set) { - sb->continuation_timestamp = sb->highest_end_timestamp; - sb->continuation_timestamp_flag = GF_TRUE; - } - } -// sb->abort_mode = mode; -*/ - gf_mse_source_buffer_set_update(sb, GF_FALSE); - sb->appendWindowStart = 0; - sb->appendWindowEnd = GF_MAX_DOUBLE; - /*fire abort event at the SourceBuffer */ - gf_mse_source_buffer_reset_parser(sb); - return GF_OK; } void gf_mse_packet_del(GF_MSE_Packet *packet) { @@ -457,6 +533,7 @@ static GF_MSE_Packet *gf_mse_find_overlapped_packet(GF_HTML_Track *tra found_previous = GF_TRUE; } if (found_previous == GF_TRUE && p->sl_header.compositionTimeStamp > packet->sl_header.compositionTimeStamp) { + gf_mx_v(track->buffer_mutex); return p; } } @@ -469,64 +546,142 @@ static void gf_mse_remove_frames_from_to(GF_HTML_Track *track, u64 to) { u32 i; - u32 frame_count; gf_mx_p(track->buffer_mutex); - frame_count = gf_list_count(track->buffer); - for (i = 0; i < frame_count; i++) { + i = 0; + while (i < gf_list_count(track->buffer)) { GF_MSE_Packet *frame = (GF_MSE_Packet *)gf_list_get(track->buffer, i); - if (frame->sl_header.compositionTimeStamp >= from && frame->sl_header.compositionTimeStamp < to) { - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE] Removing frame %g (%d frames)\n", (frame->sl_header.compositionTimeStamp*1.0)/track->timescale, gf_list_count(track->buffer))); + if (frame->sl_header.compositionTimeStamp >= to) { + break; + } else if (frame->sl_header.compositionTimeStamp >= from && frame->sl_header.compositionTimeStamp < to) { + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE] Removing frame with PTS %g s (%d frames remaining)\n", TIMESCALE_TO_SECONDS(frame->sl_header.compositionTimeStamp), gf_list_count(track->buffer))); gf_list_rem(track->buffer, i); - } + } else { + i++; + } } gf_mx_v(track->buffer_mutex); } +static void gf_mse_track_buffer_add_packet(GF_HTML_Track *track, GF_MSE_Packet *frame) +{ + u32 i, count; + Bool inserted = GF_FALSE; + + gf_mx_p(track->buffer_mutex); + /* TODO: improve insertion*/ + count = gf_list_count(track->buffer); + for (i = 0; i < count; i++) { + GF_MSE_Packet *next_frame = (GF_MSE_Packet *)gf_list_get(track->buffer, i); + if (frame->sl_header.decodingTimeStamp < next_frame->sl_header.decodingTimeStamp) { + gf_list_insert(track->buffer, frame, i); + /* if the frame had no duration, we can now tell its duration because of the next frame */ + if (!frame->sl_header.au_duration) { + frame->sl_header.au_duration = (u32)(next_frame->sl_header.decodingTimeStamp - frame->sl_header.decodingTimeStamp); + /* we need also to check the duration of the previous frame */ + if (i > 0) { + GF_MSE_Packet *prev_frame = (GF_MSE_Packet *)gf_list_get(track->buffer, i-1); + /* we update the frame duration if the newly inserted frame modifies it */ + if 
(!prev_frame->sl_header.au_duration || + prev_frame->sl_header.au_duration > frame->sl_header.decodingTimeStamp - prev_frame->sl_header.decodingTimeStamp) { + prev_frame->sl_header.au_duration = (u32)(frame->sl_header.decodingTimeStamp - prev_frame->sl_header.decodingTimeStamp); + } + } + } + inserted = GF_TRUE; + break; + } + } + if (!inserted) { + gf_list_add(track->buffer, frame); + /* if the frame is inserted last, we cannot know its duration until a new frame is appended or unless the transport format carried it */ + count = gf_list_count(track->buffer); + if (count > 1) { + GF_MSE_Packet *prev_frame = (GF_MSE_Packet *)gf_list_get(track->buffer, count-2); + /* we update the frame duration if the newly inserted frame modifies it */ + if (!prev_frame->sl_header.au_duration || + prev_frame->sl_header.au_duration > frame->sl_header.decodingTimeStamp - prev_frame->sl_header.decodingTimeStamp) { + prev_frame->sl_header.au_duration = (u32)(frame->sl_header.decodingTimeStamp - prev_frame->sl_header.decodingTimeStamp); + } + } + } + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE] Adding frame with PTS %g s and duration %g s (%d frames in buffer)\n", TIMESCALE_TO_SECONDS(frame->sl_header.compositionTimeStamp), TIMESCALE_TO_SECONDS(frame->sl_header.au_duration), gf_list_count(track->buffer))); + gf_mx_v(track->buffer_mutex); +} + static GF_Err gf_mse_process_coded_frame(GF_HTML_SourceBuffer *sb, GF_HTML_Track *track, GF_MSE_Packet *frame, Bool *stored) { + s64 PTS_with_offset = frame->sl_header.compositionTimeStamp + frame->sl_header.timeStampOffset; + s64 DTS_with_offset = frame->sl_header.decodingTimeStamp + frame->sl_header.timeStampOffset; *stored = GF_FALSE; + if (sb->append_mode == MEDIA_SOURCE_APPEND_MODE_SEQUENCE && sb->group_start_timestamp_flag) { - sb->timestampOffset = sb->group_start_timestamp - (frame->sl_header.compositionTimeStamp*1.0/track->timescale); - sb->highest_end_timestamp = sb->group_start_timestamp; - track->needs_rap = GF_TRUE; /* fix: should be on all track buffers */ + u32 i, count; + /* compute the new offset without taking care of the previous one, since this is a new coded frame group */ + /* first adjust existing times to the new timescale */ + gf_mse_source_buffer_set_timescale(sb, track->timescale); + sb->timestampOffset = (sb->group_start_timestamp - frame->sl_header.compositionTimeStamp); + track->timestampOffset = (sb->group_start_timestamp - frame->sl_header.compositionTimeStamp); + sb->group_end_timestamp = sb->group_start_timestamp; + count = gf_list_count(sb->tracks); + for (i = 0; i < count; i++) { + GF_HTML_Track *t = (GF_HTML_Track *)gf_list_get(sb->tracks, i); + t->needs_rap = GF_TRUE; + } sb->group_start_timestamp_flag = GF_FALSE; } - if (sb->timestampOffset != 0) { - u64 offset = (u64)((sb->timestampOffset)*track->timescale); - if (offset > frame->sl_header.compositionTimeStamp || offset > frame->sl_header.decodingTimeStamp) { - return GF_NON_COMPLIANT_BITSTREAM; - } - frame->sl_header.compositionTimeStamp += (u64)(sb->timestampOffset*track->timescale); - frame->sl_header.decodingTimeStamp += (u64)(sb->timestampOffset*track->timescale); - /* check if the new CTS/DTS are in range */ + if (track->timestampOffset != 0) { + frame->sl_header.timeStampOffset = track->timestampOffset; + PTS_with_offset = frame->sl_header.compositionTimeStamp + frame->sl_header.timeStampOffset; + DTS_with_offset = frame->sl_header.decodingTimeStamp + frame->sl_header.timeStampOffset; } if (track->last_dts_set) { - if (track->last_dts*track->timescale > 
frame->sl_header.decodingTimeStamp) { - return GF_NON_COMPLIANT_BITSTREAM; - } - - /* why ??? - * If last decode timestamp for track buffer is set and decode timestamp is less than last decode timestamp - * or the difference between decode timestamp and last decode timestamp is greater than 100 milliseconds, - * then call endOfStream("decode") and abort these steps. - */ - if (frame->sl_header.decodingTimeStamp - track->last_dts*track->timescale > 0.1*track->timescale) { - return GF_NON_COMPLIANT_BITSTREAM; + if (DTS_with_offset < (s64) track->last_dts || + DTS_with_offset - track->last_dts > 2*track->last_dur) { + /* A discontinuity in the timestamps is detected, this triggers the start of a new coded frame group */ + if (sb->append_mode == MEDIA_SOURCE_APPEND_MODE_SEGMENTS) { + /* the current group ends at the start of this frame */ + /* check if sb.timescale has to be adjusted first with gf_mse_source_buffer_set_timescale(sb, track->timescale);*/ + sb->group_end_timestamp = PTS_with_offset; + sb->group_end_timestamp_set = GF_TRUE; + } else { /* sb->append_mode == MEDIA_SOURCE_APPEND_MODE_SEQUENCE */ + /* check if sb.timescale has to be adjusted first with gf_mse_source_buffer_set_timescale(sb, track->timescale);*/ + sb->group_start_timestamp = sb->group_end_timestamp; + sb->group_start_timestamp_flag = GF_TRUE; + } + { + u32 i, count; + count = gf_list_count(sb->tracks); + for (i = 0; i < count; i++) { + GF_HTML_Track *t = (GF_HTML_Track *)gf_list_get(sb->tracks, i); + t->last_dts_set = GF_FALSE; + t->last_dts = 0; + t->last_dur = 0; + t->highest_pts_set = GF_FALSE; + t->highest_pts = 0; + t->needs_rap = GF_TRUE; + } + } + return gf_mse_process_coded_frame(sb, track, frame, stored); } } + + /* we only update the timestamps in the frame when we are sure the offset is the right one */ + frame->sl_header.compositionTimeStamp += frame->sl_header.timeStampOffset; + frame->sl_header.decodingTimeStamp += frame->sl_header.timeStampOffset; + frame->sl_header.timeStampOffset = 0; - if (frame->sl_header.compositionTimeStamp < sb->appendWindowStart*track->timescale) { + if (frame->sl_header.compositionTimeStamp < SECONDS_TO_TIMESCALE(sb->appendWindowStart)) { track->needs_rap = GF_TRUE; return GF_OK; } - if (frame->sl_header.compositionTimeStamp /* + dur */ > sb->appendWindowEnd*track->timescale) { + if (frame->sl_header.compositionTimeStamp + frame->sl_header.au_duration > SECONDS_TO_TIMESCALE(sb->appendWindowEnd)) { track->needs_rap = GF_TRUE; return GF_OK; } @@ -543,39 +698,46 @@ static GF_Err gf_mse_process_coded_frame(GF_HTML_SourceBuffer *sb, GF_MSE_Packet *overlapped_packet; overlapped_packet = gf_mse_find_overlapped_packet(track, frame); if (overlapped_packet) { - gf_mse_remove_frames_from_to(track, overlapped_packet->sl_header.compositionTimeStamp, overlapped_packet->sl_header.compositionTimeStamp + (u64)(0.000001*track->timescale)); + gf_mse_remove_frames_from_to(track, overlapped_packet->sl_header.compositionTimeStamp, + overlapped_packet->sl_header.compositionTimeStamp + (u64)SECONDS_TO_TIMESCALE(0.000001)); } } if (!track->highest_pts_set) { /* this is the first time a frame is processed in the append sequence */ - gf_mse_remove_frames_from_to(track, frame->sl_header.compositionTimeStamp, frame->sl_header.compositionTimeStamp /* + dur */); - } else if (track->highest_pts*track->timescale <= frame->sl_header.compositionTimeStamp) { + gf_mse_remove_frames_from_to(track, frame->sl_header.compositionTimeStamp, frame->sl_header.compositionTimeStamp + frame->sl_header.au_duration); + } else 
if (track->highest_pts <= frame->sl_header.compositionTimeStamp) { /* the highest pts has already been set in this append sequence, so we just need to remove frames from that point on, it's safe */ - gf_mse_remove_frames_from_to(track, (u64)(track->highest_pts*track->timescale), (u64)(track->highest_pts*track->timescale) /* + dur */); + gf_mse_remove_frames_from_to(track, track->highest_pts, track->highest_pts + track->last_dur); } - /* remove dependencies: no way !! */ + /* remove dependencies !! */ /* TODO: spliced frames */ *stored = GF_TRUE; - gf_mx_p(track->buffer_mutex); - gf_list_add(track->buffer, frame); - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE] Adding frame %g (%d frames)\n", (frame->sl_header.compositionTimeStamp*1.0)/track->timescale, gf_list_count(track->buffer))); - gf_mx_v(track->buffer_mutex); + /* adds the packet and update the previous frame duration */ + gf_mse_track_buffer_add_packet(track, frame); - track->last_dts = (frame->sl_header.decodingTimeStamp*1.0/track->timescale); + track->last_dts = frame->sl_header.decodingTimeStamp; track->last_dts_set = GF_TRUE; + if (frame->sl_header.au_duration) { + track->last_dur = frame->sl_header.au_duration; + } else { + /* assuming CFR - FIXME */ + frame->sl_header.au_duration = track->last_dur; + } - if (!track->highest_pts_set || (frame->sl_header.compositionTimeStamp /* + dur */) > track->highest_pts*track->timescale) { + if (!track->highest_pts_set || + (frame->sl_header.compositionTimeStamp + track->last_dur) > track->highest_pts) { track->highest_pts_set = GF_TRUE; - track->highest_pts = (frame->sl_header.compositionTimeStamp*1.0/track->timescale /* + dur */); + track->highest_pts = frame->sl_header.compositionTimeStamp + frame->sl_header.au_duration; } - if (!sb->highest_end_timestamp_set || (frame->sl_header.compositionTimeStamp*1.0 /* + dur */) > sb->highest_end_timestamp * track->timescale) { - sb->highest_end_timestamp_set = GF_TRUE; - sb->highest_end_timestamp = (frame->sl_header.compositionTimeStamp*1.0/track->timescale /* + dur */); + if (!sb->group_end_timestamp_set || (frame->sl_header.compositionTimeStamp + frame->sl_header.au_duration > sb->group_end_timestamp)) { + /* check if sb.timescale has to be adjusted first with gf_mse_source_buffer_set_timescale(sb, track->timescale);*/ + sb->group_end_timestamp = frame->sl_header.compositionTimeStamp + frame->sl_header.au_duration; + sb->group_end_timestamp_set = GF_TRUE; } return GF_OK; @@ -613,7 +775,7 @@ u32 gf_mse_parse_segment(void *par) * AU are placed as GF_MSE_Packets in the track buffer */ track_count = gf_list_count(sb->tracks); - while (1) { + while (sb->updating) { u32 track_with_data = 0; for (i = 0; i < track_count; i++) { Bool stored = GF_FALSE; @@ -629,7 +791,7 @@ u32 gf_mse_parse_segment(void *par) char *data; assert(packet->is_new_data && packet->size); data = (char *)gf_malloc(packet->size); - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE] New AU parsed %g\n", (packet->sl_header.compositionTimeStamp*1.0/track->timescale))); + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE] New AU parsed with PTS %g s\n", TIMESCALE_TO_SECONDS(packet->sl_header.compositionTimeStamp))); memcpy(data, packet->data, packet->size); packet->data = data; gf_mse_process_coded_frame(sb, track, packet, &stored); @@ -670,22 +832,16 @@ void gf_mse_source_buffer_append_arraybuffer(GF_HTML_SourceBuffer *sb, GF_HTML_A gf_list_add(sb->input_buffer, buffer); /* Call the parser (asynchronously) and return */ /* the updating attribute will be positioned back to 0 when the parser is done */ - 
gf_th_run(sb->parser_thread, gf_mse_parse_segment, sb); -} - -/* -FIXME : Unused function, create warnings on debian -static void gf_mse_source_buffer_append_error(GF_HTML_SourceBuffer *sb) -{ - sb->updating = GF_FALSE; - gf_mse_source_buffer_reset_parser(sb); - TODO: fire events + { + GF_Thread *t = gf_th_new(NULL); + gf_list_add(sb->threads, t); + gf_th_run(t, gf_mse_parse_segment, sb); + } } -*/ /* Threaded function called upon request from JS - Removes data in each track buffer until the next RAP is found */ -u32 gf_mse_source_buffer_remove(void *par) +static u32 gf_mse_source_buffer_remove(void *par) { GF_HTML_SourceBuffer *sb = (GF_HTML_SourceBuffer *)par; u32 i; @@ -693,37 +849,47 @@ u32 gf_mse_source_buffer_remove(void *par) u32 track_count; u32 frame_count; u64 end = 0; - //Bool end_set; + + gf_mse_source_buffer_set_update(sb, GF_TRUE); GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE] Removing media until next RAP\n")); track_count = gf_list_count(sb->tracks); for (i = 0; i < track_count; i++) { GF_HTML_Track *track = (GF_HTML_Track *)gf_list_get(sb->tracks, i); - //end_set = GF_FALSE; - + gf_mse_source_buffer_set_timescale(sb, track->timescale); /* find the next random access point */ gf_mx_p(track->buffer_mutex); frame_count = gf_list_count(track->buffer); for (j = 0; j < frame_count; j++) { GF_MSE_Packet *frame = (GF_MSE_Packet *)gf_list_get(track->buffer, j); - if ((frame->sl_header.randomAccessPointFlag && - frame->sl_header.compositionTimeStamp >= sb->remove_end*track->timescale) || - (j == frame_count - 1)) { + if (frame->sl_header.randomAccessPointFlag && + frame->sl_header.compositionTimeStamp >= sb->remove_end) { end = frame->sl_header.compositionTimeStamp; - //end_set = GF_TRUE; break; } } gf_mx_v(track->buffer_mutex); + if (!end) end = (u64)SECONDS_TO_TIMESCALE(sb->remove_end); /* remove up to the next RAP found */ - gf_mse_remove_frames_from_to(track, (u64)sb->remove_start, end); + gf_mse_remove_frames_from_to(track, sb->remove_start, end); } gf_mse_source_buffer_set_update(sb, GF_FALSE); return 0; } +void gf_mse_remove(GF_HTML_SourceBuffer *sb, double start, double end) +{ + sb->remove_start = (u64)(start*sb->timescale); + sb->remove_end = (u64)(end*sb->timescale); + { + GF_Thread *t = gf_th_new(NULL); + gf_list_add(sb->threads, t); + gf_th_run(t, gf_mse_source_buffer_remove, sb); + } +} + /* Callback functions used by a media parser when parsing events happens */ GF_Err gf_mse_proxy(GF_InputService *parser, GF_NetworkCommand *command) { @@ -874,7 +1040,7 @@ GF_Err gf_mse_track_buffer_get_next_packet(GF_HTML_Track *track, *out_reception_status = packet->status; *is_new_data = packet->is_new_data; packet->is_new_data = GF_FALSE; - GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE_IN] Sending AU #%d/%d to decoder with TS: %g \n", track->packet_index, count, (packet->sl_header.compositionTimeStamp*1.0/track->timescale))); + GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[MSE_IN] Sending AU #%d/%d to decoder with PTS %g s\n", track->packet_index, count, TIMESCALE_TO_SECONDS(packet->sl_header.compositionTimeStamp))); } else { *out_data_ptr = NULL; *out_data_size = 0; @@ -886,4 +1052,6 @@ GF_Err gf_mse_track_buffer_get_next_packet(GF_HTML_Track *track, gf_mx_v(track->buffer_mutex); return GF_OK; } + + #endif diff --git a/src/media_tools/img.c b/src/media_tools/img.c index 36fde0c..9a8d4a4 100644 --- a/src/media_tools/img.c +++ b/src/media_tools/img.c @@ -286,7 +286,7 @@ GF_Err gf_img_jpeg_dec(char *jpg, u32 jpg_size, u32 *width, u32 *height, u32 *pi jpx.src.resync_to_restart = 
jpeg_resync_to_restart; jpx.src.term_source = gf_jpeg_stub; jpx.skip = 0; - jpx.src.next_input_byte = jpg; + jpx.src.next_input_byte = (JOCTET *) jpg; jpx.src.bytes_in_buffer = jpg_size; jpx.cinfo.src = (void *) &jpx.src; diff --git a/src/media_tools/ismacryp.c b/src/media_tools/ismacryp.c index aed1b2f..8d81150 100644 --- a/src/media_tools/ismacryp.c +++ b/src/media_tools/ismacryp.c @@ -62,6 +62,7 @@ void isma_ea_node_start(void *sax_cbck, const char *node_name, const char *name_ if (!stricmp(att->value, "ISMA"))info->crypt_type = 1; else if (!stricmp(att->value, "CENC AES-CTR")) info->crypt_type = 2; else if (!stricmp(att->value, "CENC AES-CBC")) info->crypt_type = 3; + else if (!stricmp(att->value, "ADOBE")) info->crypt_type = 4; } } return; @@ -173,6 +174,15 @@ void isma_ea_node_start(void *sax_cbck, const char *node_name, const char *name_ else if (!strncmp(att->value, "roll=", 5)) tkc->keyRoll = atoi(att->value+5); } + else if (!stricmp(att->name, "metadata")) { + tkc->metadata_len = gf_base64_encode(att->value, (u32) strlen(att->value), tkc->metadata, 5000); + tkc->metadata[tkc->metadata_len] = 0; + } + } + + if ((info->crypt_type == 3) && (tkc->IV_size == 8)) { + GF_LOG(GF_LOG_WARNING, GF_LOG_AUTHOR, ("[CENC] Using AES-128 CBC: IV_size should be 16\n")); + tkc->IV_size = 16; } } @@ -788,7 +798,7 @@ static void cenc_resync_IV(GF_Crypt *mc, char IV[16], u64 BSO, u8 IV_size, Bool } else { prev_block_count = BSO / 16; - remain = BSO % 16; + remain = (u32) (BSO % 16); } tmp = gf_bs_new(IV, 16, GF_BITSTREAM_READ); @@ -948,7 +958,7 @@ static GF_Err gf_cenc_encrypt_sample_cbc(GF_Crypt *mc, GF_ISOSample *samp, Bool pleintext_bs = gf_bs_new(samp->data, samp->dataLength, GF_BITSTREAM_READ); cyphertext_bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); sai_bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); - gf_bs_write_data(sai_bs, IV, IV_size); + gf_bs_write_data(sai_bs, IV, 16); subsamples = gf_list_new(); if (!subsamples) { e = GF_IO_ERR; @@ -1216,8 +1226,11 @@ GF_Err gf_cenc_encrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*pro if (tci->enc_type == 2) gf_cenc_encrypt_sample_ctr(mc, samp, is_nalu_video, nalu_size_length, IV, tci->IV_size, &buf, &len, bytes_in_nalhr); - else if (tci->enc_type == 3) + else if (tci->enc_type == 3) { + int IV_size = 16; + gf_crypt_get_state(mc, IV, &IV_size); gf_cenc_encrypt_sample_cbc(mc, samp, is_nalu_video, nalu_size_length, IV, tci->IV_size, &buf, &len, bytes_in_nalhr); + } gf_isom_update_sample(mp4, track, i+1, samp, 1); gf_isom_sample_del(&samp); @@ -1350,6 +1363,10 @@ GF_Err gf_cenc_decrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*pro gf_bs_del(bs); gf_crypt_set_state(mc, IV, 17); } + else if (tci->enc_type == 3) { + memmove(IV, sai->IV, 16); + gf_crypt_set_state(mc, IV, 16); + } e = gf_crypt_set_key(mc, tci->key, 16, IV); if (e) { GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[CENC] Cannot set key AES-128 %s (%s)\n", (tci->enc_type == 2) ? 
"CTR" : "CBC", gf_error_to_string(e)) ); @@ -1433,6 +1450,247 @@ exit: return e; } +GF_Err gf_adobe_encrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*progress)(void *cbk, u64 done, u64 total), void *cbk) +{ + GF_Err e; + char IV[16]; + GF_ISOSample *samp; + GF_Crypt *mc; + Bool all_rap = GF_FALSE; + u32 i, count, di, track, len; + Bool has_crypted_samp; + char *buf; + GF_BitStream *bs; + int IV_size; + + e = GF_OK; + samp = NULL; + mc = NULL; + buf = NULL; + bs = NULL; + + track = gf_isom_get_track_by_id(mp4, tci->trackID); + if (!track) { + GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[Adobe] Cannot find TrackID %d in input file - skipping\n", tci->trackID)); + return GF_OK; + } + + mc = gf_crypt_open("AES-128", "CBC"); + if (!mc) { + GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[Adobe] Cannot open AES-128 CBC \n")); + e = GF_IO_ERR; + goto exit; + } + + /*Adobe's protection scheme does not support selective key*/ + memcpy(tci->key, tci->keys[0], 16); + + e = gf_isom_set_adobe_protection(mp4, track, 1, GF_ISOM_ADOBE_SCHEME, 1, GF_TRUE, tci->metadata, tci->metadata_len); + if (e) goto exit; + + count = gf_isom_get_sample_count(mp4, track); + has_crypted_samp = GF_FALSE; + if (! gf_isom_has_sync_points(mp4, track)) + all_rap = GF_TRUE; + + gf_isom_set_nalu_extract_mode(mp4, track, GF_ISOM_NALU_EXTRACT_INSPECT); + for (i = 0; i < count; i++) { + Bool is_encrypted_au = GF_TRUE; + samp = gf_isom_get_sample(mp4, track, i+1, &di); + if (!samp) + { + e = GF_IO_ERR; + goto exit; + } + + len = samp->dataLength; + buf = (char *) gf_malloc(len*sizeof(char)); + memmove(buf, samp->data, len); + gf_free(samp->data); + samp->dataLength = 0; + + switch (tci->sel_enc_type) { + case GF_CRYPT_SELENC_RAP: + if (!samp->IsRAP && !all_rap) { + is_encrypted_au = GF_FALSE; + } + break; + case GF_CRYPT_SELENC_NON_RAP: + if (samp->IsRAP || all_rap) { + is_encrypted_au = GF_FALSE; + } + break; + default: + break; + } + + if (is_encrypted_au) { + u32 padding_bytes; + if (!has_crypted_samp) { + memset(IV, 0, sizeof(char)*16); + memcpy(IV, tci->first_IV, sizeof(char)*16); + e = gf_crypt_init(mc, tci->key, 16, IV); + if (e) { + GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[ADOBE] Cannot initialize AES-128 CBC (%s)\n", gf_error_to_string(e)) ); + gf_crypt_close(mc); + mc = NULL; + e = GF_IO_ERR; + goto exit; + } + has_crypted_samp = GF_TRUE; + } + else { + IV_size = 16; + e = gf_crypt_get_state(mc, IV, &IV_size); + } + + padding_bytes = 16 - len % 16; + len += padding_bytes; + buf = (char *)gf_realloc(buf, len); + memset(buf+len-padding_bytes, padding_bytes, padding_bytes); + + gf_crypt_encrypt(mc, buf, len); + } + + /*rewrite sample with AU header*/ + bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); + if (is_encrypted_au) { + gf_bs_write_u8(bs, 0x10); + gf_bs_write_data(bs, (char *) IV, 16); + } + else { + gf_bs_write_u8(bs, 0x0); + } + gf_bs_write_data(bs, buf, len); + gf_bs_get_content(bs, &samp->data, &samp->dataLength); + gf_bs_del(bs); + bs = NULL; + gf_isom_update_sample(mp4, track, i+1, samp, 1); + gf_isom_sample_del(&samp); + samp = NULL; + gf_free(buf); + buf = NULL; + + gf_set_progress("Adobe's protection scheme Encrypt", i+1, count); + } + +exit: + if (samp) gf_isom_sample_del(&samp); + if (mc) gf_crypt_close(mc); + if (buf) gf_free(buf); + if (bs) gf_bs_del(bs); + return e; +} + +GF_Err gf_adobe_decrypt_track(GF_ISOFile *mp4, GF_TrackCryptInfo *tci, void (*progress)(void *cbk, u64 done, u64 total), void *cbk) +{ + GF_Err e; + u32 track, count, len, i, prev_sample_decrypted, si; + u8 encrypted_au; + GF_Crypt *mc; + 
GF_ISOSample *samp; + char IV[17]; + char *ptr; + GF_BitStream *bs; + + e = GF_OK; + mc = NULL; + samp = NULL; + bs = NULL; + prev_sample_decrypted = GF_FALSE; + + track = gf_isom_get_track_by_id(mp4, tci->trackID); + if (!track) { + GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[ADOBE] Cannot find TrackID %d in input file - skipping\n", tci->trackID)); + return GF_OK; + } + + mc = gf_crypt_open("AES-128", "CBC"); + if (!mc) { + GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[ADOBE] Cannot open AES-128 CBC\n")); + e = GF_IO_ERR; + goto exit; + } + + memcpy(tci->key, tci->keys[0], 16); + + count = gf_isom_get_sample_count(mp4, track); + gf_isom_set_nalu_extract_mode(mp4, track, GF_ISOM_NALU_EXTRACT_INSPECT); + for (i = 0; i < count; i++) { + u32 trim_bytes = 0; + samp = gf_isom_get_sample(mp4, track, i+1, &si); + if (!samp) + { + e = GF_IO_ERR; + goto exit; + } + + ptr = samp->data; + len = samp->dataLength; + + encrypted_au = ptr[0]; + if (encrypted_au) { + memmove(IV, ptr+1, 16); + if (!prev_sample_decrypted) { + e = gf_crypt_init(mc, tci->key, 16, IV); + if (e) { + GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[ADOBE] Cannot initialize AES-128 CBC (%s)\n", gf_error_to_string(e)) ); + gf_crypt_close(mc); + mc = NULL; + e = GF_IO_ERR; + goto exit; + } + prev_sample_decrypted = GF_TRUE; + } + else { + e = gf_crypt_set_state(mc, IV, 16); + if (e) { + GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[ADOBE] Cannot set state AES-128 CBC (%s)\n", gf_error_to_string(e)) ); + gf_crypt_close(mc); + mc = NULL; + e = GF_IO_ERR; + goto exit; + } + } + + ptr += 17; + len -= 17; + + gf_crypt_decrypt(mc, ptr, len); + trim_bytes = ptr[len-1]; + } + else { + ptr += 1; + len -= 1; + } + + //rewrite decrypted sample + bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); + gf_bs_write_data(bs, ptr, len - trim_bytes); + gf_free(samp->data); + samp->dataLength = 0; + gf_bs_get_content(bs, &samp->data, &samp->dataLength); + gf_isom_update_sample(mp4, track, i+1, samp, 1); + gf_bs_del(bs); + bs = NULL; + gf_isom_sample_del(&samp); + samp = NULL; + gf_set_progress("Adobe's protection scheme Decrypt", i+1, count); + } + + /*remove protection info*/ + e = gf_isom_remove_track_protection(mp4, track, 1); + if (e) { + GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[ADOBE] Error Adobe's protection scheme signature from trackID %d: %s\n", tci->trackID, gf_error_to_string(e))); + } + +exit: + if (mc) gf_crypt_close(mc); + if (samp) gf_isom_sample_del(&samp); + if (bs) gf_bs_del(bs); + return e; +} + GF_EXPORT GF_Err gf_decrypt_file(GF_ISOFile *mp4, const char *drm_file) @@ -1500,6 +1758,9 @@ GF_Err gf_decrypt_file(GF_ISOFile *mp4, const char *drm_file) tci.enc_type = 3; gf_decrypt_track = gf_cenc_decrypt_track; break; + case 4: + gf_decrypt_track = gf_adobe_decrypt_track; + break; default: GF_LOG(GF_LOG_ERROR, GF_LOG_AUTHOR, ("[CENC/ISMA] Encryption type not supported\n")); return GF_NOT_SUPPORTED;; @@ -1514,7 +1775,7 @@ GF_Err gf_decrypt_file(GF_ISOFile *mp4, const char *drm_file) } KMS_URI = "OMA DRM"; is_oma = 1; - } else if (!gf_isom_is_cenc_media(mp4, i+1, 1)){ + } else if (!gf_isom_is_cenc_media(mp4, i+1, 1) && !gf_isom_is_adobe_protection_media(mp4, i+1, 1)){ GF_LOG(GF_LOG_WARNING, GF_LOG_AUTHOR, ("[CENC/ISMA] TrackID %d encrypted with unknown scheme %s - skipping\n", trackID, gf_4cc_to_str(scheme_type) )); continue; } @@ -1752,6 +2013,9 @@ GF_Err gf_crypt_file(GF_ISOFile *mp4, const char *drm_file) tci->enc_type = 3; gf_encrypt_track = gf_cenc_encrypt_track; break; + case 4: + gf_encrypt_track = gf_adobe_encrypt_track; + break; default: GF_LOG(GF_LOG_ERROR, 
GF_LOG_AUTHOR, ("[CENC/ISMA] Encryption type not sopported\n")); return GF_NOT_SUPPORTED;; diff --git a/src/media_tools/isom_tools.c b/src/media_tools/isom_tools.c index b6d356a..b8f419e 100644 --- a/src/media_tools/isom_tools.c +++ b/src/media_tools/isom_tools.c @@ -97,6 +97,42 @@ GF_Err gf_media_change_par(GF_ISOFile *file, u32 track, s32 ar_num, s32 ar_den) } return gf_isom_set_track_layout_info(file, track, tk_w<<16, tk_h<<16, 0, 0, 0); } + +GF_EXPORT +GF_Err gf_media_remove_non_rap(GF_ISOFile *file, u32 track) +{ + GF_Err e; + u32 i, count, di; + u64 offset, dur, last_dts; + Bool all_raps = (gf_isom_has_sync_points(file, track)==0) ? 1 : 0; + if (all_raps) return GF_OK; + + last_dts = 0; + dur = gf_isom_get_media_duration(file, track); + + gf_isom_set_cts_packing(file, track, 1); + + count = gf_isom_get_sample_count(file, track); + for (i=0; iIsRAP) { + last_dts = samp->DTS; + gf_isom_sample_del(&samp); + continue; + } + gf_isom_sample_del(&samp); + e = gf_isom_remove_sample(file, track, i+1); + if (e) return e; + i--; + count--; + } + gf_isom_set_cts_packing(file, track, 0); + gf_isom_set_last_sample_duration(file, track, (u32) (dur - last_dts) ); + return GF_OK; +} + #endif /*GPAC_DISABLE_ISOM_WRITE*/ GF_EXPORT @@ -1907,7 +1943,10 @@ static GF_HEVCParamArray *alloc_hevc_param_array(GF_HEVCConfig *hevc_cfg, u8 typ GF_SAFEALLOC(ar, GF_HEVCParamArray); ar->nalus = gf_list_new(); ar->type = type; - gf_list_add(hevc_cfg->param_array, ar); + if (ar->type == GF_HEVC_NALU_VID_PARAM) + gf_list_insert(hevc_cfg->param_array, ar, 0); + else + gf_list_add(hevc_cfg->param_array, ar); return ar; } @@ -1926,7 +1965,7 @@ GF_Err gf_media_split_shvc(GF_ISOFile *file, u32 track, Bool splitAll, Bool use_ { SHVCTrackInfo sti[64]; GF_HEVCConfig *hevccfg, *shvccfg; - u32 i, count, cur_extract_mode, j, k; + u32 i, count, cur_extract_mode, j, k, max_layer_id; char *nal_data=NULL; u32 nal_alloc_size; GF_Err e = GF_OK; @@ -1943,7 +1982,7 @@ GF_Err gf_media_split_shvc(GF_ISOFile *file, u32 track, Bool splitAll, Bool use_ memset(sti, 0, sizeof(sti)); sti[0].track_num = track; - + max_layer_id = 0; //split all SPS/PPS/VPS from svccfg count = gf_list_count(shvccfg->param_array); for (i=0; iparam_array; + memcpy(sti[layer_id].shvccfg , shvccfg ? shvccfg : hevccfg, sizeof(GF_HEVCConfig)); + sti[layer_id].shvccfg->param_array = backup_list; + sti[layer_id].shvccfg->is_shvc = 1; sti[layer_id].shvccfg->complete_representation = 1; sti[layer_id].shvccfg->num_layers = 1; @@ -1976,7 +2023,40 @@ GF_Err gf_media_split_shvc(GF_ISOFile *file, u32 track, Bool splitAll, Bool use_ count2--; } } - //remove shvc config + + //CLARIFY wether this is correct: we duplicate all VPS in the enhancement layer ... 
+ //we do this because if we split the tracks some info for setting up the enhancement layer + //is in the VPS + count = gf_list_count(hevccfg->param_array); + for (i=0; i<count; i++) { + u32 count2; + GF_HEVCParamArray *s_ar; + GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(hevccfg->param_array, i); + if (ar->type != GF_HEVC_NALU_VID_PARAM) continue; + count2 = gf_list_count(ar->nalus); + for (j=0; j<count2; j++) { + GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j); + u8 layer_id = ((sl->data[0] & 0x1) << 5) | (sl->data[1] >> 3); + if (layer_id) continue; + + for (k=0; k <= max_layer_id; k++) { + GF_AVCConfigSlot *sl2; + if (!sti[k].shvccfg) continue; + + s_ar = alloc_hevc_param_array(sti[k].shvccfg, ar->type); + s_ar->array_completeness = ar->array_completeness; + + GF_SAFEALLOC(sl2, GF_AVCConfigSlot); + sl2->data = gf_malloc(sl->size); + memcpy(sl2->data, sl->data, sl->size); + sl2->id = sl->id; + sl2->size = sl->size; + gf_list_add(s_ar->nalus, sl2); + } + } + } + + //update shvc config e = gf_isom_shvc_config_update(file, track, 1, NULL, 0); if (e) goto exit; @@ -2051,6 +2131,9 @@ GF_Err gf_media_split_shvc(GF_ISOFile *file, u32 track, Bool splitAll, Bool use_ if (e) goto exit; gf_isom_set_track_reference(file, sti[j].track_num, GF_4CC('s','b','a','s'), track_id); + + gf_isom_set_nalu_extract_mode(file, sti[j].track_num, GF_ISOM_NALU_EXTRACT_INSPECT); + //get lower layer for (k=j; k>0; k--) { if (sti[k-1].track_num) { @@ -2143,7 +2226,7 @@ GF_Err gf_media_split_shvc(GF_ISOFile *file, u32 track, Bool splitAll, Bool use_ exit: //reset all scalable info - for (j=0; j<64; j++) { + for (j=0; j<=max_layer_id; j++) { if (sti[j].shvccfg) gf_odf_hevc_cfg_del(sti[j].shvccfg); } gf_isom_set_nalu_extract_mode(file, track, cur_extract_mode); @@ -2250,8 +2333,7 @@ GF_Err gf_media_split_hevc_tiles(GF_ISOFile *file) GF_BitStream *bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); GF_ISOSample *sample = gf_isom_get_sample(file, track, i+1, &di); - - data = sample->data; + data = (u8 *) sample->data; size = sample->dataLength; sample->data = NULL; sample->dataLength = 0; @@ -2261,7 +2343,7 @@ GF_Err gf_media_split_hevc_tiles(GF_ISOFile *file) if (e) goto err_exit; } - sample->data = data; + sample->data = (char *) data; cur_tile = 0; while (size) { @@ -2287,13 +2369,13 @@ GF_Err gf_media_split_hevc_tiles(GF_ISOFile *file) case GF_HEVC_NALU_SLICE_RADL_R: case GF_HEVC_NALU_SLICE_RASL_R: //ret = hevc_parse_slice_segment(bs, hevc, &n_state); - e = gf_isom_append_sample_data(file, tiles_track[cur_tile], data, nalu_size + nalu_size_length); + e = gf_isom_append_sample_data(file, tiles_track[cur_tile], (char *) data, nalu_size + nalu_size_length); if (e) goto err_exit; cur_tile++; break; default: - gf_bs_write_data(bs, data, nalu_size + nalu_size_length); + gf_bs_write_data(bs, (char *) data, nalu_size + nalu_size_length); break; } data += nalu_size + nalu_size_length; @@ -2482,7 +2564,7 @@ GF_Err gf_media_fragment_file(GF_ISOFile *input, const char *output_file, Double if (e) goto err_exit; /*copy subsample information*/ - e = gf_isom_fragment_copy_subsample(output, tf->TrackID, input, tf->OriginalTrack, tf->SampleNum + 1); + e = gf_isom_fragment_copy_subsample(output, tf->TrackID, input, tf->OriginalTrack, tf->SampleNum + 1, GF_FALSE); if (e) goto err_exit; @@ -2576,7 +2658,7 @@ GF_Err gf_media_split_tiles(GF_ISOFile *file) if (! 
hevc.pps[pps_idx].tiles_enabled_flag) return GF_OK; nb_tracks = hevc.pps[pps_idx].num_tile_columns * hevc.pps[pps_idx].num_tile_rows; - tiles_track = malloc(sizeof(u32) * nb_tracks); + tiles_track = gf_malloc(sizeof(u32) * nb_tracks); for (i=0; idata; + data = (u8 *) sample->data; size = sample->dataLength; sample->data = NULL; sample->dataLength = 0; @@ -2621,10 +2703,10 @@ GF_Err gf_media_split_tiles(GF_ISOFile *file) case GF_HEVC_NALU_SLICE_RADL_R: case GF_HEVC_NALU_SLICE_RASL_R: //ret = hevc_parse_slice_segment(bs, hevc, &n_state); - gf_isom_append_sample_data(file, track, data, nalu_size + nalu_size_length); + gf_isom_append_sample_data(file, track, (char *) data, nalu_size + nalu_size_length); break; default: - gf_bs_write_data(bs, data, nalu_size + nalu_size_length); + gf_bs_write_data(bs, (char *) data, nalu_size + nalu_size_length); break; } data += nalu_size + nalu_size_length; diff --git a/src/media_tools/m2ts_mux.c b/src/media_tools/m2ts_mux.c index e01f2f2..c023363 100644 --- a/src/media_tools/m2ts_mux.c +++ b/src/media_tools/m2ts_mux.c @@ -50,6 +50,9 @@ static GFINLINE Bool gf_m2ts_time_less(GF_M2TS_Time *a, GF_M2TS_Time *b) { if (a->sec==b->sec) return (a->nanosecnanosec) ? 1 : 0; return 1; } +static GFINLINE Bool gf_m2ts_time_equal(GF_M2TS_Time *a, GF_M2TS_Time *b) { + return ((a->sec==b->sec) && (a->nanosec == b->nanosec) ); +} static GFINLINE Bool gf_m2ts_time_less_or_equal(GF_M2TS_Time *a, GF_M2TS_Time *b) { if (a->sec>b->sec) return 0; if (a->sec==b->sec) return (a->nanosec>b->nanosec) ? 0 : 1; @@ -730,18 +733,18 @@ u32 gf_m2ts_stream_process_pmt(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream) case GF_M2TS_AUDIO_AC3: gf_bs_write_int(bs, GF_M2TS_REGISTRATION_DESCRIPTOR, 8); gf_bs_write_int(bs, 4, 8); - gf_bs_write_int(bs, 0x41, 8); - gf_bs_write_int(bs, 0x43, 8); - gf_bs_write_int(bs, 0x2D, 8); - gf_bs_write_int(bs, 0x33, 8); + gf_bs_write_int(bs, 'A', 8); + gf_bs_write_int(bs, 'C', 8); + gf_bs_write_int(bs, '-', 8); + gf_bs_write_int(bs, '3', 8); break; case GF_M2TS_VIDEO_VC1: gf_bs_write_int(bs, GF_M2TS_REGISTRATION_DESCRIPTOR, 8); gf_bs_write_int(bs, 4, 8); - gf_bs_write_int(bs, 0x56, 8); - gf_bs_write_int(bs, 0x43, 8); - gf_bs_write_int(bs, 0x2D, 8); - gf_bs_write_int(bs, 0x31, 8); + gf_bs_write_int(bs, 'V', 8); + gf_bs_write_int(bs, 'C', 8); + gf_bs_write_int(bs, '-', 8); + gf_bs_write_int(bs, '1', 8); break; case GF_M2TS_AUDIO_EC3: gf_bs_write_int(bs, GF_M2TS_DVB_EAC3_DESCRIPTOR, 8); @@ -780,7 +783,7 @@ u32 gf_m2ts_stream_process_pmt(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream) return 0; } -static void gf_m2ts_remap_timestamps_for_pes(GF_M2TS_Mux_Stream *stream, u32 pck_flags, u64 *dts, u64 *cts) +static void gf_m2ts_remap_timestamps_for_pes(GF_M2TS_Mux_Stream *stream, u32 pck_flags, u64 *dts, u64 *cts, u32 *duration) { u64 pcr_offset; @@ -788,6 +791,8 @@ static void gf_m2ts_remap_timestamps_for_pes(GF_M2TS_Mux_Stream *stream, u32 pck if (stream->ts_scale) { *cts = (u64) (stream->ts_scale * (s64) *cts); *dts = (u64) (stream->ts_scale * (s64) *dts); + if (duration) *duration = (u32) (stream->ts_scale * (u32) *duration); + } if (!stream->program->initial_ts_set) { u32 nb_bits = (u32) (stream->program->mux->tot_pck_sent - stream->program->num_pck_at_pcr_init) * 1504; @@ -808,6 +813,8 @@ static void gf_m2ts_remap_timestamps_for_pes(GF_M2TS_Mux_Stream *stream, u32 pck else if (*dts < stream->last_dts) { GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[MPEG-2 TS Muxer] PID %d: DTS "LLD" is less than last sent DTS "LLD"\n", stream->pid, *dts, stream->last_dts)); 
stream->last_dts = *dts; + } else { + stream->last_dts = *dts; } /*offset our timestamps*/ @@ -820,6 +827,28 @@ static void gf_m2ts_remap_timestamps_for_pes(GF_M2TS_Mux_Stream *stream, u32 pck *dts = *dts - stream->program->initial_ts + pcr_offset; } +static void id3_tag_create(char **input, u32 *len) +{ + GF_BitStream *bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); + gf_bs_write_u8(bs, 'I'); + gf_bs_write_u8(bs, 'D'); + gf_bs_write_u8(bs, '3'); + gf_bs_write_u8(bs, 4); + gf_bs_write_u8(bs, 0); + gf_bs_write_int(bs, 0, 1); + gf_bs_write_int(bs, 0, 1); + gf_bs_write_int(bs, 0, 1); + gf_bs_write_int(bs, 0x1F, 5); + gf_bs_write_u32(bs, GF_4CC('T','X','X','X')); + gf_bs_write_u32(bs, *len); /* size of the text */ + gf_bs_write_u8(bs, 0); + gf_bs_write_u8(bs, 0); + gf_bs_write_data(bs, *input, *len); + gf_free(*input); + gf_bs_get_content(bs, input, len); + gf_bs_del(bs); +} + u32 gf_m2ts_stream_process_stream(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream) { Bool ret = 0; @@ -880,6 +909,7 @@ u32 gf_m2ts_stream_process_stream(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream stream->curr_pck.data = curr_pck->data; stream->curr_pck.data_len = curr_pck->data_len; stream->curr_pck.dts = curr_pck->dts; + stream->curr_pck.duration = curr_pck->duration; stream->curr_pck.flags = curr_pck->flags; stream->curr_pck.mpeg2_af_descriptors = curr_pck->mpeg2_af_descriptors; stream->curr_pck.mpeg2_af_descriptors_size = curr_pck->mpeg2_af_descriptors_size; @@ -896,15 +926,15 @@ u32 gf_m2ts_stream_process_stream(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream stream->curr_pck.dts = stream->curr_pck.cts; /*initializing the PCR*/ - if (!stream->program->pcr_init_time) { + if (!stream->program->pcr_init_time_set) { if (stream==stream->program->pcr) { if (stream->program->mux->init_pcr_value) { - stream->program->pcr_init_time = stream->program->mux->init_pcr_value; + stream->program->pcr_init_time = stream->program->mux->init_pcr_value-1; } else { while (!stream->program->pcr_init_time) stream->program->pcr_init_time = gf_rand(); } - + stream->program->pcr_init_time_set = 1; stream->program->ts_time_at_pcr_init = muxer->time; stream->program->num_pck_at_pcr_init = muxer->tot_pck_sent; @@ -1072,6 +1102,13 @@ u32 gf_m2ts_stream_process_stream(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream /*since we reallocated the packet data buffer, force a discard in pull mode*/ stream->discard_data = 1; break; + case GF_M2TS_METADATA_PES: + case GF_M2TS_METADATA_ID3_HLS: + { + id3_tag_create(&stream->curr_pck.data, &stream->curr_pck.data_len); + stream->discard_data = 1; + } + break; } if (stream->start_pes_at_rap && (stream->curr_pck.flags & GF_ESI_DATA_AU_RAP) @@ -1081,7 +1118,7 @@ u32 gf_m2ts_stream_process_stream(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream } /*rewrite timestamps for PES header*/ - gf_m2ts_remap_timestamps_for_pes(stream, stream->curr_pck.flags, &stream->curr_pck.dts, &stream->curr_pck.cts); + gf_m2ts_remap_timestamps_for_pes(stream, stream->curr_pck.flags, &stream->curr_pck.dts, &stream->curr_pck.cts, &stream->curr_pck.duration); /*compute next interesting time in TS unit: this will be DTS of next packet*/ @@ -1106,7 +1143,7 @@ u32 gf_m2ts_stream_process_stream(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream stream->bytes_since_last_time = 0; stream->pes_since_last_time = 0; } else { - u32 time_diff = (u32) (stream->curr_pck.dts - stream->last_br_time - 1 ); + u32 time_diff = (u32) (stream->curr_pck.dts + 1 - stream->last_br_time ); if ((stream->pes_since_last_time > 4) && (time_diff >= BITRATE_UPDATE_WINDOW)) { 
u32 bitrate; u64 r = 8*stream->bytes_since_last_time; @@ -1119,6 +1156,22 @@ u32 gf_m2ts_stream_process_stream(GF_M2TS_Mux *muxer, GF_M2TS_Mux_Stream *stream stream->program->mux->needs_reconfig = 1; } } + + /* in live with no fixed target rate, we have to always compute the bitrate in case we have a peak (IDR), otherwise the mux time will increase too fast and we will send packets way too fast + this is not perfect, we may end up with a too high stream rate and the mux time will increase too slowly, hence packet will be late*/ + if (stream->program->mux->real_time && !stream->program->mux->fixed_rate && stream->curr_pck.duration) { + u64 inst_rate; + inst_rate = 8*stream->curr_pck.data_len; + inst_rate *= 90000; + inst_rate /= stream->curr_pck.duration; + inst_rate /= 8; + if (inst_rate>stream->bit_rate) + { + stream->bit_rate = (u32) inst_rate; + stream->program->mux->needs_reconfig = 1; + } + } + stream->pes_since_last_time ++; return stream->scheduling_priority + stream->pcr_priority; } @@ -1193,7 +1246,7 @@ void gf_m2ts_stream_update_data_following(GF_M2TS_Mux_Stream *stream) if (stream->next_payload_size) { stream->next_payload_size += stream->reframe_overhead; - gf_m2ts_remap_timestamps_for_pes(stream, stream->next_pck_flags, &stream->next_pck_dts, &stream->next_pck_cts); + gf_m2ts_remap_timestamps_for_pes(stream, stream->next_pck_flags, &stream->next_pck_dts, &stream->next_pck_cts, NULL); } } @@ -1307,7 +1360,7 @@ static u32 gf_m2ts_stream_get_pes_header_length(GF_M2TS_Mux_Stream *stream) return hdr_len; } -u32 gf_m2ts_stream_add_pes_header(GF_BitStream *bs, GF_M2TS_Mux_Stream *stream, u32 payload_length) +u32 gf_m2ts_stream_add_pes_header(GF_BitStream *bs, GF_M2TS_Mux_Stream *stream) { u64 t, dts, cts; u32 pes_len; @@ -1381,7 +1434,7 @@ u32 gf_m2ts_stream_add_pes_header(GF_BitStream *bs, GF_M2TS_Mux_Stream *stream, gf_bs_write_long_int(bs, t, 15); gf_bs_write_int(bs, 1, 1); // marker bit } - GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS Muxer] PID %d: Adding PES header at PCR "LLD" - has PTS %d (%d) - has DTS %d (%d)\n", stream->pid, gf_m2ts_get_pcr(stream->program)/300, use_pts, cts, use_dts, dts)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS Muxer] PID %d: Adding PES header at PCR "LLD" - has PTS %d ("LLU") - has DTS %d ("LLU") - Payload length %d\n", stream->pid, gf_m2ts_get_pcr(stream->program)/300, use_pts, cts, use_dts, dts, pes_len)); return pes_len+4; // 4 = start code + stream_id } @@ -1428,7 +1481,7 @@ void gf_m2ts_mux_pes_get_next_packet(GF_M2TS_Mux_Stream *stream, char *packet) adaptation_field_control = GF_M2TS_ADAPTATION_AND_PAYLOAD; } if (stream->curr_pck.mpeg2_af_descriptors) { - if (adaptation_field_control != GF_M2TS_ADAPTATION_AND_PAYLOAD) { + if (adaptation_field_control == GF_M2TS_ADAPTATION_NONE) { payload_length -= 2; //AF header but no PCR adaptation_field_control = GF_M2TS_ADAPTATION_AND_PAYLOAD; } @@ -1469,8 +1522,10 @@ void gf_m2ts_mux_pes_get_next_packet(GF_M2TS_Mux_Stream *stream, char *packet) else { /*AF headers*/ if (!needs_pcr) { - payload_length -= 2; - adaptation_field_control = GF_M2TS_ADAPTATION_AND_PAYLOAD; + if (adaptation_field_control == GF_M2TS_ADAPTATION_NONE) { + payload_length -= 2; + adaptation_field_control = GF_M2TS_ADAPTATION_AND_PAYLOAD; + } } /*cannot add adaptation field for this TS packet with this payload, we need to split in 2 TS packets*/ if (payload_length < payload_to_copy + copy_next) { @@ -1529,7 +1584,7 @@ void gf_m2ts_mux_pes_get_next_packet(GF_M2TS_Mux_Stream *stream, char *packet) 
stream->program->mux->tot_pes_pad_bytes += padding_length; } - if (hdr_len) gf_m2ts_stream_add_pes_header(bs, stream, payload_length); + if (hdr_len) gf_m2ts_stream_add_pes_header(bs, stream); pos = (u32) gf_bs_get_position(bs); gf_bs_del(bs); @@ -1544,14 +1599,18 @@ void gf_m2ts_mux_pes_get_next_packet(GF_M2TS_Mux_Stream *stream, char *packet) // gf_m2ts_time_inc(&stream->time, payload_to_copy + pos - 4, stream->bit_rate); if (stream->pck_offset == stream->curr_pck.data_len) { + if (stream->program->mux->real_time && !stream->program->mux->fixed_rate && gf_m2ts_time_less(&stream->time, &stream->program->mux->time) ) { + GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[MPEG2-TS Muxer] Done sending PES from PID %d TOO LATE: stream time %d:%d - mux time %d:%d (current mux rate %d) \n", stream->pid, stream->time.sec, stream->time.nanosec, stream->program->mux->time.sec, stream->program->mux->time.nanosec, stream->program->mux->bit_rate)); + } else { + GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[MPEG2-TS Muxer] Done sending PES (%d bytes) from PID %d at stream time %d:%d (DTS "LLD" - PCR "LLD")\n", stream->curr_pck.data_len, stream->pid, stream->time.sec, stream->time.nanosec, stream->curr_pck.dts, gf_m2ts_get_pcr(stream->program)/300)); + } + /*PES has been sent, discard internal buffer*/ if (stream->discard_data) gf_free(stream->curr_pck.data); stream->curr_pck.data = NULL; stream->curr_pck.data_len = 0; stream->pck_offset = 0; - GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[MPEG2-TS Muxer] Done sending PES (%d bytes) from PID %d at stream time %d:%d (DTS "LLD" - PCR "LLD")\n", stream->curr_pck.data_len, stream->pid, stream->time.sec, stream->time.nanosec, stream->curr_pck.dts, gf_m2ts_get_pcr(stream->program)/300)); - #ifndef GPAC_DISABLE_LOG if (gf_log_tool_level_on(GF_LOG_CONTAINER, GF_LOG_INFO) && gf_m2ts_time_less(&stream->program->mux->time, &stream->time) @@ -1662,6 +1721,7 @@ GF_Err gf_m2ts_output_ctrl(GF_ESInterface *_self, u32 ctrl_type, void *param) GF_SAFEALLOC(stream->pck_reassembler, GF_M2TS_Packet); stream->pck_reassembler->cts = esi_pck->cts; stream->pck_reassembler->dts = esi_pck->dts; + stream->pck_reassembler->duration = esi_pck->duration; if (esi_pck->mpeg2_af_descriptors) { stream->pck_reassembler->mpeg2_af_descriptors = gf_realloc(stream->pck_reassembler->mpeg2_af_descriptors, sizeof(u8)* (stream->pck_reassembler->mpeg2_af_descriptors_size + esi_pck->mpeg2_af_descriptors_size) ); memcpy(stream->pck_reassembler->mpeg2_af_descriptors + stream->pck_reassembler->mpeg2_af_descriptors_size, esi_pck->mpeg2_af_descriptors, sizeof(u8)* esi_pck->mpeg2_af_descriptors_size ); @@ -1760,6 +1820,51 @@ static void gf_m2ts_stream_add_hierarchy_descriptor(GF_M2TS_Mux_Stream *stream) gf_list_add(stream->loop_descriptors, desc); } +static void gf_m2ts_stream_add_metadata_pointer_descriptor(GF_M2TS_Mux_Program *program) +{ + GF_M2TSDescriptor *desc; + GF_BitStream *bs; + u32 data_len; + bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); + gf_bs_write_u16(bs, 0xFFFF); + gf_bs_write_u32(bs, GF_4CC('I','D','3',' ')); + gf_bs_write_u8(bs, 0xFF); + gf_bs_write_u32(bs, GF_4CC('I','D','3',' ')); + gf_bs_write_u8(bs, 0); /* service id */ + gf_bs_write_int(bs, 0, 1); /* locator */ + gf_bs_write_int(bs, 0, 2); /* carriage flags */ + gf_bs_write_int(bs, 0x1F, 5); /* reserved */ + gf_bs_write_u16(bs, program->number); + GF_SAFEALLOC(desc, GF_M2TSDescriptor); + desc->tag = (u8) GF_M2TS_METADATA_POINTER_DESCRIPTOR; + gf_bs_get_content(bs, &desc->data, &data_len); + gf_bs_del(bs); + desc->data_len = (u8) data_len; + 
gf_list_add(program->loop_descriptors, desc); +} + +static void gf_m2ts_stream_add_metadata_descriptor(GF_M2TS_Mux_Stream *stream) +{ + GF_M2TSDescriptor *desc; + GF_BitStream *bs; + u32 data_len; + bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE); + gf_bs_write_u16(bs, 0xFFFF); + gf_bs_write_u32(bs, GF_4CC('I','D','3',' ')); + gf_bs_write_u8(bs, 0xFF); + gf_bs_write_u32(bs, GF_4CC('I','D','3',' ')); + gf_bs_write_u8(bs, 0); /* service id */ + gf_bs_write_int(bs, 0, 3); /* decoder config flags */ + gf_bs_write_int(bs, 0, 1); /* dsmcc flag */ + gf_bs_write_int(bs, 0xF, 4); /* reserved */ + GF_SAFEALLOC(desc, GF_M2TSDescriptor); + desc->tag = (u8) GF_M2TS_METADATA_DESCRIPTOR; + gf_bs_get_content(bs, &desc->data, &data_len); + gf_bs_del(bs); + desc->data_len = (u8) data_len; + gf_list_add(stream->loop_descriptors, desc); +} + GF_EXPORT GF_M2TS_Mux_Stream *gf_m2ts_program_stream_add(GF_M2TS_Mux_Program *program, struct __elementary_stream_ifce *ifce, u32 pid, Bool is_pcr, Bool force_pes) { @@ -1897,9 +2002,13 @@ GF_M2TS_Mux_Stream *gf_m2ts_program_stream_add(GF_M2TS_Mux_Program *program, str stream->mpeg2_stream_type = GF_M2TS_SYSTEMS_MPEG4_SECTIONS; } break; + case GF_STREAM_TEXT: + stream->mpeg2_stream_id = 0xBD; + stream->mpeg2_stream_type = GF_M2TS_METADATA_PES; + gf_m2ts_stream_add_metadata_pointer_descriptor(stream->program); + gf_m2ts_stream_add_metadata_descriptor(stream); } - if (! (ifce->caps & GF_ESI_STREAM_WITHOUT_MPEG4_SYSTEMS)) { /*override signaling for all streams except BIFS/OD, to use MPEG-4 PES*/ if (program->mpeg4_signaling==GF_M2TS_MPEG4_SIGNALING_FULL) { @@ -1938,7 +2047,8 @@ GF_M2TS_Mux_Program *gf_m2ts_mux_program_add(GF_M2TS_Mux *muxer, u32 program_num program->mux = muxer; program->mpeg4_signaling = mpeg4_signaling; program->pcr_offset = pcr_offset; - + program->loop_descriptors = gf_list_new(); + program->number = program_number; if (muxer->programs) { GF_M2TS_Mux_Program *p = muxer->programs; @@ -2138,7 +2248,7 @@ GF_EXPORT GF_Err gf_m2ts_mux_set_initial_pcr(GF_M2TS_Mux *muxer, u64 init_pcr_value) { if (!muxer) return GF_BAD_PARAM; - muxer->init_pcr_value = init_pcr_value; + muxer->init_pcr_value = 1 + init_pcr_value; return GF_OK; } @@ -2161,7 +2271,7 @@ const char *gf_m2ts_mux_process(GF_M2TS_Mux *muxer, u32 *status, u32 *usec_till_ if (!muxer->init_sys_time) { //init TS time muxer->time.sec = muxer->time.nanosec = 0; - gf_m2ts_time_inc(&muxer->time, (u32) muxer->init_pcr_value, 27000000); + gf_m2ts_time_inc(&muxer->time, (u32) (muxer->init_pcr_value ? 
muxer->init_pcr_value-1 : 0), 27000000); muxer->init_sys_time = now; muxer->init_ts_time = muxer->time; } else { @@ -2291,9 +2401,9 @@ const char *gf_m2ts_mux_process(GF_M2TS_Mux *muxer, u32 *status, u32 *usec_till_ if (!flush_all_pes && muxer->force_pat) return gf_m2ts_mux_process(muxer, status, usec_till_next); - if (res && gf_m2ts_time_less_or_equal(&stream->time, &time)) { - /*if same priority schedule the earliest data*/ - if (res>=highest_priority) { + if (res) { + /*always schedule the earliest data*/ + if (gf_m2ts_time_less(&stream->time, &time)) { highest_priority = res; time = stream->time; stream_to_process = stream; @@ -2301,6 +2411,17 @@ const char *gf_m2ts_mux_process(GF_M2TS_Mux *muxer, u32 *status, u32 *usec_till_ goto send_pck; #endif } + else if (gf_m2ts_time_equal(&stream->time, &time)) { + /*if the same priority schedule base stream first*/ + if ((res > highest_priority) || ((res == highest_priority) && !stream->ifce->depends_on_stream)){ + highest_priority = res; + time = stream->time; + stream_to_process = stream; +#if FORCE_PCR_FIRST + goto send_pck; +#endif + } + } } } nb_streams++; diff --git a/src/media_tools/media_export.c b/src/media_tools/media_export.c index 651b204..6b546ca 100644 --- a/src/media_tools/media_export.c +++ b/src/media_tools/media_export.c @@ -454,6 +454,7 @@ GF_Err gf_media_export_samples(GF_MediaExporter *dumper) if (is_mj2k) write_jp2_file(bs, samp->data, samp->dataLength, dsi, dsi_size); else { +#ifndef GPAC_DISABLE_TTXT if (is_wvtt) { GF_Err e; e = gf_webvtt_dump_header(out, dumper->file, track, 1); @@ -462,7 +463,9 @@ GF_Err gf_media_export_samples(GF_MediaExporter *dumper) u32 timescale = gf_isom_get_media_timescale(dumper->file, track); gf_webvtt_dump_iso_sample(out, timescale, samp); } - } else { + } else +#endif + { gf_bs_write_data(bs, samp->data, samp->dataLength); } } @@ -503,6 +506,7 @@ GF_Err gf_media_export_samples(GF_MediaExporter *dumper) if (is_mj2k) write_jp2_file(bs, samp->data, samp->dataLength, dsi, dsi_size); else { +#ifndef GPAC_DISABLE_TTXT if (is_wvtt) { GF_Err e; e = gf_webvtt_dump_header(out, dumper->file, track, 1); @@ -511,7 +515,9 @@ GF_Err gf_media_export_samples(GF_MediaExporter *dumper) u32 timescale = gf_isom_get_media_timescale(dumper->file, track); gf_webvtt_dump_iso_sample(out, timescale, samp); } - } else { + } else +#endif + { gf_bs_write_data(bs, samp->data, samp->dataLength); } } @@ -972,8 +978,12 @@ GF_Err gf_media_export_native(GF_MediaExporter *dumper) if (is_vobsub) return gf_dump_to_vobsub(dumper, szName, track, dsi, dsi_size); if (is_webvtt) { +#ifndef GPAC_DISABLE_TTXT GF_Err gf_webvtt_dump_iso_track(GF_MediaExporter *dumper, char *szName, u32 track, Bool merge); return gf_webvtt_dump_iso_track(dumper, szName, track, (dumper->flags & GF_EXPORT_WEBVTT_NOMERGE? 
GF_FALSE : GF_TRUE)); +#else + return GF_NOT_SUPPORTED; +#endif } if (qcp_type>1) { @@ -2221,6 +2231,7 @@ GF_Err gf_media_export_webvtt_metadata(GF_MediaExporter *dumper) if (layer) fprintf(vtt, "layer:%d\n", layer); } if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { +#ifndef GPAC_DISABLE_TTXT if (isText) { if (mstype == GF_ISOM_SUBTYPE_WVTT) { /* Warning: Just use -raw export */ @@ -2243,7 +2254,9 @@ GF_Err gf_media_export_webvtt_metadata(GF_MediaExporter *dumper) gf_webvtt_dump_header_boxed(med, esd->decoderConfig->decoderSpecificInfo->data+4, esd->decoderConfig->decoderSpecificInfo->dataLength, &headerLength); fprintf(vtt, "text-header-length: %d\n", headerLength); } - } else { + } else +#endif + { char b64[200]; u32 size = gf_base64_encode(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, b64, 200); useBase64 = GF_TRUE; @@ -2303,6 +2316,7 @@ GF_Err gf_media_export_webvtt_metadata(GF_MediaExporter *dumper) GF_ISOSample *samp = gf_isom_get_sample(dumper->file, track, i+1, &di); if (!samp) break; +#ifndef GPAC_DISABLE_TTXT { GF_WebVTTTimestamp start, end; u64 dur = gf_isom_get_sample_duration(dumper->file, track, i+1); @@ -2322,6 +2336,7 @@ GF_Err gf_media_export_webvtt_metadata(GF_MediaExporter *dumper) else fprintf(vtt, "isRAP:false "); fprintf(vtt, "\n"); } +#endif if (med) { gf_fwrite(samp->data, samp->dataLength, 1, med); } else if (dumper->flags & GF_EXPORT_WEBVTT_META_EMBEDDED) { @@ -2411,12 +2426,15 @@ GF_Err gf_media_export_six(GF_MediaExporter *dumper) header_size = 0; if (esd) { if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) { +#ifndef GPAC_DISABLE_TTXT if (mstype == GF_ISOM_SUBTYPE_WVTT || mstype == GF_ISOM_SUBTYPE_STSE) { gf_webvtt_dump_header_boxed(media, esd->decoderConfig->decoderSpecificInfo->data+4, esd->decoderConfig->decoderSpecificInfo->dataLength, &header_size); - } else { + } else +#endif + { gf_fwrite(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, 1, media); header_size = esd->decoderConfig->decoderSpecificInfo->dataLength; } @@ -2693,6 +2711,10 @@ GF_Err gf_media_export_ts_native(GF_MediaExporter *dumper) strcat(szFile, ".264"); gf_export_message(dumper, GF_OK, "Extracting H264-SVC Visual stream to h264"); break; + case GF_M2TS_METADATA_ID3_HLS: + strcat(szFile, ".txt"); + gf_export_message(dumper, GF_OK, "Extracting ID3 tags from metadata stream"); + break; default: strcat(szFile, ".raw"); gf_export_message(dumper, GF_OK, "Extracting Unknown stream to raw"); diff --git a/src/media_tools/media_import.c b/src/media_tools/media_import.c index 3a2f4ea..f6ccfe6 100644 --- a/src/media_tools/media_import.c +++ b/src/media_tools/media_import.c @@ -782,7 +782,7 @@ GF_Err gf_import_aac_loas(GF_MediaImporter *import) samp = gf_isom_sample_new(); samp->IsRAP = 1; samp->dataLength = nbbytes; - samp->data = aac_buf; + samp->data = (char *) aac_buf; e = gf_isom_add_sample(import->dest, track, di, samp); if (e) goto exit; @@ -4274,6 +4274,8 @@ restart_import: e = gf_isom_avc_config_new(import->dest, track, avccfg, NULL, NULL, &di); if (e) goto exit; + gf_isom_set_nalu_extract_mode(import->dest, track, GF_ISOM_NALU_EXTRACT_INSPECT); + sample_data = NULL; sample_is_rap = 0; sample_has_islice = 0; @@ -5064,6 +5066,43 @@ static GF_HEVCParamArray *get_hevc_param_array(GF_HEVCConfig *hevc_cfg, u8 type) } return NULL; } + + +static void hevc_set_parall_type(GF_HEVCConfig *hevc_cfg) +{ + u32 
use_tiles, use_wpp, nb_pps; + HEVCState hevc; + GF_HEVCParamArray *ar = get_hevc_param_array(hevc_cfg, GF_HEVC_NALU_PIC_PARAM); + u32 i, count = gf_list_count(ar->nalus); + + memset(&hevc, 0, sizeof(HEVCState)); + hevc.sps_active_idx = -1; + + use_tiles = 0; + use_wpp = 0; + nb_pps = 0; + + + for (i=0; i<count; i++) { + HEVC_PPS *pps; + GF_AVCConfigSlot *slc = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, i); + s32 idx = gf_media_hevc_read_pps(slc->data, slc->size, &hevc); + + if (idx>=0) { + nb_pps++; + pps = &hevc.pps[idx]; + if (!pps->entropy_coding_sync_enabled_flag && pps->tiles_enabled_flag) + use_tiles++; + else if (pps->entropy_coding_sync_enabled_flag && !pps->tiles_enabled_flag) + use_wpp++; + } + } + if (!use_tiles && !use_wpp) hevc_cfg->parallelismType = 1; + else if (!use_wpp && (use_tiles==nb_pps) ) hevc_cfg->parallelismType = 2; + else if (!use_tiles && (use_wpp==nb_pps) ) hevc_cfg->parallelismType = 3; + else hevc_cfg->parallelismType = 0; +} + #endif static GF_Err gf_import_hevc(GF_MediaImporter *import) @@ -5087,7 +5126,7 @@ static GF_Err gf_import_hevc(GF_MediaImporter *import) Bool flush_sample, flush_next_sample, is_empty_sample, sample_is_rap, sample_has_islice, first_nal, slice_is_ref, has_cts_offset, is_paff, set_subsamples, slice_force_ref; u32 ref_frame, timescale, copy_size, size_length, dts_inc; s32 last_poc, max_last_poc, max_last_b_poc, poc_diff, prev_last_poc, min_poc, poc_shift; - Bool first_avc; + Bool first_hevc; u32 use_opengop_gdr = 0; u8 layer_ids[64]; @@ -5134,7 +5173,7 @@ restart_import: shvc_cfg->non_hevc_base_layer = 0; buffer = (char*)gf_malloc(sizeof(char) * max_size); sample_data = NULL; - first_avc = 1; + first_hevc = 1; sei_recovery_frame_count = -1; spss = ppss = vpss = NULL; @@ -5169,6 +5208,7 @@ restart_import: e = gf_isom_hevc_config_new(import->dest, track, hevc_cfg, NULL, NULL, &di); if (e) goto exit; + gf_isom_set_nalu_extract_mode(import->dest, track, GF_ISOM_NALU_EXTRACT_INSPECT); memset(layer_ids, 0, sizeof(u8)*64); sample_data = NULL; @@ -5281,7 +5321,7 @@ restart_import: dst_cfg->avgFrameRate = hevc.vps[idx].rates[0].avg_pic_rate; dst_cfg->constantFrameRate = hevc.vps[idx].rates[0].constand_pic_rate_idc; - dst_cfg->numTemporalLayers = hevc.vps[idx].max_sub_layer; + dst_cfg->numTemporalLayers = hevc.vps[idx].max_sub_layers; dst_cfg->temporalIdNested = hevc.vps[idx].temporal_id_nesting; //TODO set scalability mask @@ -5343,12 +5383,6 @@ restart_import: dst_cfg->luma_bit_depth = hevc.sps[idx].bit_depth_luma; dst_cfg->chroma_bit_depth = hevc.sps[idx].bit_depth_chroma; - //need VUI for these ... 
- //u16 min_spatial_segmentation_idc; - //u8 parallelismType; - //u16 avgFrameRate; - //u8 constantFrameRate; - if (!spss) { GF_SAFEALLOC(spss, GF_HEVCParamArray); spss->nalus = gf_list_new(); @@ -5387,8 +5421,8 @@ restart_import: goto restart_import; } - if (first_avc) { - first_avc = 0; + if (first_hevc) { + first_hevc = 0; gf_import_message(import, GF_OK, "HEVC import - frame size %d x %d at %02.3f FPS", hevc.sps[idx].width, hevc.sps[idx].height, FPS); } else { gf_import_message(import, GF_OK, "SHVC detected - %d x %d at %02.3f FPS", hevc.sps[idx].width, hevc.sps[idx].height, FPS); @@ -5842,11 +5876,20 @@ next_nal: } if (gf_list_count(hevc_cfg->param_array) || !gf_list_count(shvc_cfg->param_array) ) { + hevc_set_parall_type(hevc_cfg); gf_isom_hevc_config_update(import->dest, track, 1, hevc_cfg); if (gf_list_count(shvc_cfg->param_array)) { + hevc_set_parall_type(shvc_cfg); + + shvc_cfg->avgFrameRate = hevc_cfg->avgFrameRate; + shvc_cfg->constantFrameRate = hevc_cfg->constantFrameRate; + shvc_cfg->numTemporalLayers = hevc_cfg->numTemporalLayers; + shvc_cfg->temporalIdNested = hevc_cfg->temporalIdNested; + gf_isom_shvc_config_update(import->dest, track, 1, shvc_cfg, 1); } } else { + hevc_set_parall_type(shvc_cfg); gf_isom_shvc_config_update(import->dest, track, 1, shvc_cfg, 0); } @@ -6927,6 +6970,14 @@ void on_m2ts_import_data(GF_M2TS_Demuxer *ts, u32 evt_type, void *par) gf_import_message(import, GF_OK, "[MPEG-2 TS] PMT Update found - cannot import any further"); import->flags |= GF_IMPORT_DO_ABORT; break; + case GF_M2TS_EVT_DURATION_ESTIMATED: + prog = (GF_M2TS_Program*)par; + + if (import->flags & GF_IMPORT_PROBE_ONLY) { + import->probe_duration = ((GF_M2TS_PES_PCK *) par)->PTS; + //import->flags |= GF_IMPORT_DO_ABORT; + } + break; /*case GF_M2TS_EVT_SDT_FOUND: import->nb_progs = gf_list_count(ts->SDTs); @@ -6992,7 +7043,8 @@ void on_m2ts_import_data(GF_M2TS_Demuxer *ts, u32 evt_type, void *par) tsimp->nb_video++; break; case GF_M2TS_VIDEO_HEVC: - import->tk_info[idx].media_type = GF_4CC('h','e','v','c'); + case GF_M2TS_VIDEO_SHVC: + import->tk_info[idx].media_type = (es->stream_type==GF_M2TS_VIDEO_SHVC) ? 
GF_4CC('S','H','V','C') : GF_4CC('H','E','V','C'); import->tk_info[idx].type = GF_ISOM_MEDIA_VISUAL; import->tk_info[idx].lang = pes->lang; import->nb_tracks++; @@ -7059,6 +7111,14 @@ void on_m2ts_import_data(GF_M2TS_Demuxer *ts, u32 evt_type, void *par) } import->nb_tracks++; break; + case GF_M2TS_METADATA_ID3_HLS: + import->tk_info[idx].media_type = GF_4CC('I','D','3',' '); + import->tk_info[idx].type = GF_ISOM_MEDIA_META; + import->tk_info[idx].lang = pes->lang; + import->nb_tracks++; + break; + default: + gf_import_message(import, GF_OK, "[MPEG-2 TS] Ignoring stream of type %d", es->stream_type); } } } else { @@ -7103,7 +7163,7 @@ void on_m2ts_import_data(GF_M2TS_Demuxer *ts, u32 evt_type, void *par) //ses = (GF_M2TS_SECTION_ES *)es; } else { pes = (GF_M2TS_PES *)es; - gf_m2ts_set_pes_framing(pes, GF_M2TS_PES_FRAMING_DEFAULT); + gf_m2ts_set_pes_framing(pes, GF_M2TS_PES_FRAMING_DEFAULT_NAL); } mtype = stype = oti = 0; @@ -7240,6 +7300,7 @@ void on_m2ts_import_data(GF_M2TS_Demuxer *ts, u32 evt_type, void *par) if (!ts->has_4on2 && (tsimp->nb_video_configured == tsimp->nb_video) && (tsimp->nb_audio_configured == tsimp->nb_audio) + && import->probe_duration ) { import->flags |= GF_IMPORT_DO_ABORT; } @@ -7408,7 +7469,7 @@ void on_m2ts_import_data(GF_M2TS_Demuxer *ts, u32 evt_type, void *par) tsimp->hevc.vps[idx].state = 2; tsimp->hevccfg->avgFrameRate = tsimp->hevc.vps[idx].rates[0].avg_pic_rate; tsimp->hevccfg->constantFrameRate = tsimp->hevc.vps[idx].rates[0].constand_pic_rate_idc; - tsimp->hevccfg->numTemporalLayers = tsimp->hevc.vps[idx].max_sub_layer; + tsimp->hevccfg->numTemporalLayers = tsimp->hevc.vps[idx].max_sub_layers; hevc_cfg_add_nalu(tsimp->hevccfg, nal_type, pck->data+4, pck->data_len-4); } return; @@ -7704,6 +7765,7 @@ GF_Err gf_import_mpeg_ts(GF_MediaImporter *import) ts = gf_m2ts_demux_new(); ts->on_event = on_m2ts_import_data; ts->user = &tsimp; + ts->file_size = fsize; ts->dvb_h_demux = (import->flags & GF_IMPORT_MPE_DEMUX) ? 
1 : 0; @@ -7718,6 +7780,7 @@ GF_Err gf_import_mpeg_ts(GF_MediaImporter *import) break; gf_m2ts_process_data(ts, data, size); + ts->nb_pck++; if (import->flags & GF_IMPORT_DO_ABORT) break; done += size; if (do_import) gf_set_progress(progress, (u32) (done/1024), (u32) (fsize/1024)); @@ -7764,6 +7827,7 @@ GF_Err gf_import_mpeg_ts(GF_MediaImporter *import) if (tsimp.hevccfg) { u32 w = ((GF_M2TS_PES*)es)->vid_w; u32 h = ((GF_M2TS_PES*)es)->vid_h; + hevc_set_parall_type(tsimp.hevccfg); gf_isom_hevc_config_update(import->dest, tsimp.track, 1, tsimp.hevccfg); gf_isom_set_visual_info(import->dest, tsimp.track, 1, w, h); gf_isom_set_track_layout_info(import->dest, tsimp.track, w<<16, h<<16, 0, 0, 0); diff --git a/src/media_tools/mpd.c b/src/media_tools/mpd.c index 45f430c..70708a4 100644 --- a/src/media_tools/mpd.c +++ b/src/media_tools/mpd.c @@ -944,6 +944,8 @@ GF_Err gf_mpd_init_from_dom(GF_XMLNode *root, GF_MPD *mpd, const char *default_b mpd->availabilityStartTime = gf_mpd_parse_date(att->value); } else if (!strcmp(att->name, "availabilityEndTime")) { mpd->availabilityEndTime = gf_mpd_parse_date(att->value); + } else if (!strcmp(att->name, "publishTime")) { + mpd->publishTime = gf_mpd_parse_date(att->value); } else if (!strcmp(att->name, "mediaPresentationDuration")) { mpd->media_presentation_duration = gf_mpd_parse_duration(att->value); } else if (!strcmp(att->name, "minimumUpdatePeriod")) { diff --git a/src/media_tools/mpegts.c b/src/media_tools/mpegts.c index 1857530..094722d 100644 --- a/src/media_tools/mpegts.c +++ b/src/media_tools/mpegts.c @@ -78,6 +78,9 @@ const char *gf_m2ts_get_stream_name(u32 streamType) case GF_M2TS_SYSTEMS_MPEG4_SECTIONS: return "MPEG-4 SL (Section)"; case GF_M2TS_MPE_SECTIONS: return "MPE (Section)"; + case GF_M2TS_METADATA_PES: return "Metadata (PES)"; + case GF_M2TS_METADATA_ID3_HLS: return "ID3/HLS Metadata (PES)"; + default: return "Unknown"; } } @@ -87,7 +90,7 @@ static void gf_m2ts_estimate_duration(GF_M2TS_Demuxer *ts, u64 PCR, u16 pcr_pid) u64 file_size = 0; // if (ts->duration>0) return; - if (ts->file) { + if (ts->file || ts->file_size) { file_size = ts->file_size; } else if (ts->dnload) { u32 size; @@ -135,7 +138,7 @@ static void gf_m2ts_estimate_duration(GF_M2TS_Demuxer *ts, u64 PCR, u16 pcr_pid) } } -static u32 gf_m2ts_reframe_default(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len) +static u32 gf_m2ts_reframe_default(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr) { GF_M2TS_PES_PCK pck; pck.flags = 0; @@ -151,13 +154,13 @@ static u32 gf_m2ts_reframe_default(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool s return 0; } -static u32 gf_m2ts_reframe_reset(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len) +static u32 gf_m2ts_reframe_reset(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr) { - if (pes->data) { - gf_free(pes->data); - pes->data = NULL; + if (pes->pck_data) { + gf_free(pes->pck_data); + pes->pck_data = NULL; } - pes->data_len = 0; + pes->pck_data_len = pes->pck_alloc_len = 0; if (pes->prev_data) { gf_free(pes->prev_data); pes->prev_data = NULL; @@ -167,10 +170,11 @@ static u32 gf_m2ts_reframe_reset(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool sam pes->prev_PTS = 0; pes->reframe = NULL; pes->cc = -1; + pes->temi_tc_desc_len = 0; return 0; } -static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, 
unsigned char *data, u32 data_len, Bool is_hevc) +static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr, Bool is_hevc) { Bool au_start_in_pes=0; Bool prev_is_au_delim=0; @@ -179,7 +183,9 @@ static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Boo Bool short_start_code = 0; Bool esc_code_found = 0; u32 nal_type, sc_pos = 0; - + u32 first_nal_offset_in_pck = 0; + Bool full_au_pes_mode = 0; + u8 *au_start = NULL; GF_M2TS_PES_PCK pck; if (!same_pts) @@ -231,6 +237,7 @@ static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Boo } data += sc_pos; + first_nal_offset_in_pck += sc_pos; data_len -= sc_pos; } start_code_found = short_start_code ? 2 : 1; @@ -266,33 +273,52 @@ static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Boo } } #endif - /*check AU start type*/ - if (nal_type==GF_HEVC_NALU_ACCESS_UNIT) { + /*check AU start type - if this is an SHVC PID and the first nal is the first byte of the PES payload, consider this is an AU start*/ + if ((nal_type==GF_HEVC_NALU_ACCESS_UNIT) || (pes->depends_on_pid && !first_nal_offset_in_pck)) { if (!prev_is_au_delim) { + //this was not a one AU per PES config, dispatch + if (au_start) { + pck.data = (char *)au_start; + pck.data_len = (u32) (data - au_start); + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + au_start = NULL; + full_au_pes_mode = 0; + } + if (au_start_in_pes) { /*FIXME - we should check the AVC framerate to update the timing ...*/ pck.DTS += 3000; pck.PTS += 3000; // GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID%d: Two AVC AUs start in this PES packet - cannot recompute non-first AU timing\n", pes->pid)); } + pck.flags = GF_M2TS_PES_PCK_AU_START; force_new_au = 0; au_start_in_pes = 1; - ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + if (pes_hdr->data_alignment && !first_nal_offset_in_pck && !pes->single_nal_mode) { + full_au_pes_mode = GF_TRUE; + au_start = (u8 *) pck.data; + } else { + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + } prev_is_au_delim=1; } - } else if ((nal_type==GF_HEVC_NALU_SLICE_IDR_W_DLP) - || (nal_type==GF_HEVC_NALU_SLICE_IDR_N_LP) - ) { - pck.flags = GF_M2TS_PES_PCK_RAP; - ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + } else if ((nal_type>=GF_HEVC_NALU_SLICE_BLA_W_LP) && (nal_type<=GF_HEVC_NALU_SLICE_CRA)) { + if (!full_au_pes_mode) { + pck.flags = GF_M2TS_PES_PCK_RAP; + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + } else { + pck.flags |= GF_M2TS_PES_PCK_RAP; + } prev_is_au_delim=0; } else #endif //GPAC_DISABLE_HEVC { - pck.flags = 0; - ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + if (!full_au_pes_mode) { + pck.flags = 0; + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + } prev_is_au_delim=0; } } else { @@ -316,6 +342,16 @@ static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Boo /*check AU start type*/ if ((nal_type==GF_AVC_NALU_ACCESS_UNIT) || (nal_type==GF_AVC_NALU_VDRD)) { if (!prev_is_au_delim) { + + //this was not a one AU per PES config, dispatch + if (au_start) { + pck.data = (char *)au_start; + pck.data_len = (u32) (data - au_start); + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + au_start = NULL; + full_au_pes_mode = 0; + } + if (au_start_in_pes) { /*FIXME - we should check the AVC framerate to update the timing ...*/ pck.DTS += 3000; @@ -325,18 +361,28 @@ static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Boo pck.flags = GF_M2TS_PES_PCK_AU_START; force_new_au = 0; 
au_start_in_pes = 1; - ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + if (pes_hdr->data_alignment && !first_nal_offset_in_pck && !pes->single_nal_mode) { + full_au_pes_mode = GF_TRUE; + au_start = (u8 *) pck.data; + } else { + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + } prev_is_au_delim=1; } } else { - pck.flags = (nal_type==GF_AVC_NALU_IDR_SLICE) ? GF_M2TS_PES_PCK_RAP : 0; - ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + if (!full_au_pes_mode) { + pck.flags = (nal_type==GF_AVC_NALU_IDR_SLICE) ? GF_M2TS_PES_PCK_RAP : 0; + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + } else { + if (nal_type==GF_AVC_NALU_IDR_SLICE) pck.flags |= GF_M2TS_PES_PCK_RAP; + } prev_is_au_delim=0; } } data += sc_pos; data_len -= sc_pos; + first_nal_offset_in_pck += sc_pos; sc_pos = 0; if (esc_code_found) { @@ -350,12 +396,39 @@ static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Boo /*we did not consume all data*/ if (!start_code_found) { u32 min_size = is_hevc ? 6 : 5; + + if (au_start) { + pck.data = (char *)au_start; + pck.data_len = (u32) (data - au_start); + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + au_start = NULL; + } + /*if not enough data to locate start code, store it*/ if (data_len < min_size ) return data_len; /*otherwise this is the middle of a frame, let's dispatch it*/ } + if (au_start) { + if (is_hevc) { +#ifndef GPAC_DISABLE_HEVC + nal_type = (data[4] & 0x7E) >> 1; + if ((nal_type>=GF_HEVC_NALU_SLICE_BLA_W_LP) && (nal_type<=GF_HEVC_NALU_SLICE_CRA)) { + pck.flags |= GF_M2TS_PES_PCK_RAP; + } +#endif + } else { + nal_type = data[4] & 0x1F; + if (nal_type==GF_AVC_NALU_IDR_SLICE) pck.flags |= GF_M2TS_PES_PCK_RAP; + } + + pck.data = (char *)au_start; + pck.data_len = (u32) (data - au_start) + data_len; + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + return 0; + } + if (data_len) { pck.flags = 0; pck.data = (char *)data; @@ -396,17 +469,17 @@ static u32 gf_m2ts_reframe_nalu_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Boo return 0; } -static u32 gf_m2ts_reframe_avc_h264(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len) +static u32 gf_m2ts_reframe_avc_h264(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr) { - return gf_m2ts_reframe_nalu_video(ts, pes, same_pts, data, data_len, 0); + return gf_m2ts_reframe_nalu_video(ts, pes, same_pts, data, data_len, pes_hdr, 0); } -static u32 gf_m2ts_reframe_hevc(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len) +static u32 gf_m2ts_reframe_hevc(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr) { - return gf_m2ts_reframe_nalu_video(ts, pes, same_pts, data, data_len, 1); + return gf_m2ts_reframe_nalu_video(ts, pes, same_pts, data, data_len, pes_hdr, 1); } -static u32 gf_m2ts_reframe_mpeg_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len) +static u32 gf_m2ts_reframe_mpeg_video(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr) { u32 sc_pos = 0; u32 to_send = data_len; @@ -494,7 +567,7 @@ typedef struct u32 profile, sr_idx, nb_ch, frame_size; } ADTSHeader; -static u32 gf_m2ts_reframe_aac_adts(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len) +static u32 gf_m2ts_reframe_aac_adts(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader 
*pes_hdr) { ADTSHeader hdr; u32 sc_pos = 0; @@ -647,7 +720,7 @@ static u32 gf_m2ts_reframe_aac_adts(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool return data_len - sc_pos; } -static u32 gf_m2ts_reframe_aac_latm(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len) +static u32 gf_m2ts_reframe_aac_latm(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr) { u32 sc_pos = 0; u32 start = 0; @@ -786,7 +859,7 @@ static u32 gf_m2ts_reframe_aac_latm(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool #ifndef GPAC_DISABLE_AV_PARSERS -static u32 gf_m2ts_reframe_mpeg_audio(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len) +static u32 gf_m2ts_reframe_mpeg_audio(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr) { GF_M2TS_PES_PCK pck; u32 pos, frame_size, remain; @@ -873,7 +946,186 @@ static u32 gf_m2ts_reframe_mpeg_audio(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Boo /*we consumed all data*/ return 0; } + #endif /*GPAC_DISABLE_AV_PARSERS*/ +typedef enum { + ID3V2_FRAME_AENC = GF_4CC('A','E','N','C'), + ID3V2_FRAME_APIC = GF_4CC('A','P','I','C'), + ID3V2_FRAME_COMM = GF_4CC('C','O','M','M'), + ID3V2_FRAME_COMR = GF_4CC('C','O','M','R'), + ID3V2_FRAME_ENCR = GF_4CC('E','N','C','R'), + ID3V2_FRAME_EQUA = GF_4CC('E','Q','U','A'), + ID3V2_FRAME_ETCO = GF_4CC('E','T','C','O'), + ID3V2_FRAME_GEOB = GF_4CC('G','E','O','B'), + ID3V2_FRAME_GRID = GF_4CC('G','R','I','D'), + ID3V2_FRAME_IPLS = GF_4CC('I','P','L','S'), + ID3V2_FRAME_LINK = GF_4CC('L','I','N','K'), + ID3V2_FRAME_MCDI = GF_4CC('M','C','D','I'), + ID3V2_FRAME_MLLT = GF_4CC('M','L','L','T'), + ID3V2_FRAME_OWNE = GF_4CC('O','W','N','E'), + ID3V2_FRAME_PRIV = GF_4CC('P','R','I','V'), + ID3V2_FRAME_PCNT = GF_4CC('P','C','N','T'), + ID3V2_FRAME_POPM = GF_4CC('P','O','P','M'), + ID3V2_FRAME_POSS = GF_4CC('P','O','S','S'), + ID3V2_FRAME_RBUF = GF_4CC('R','B','U','F'), + ID3V2_FRAME_RVAD = GF_4CC('R','V','A','D'), + ID3V2_FRAME_RVRB = GF_4CC('R','V','R','B'), + ID3V2_FRAME_SYLT = GF_4CC('S','Y','L','T'), + ID3V2_FRAME_SYTC = GF_4CC('S','Y','T','C'), + ID3V2_FRAME_TALB = GF_4CC('T','A','L','B'), + ID3V2_FRAME_TBPM = GF_4CC('T','B','P','M'), + ID3V2_FRAME_TCOM = GF_4CC('T','C','O','M'), + ID3V2_FRAME_TCON = GF_4CC('T','C','O','N'), + ID3V2_FRAME_TCOP = GF_4CC('T','C','O','P'), + ID3V2_FRAME_TDAT = GF_4CC('T','D','A','T'), + ID3V2_FRAME_TDLY = GF_4CC('T','D','L','Y'), + ID3V2_FRAME_TENC = GF_4CC('T','E','N','C'), + ID3V2_FRAME_TEXT = GF_4CC('T','E','X','T'), + ID3V2_FRAME_TFLT = GF_4CC('T','F','L','T'), + ID3V2_FRAME_TIME = GF_4CC('T','I','M','E'), + ID3V2_FRAME_TIT1 = GF_4CC('T','I','T','1'), + ID3V2_FRAME_TIT2 = GF_4CC('T','I','T','2'), + ID3V2_FRAME_TIT3 = GF_4CC('T','I','T','3'), + ID3V2_FRAME_TKEY = GF_4CC('T','K','E','Y'), + ID3V2_FRAME_TLAN = GF_4CC('T','L','A','N'), + ID3V2_FRAME_TLEN = GF_4CC('T','L','E','N'), + ID3V2_FRAME_TMED = GF_4CC('T','M','E','D'), + ID3V2_FRAME_TOAL = GF_4CC('T','O','A','L'), + ID3V2_FRAME_TOFN = GF_4CC('T','O','F','N'), + ID3V2_FRAME_TOLY = GF_4CC('T','O','L','Y'), + ID3V2_FRAME_TOPE = GF_4CC('T','O','P','E'), + ID3V2_FRAME_TORY = GF_4CC('T','O','R','Y'), + ID3V2_FRAME_TOWN = GF_4CC('T','O','W','N'), + ID3V2_FRAME_TPE1 = GF_4CC('T','P','E','1'), + ID3V2_FRAME_TPE2 = GF_4CC('T','P','E','2'), + ID3V2_FRAME_TPE3 = GF_4CC('T','P','E','3'), + ID3V2_FRAME_TPE4 = GF_4CC('T','P','E','4'), + ID3V2_FRAME_TPOS = GF_4CC('T','P','O','S'), + 
ID3V2_FRAME_TPUB = GF_4CC('T','P','U','B'), + ID3V2_FRAME_TRCK = GF_4CC('T','R','C','K'), + ID3V2_FRAME_TRDA = GF_4CC('T','R','D','A'), + ID3V2_FRAME_TRSN = GF_4CC('T','R','S','N'), + ID3V2_FRAME_TRSO = GF_4CC('T','R','S','O'), + ID3V2_FRAME_TSIZ = GF_4CC('T','S','I','Z'), + ID3V2_FRAME_TSRC = GF_4CC('T','S','R','C'), + ID3V2_FRAME_TSSE = GF_4CC('T','S','S','E'), + ID3V2_FRAME_TYER = GF_4CC('T','Y','E','R'), + ID3V2_FRAME_TXXX = GF_4CC('T','X','X','X'), + ID3V2_FRAME_UFID = GF_4CC('U','F','I','D'), + ID3V2_FRAME_USER = GF_4CC('U','S','E','R'), + ID3V2_FRAME_USLT = GF_4CC('U','S','L','T'), + ID3V2_FRAME_WCOM = GF_4CC('W','C','O','M'), + ID3V2_FRAME_WCOP = GF_4CC('W','C','O','P'), + ID3V2_FRAME_WOAF = GF_4CC('W','O','A','F'), + ID3V2_FRAME_WOAR = GF_4CC('W','O','A','R'), + ID3V2_FRAME_WOAS = GF_4CC('W','O','A','S'), + ID3V2_FRAME_WORS = GF_4CC('W','O','R','S'), + ID3V2_FRAME_WPAY = GF_4CC('W','P','A','Y'), + ID3V2_FRAME_WPUB = GF_4CC('W','P','U','B'), + ID3V2_FRAME_WXXX = GF_4CC('W','X','X','X') +} GF_ID3v2FrameType; + +static void add_text(char **buffer, u32 *size, u32 *pos, char *msg, u32 msg_len) +{ + if (*pos+msg_len>*size) { + *size = *pos+msg_len+256; + *buffer = (char *)gf_realloc(*buffer, *size); + } + strncpy((*buffer)+(*pos), msg, msg_len); + *pos += msg_len; +} + +static GF_Err id3_parse_tag(char *data, u32 length, char **output, u32 *output_size, u32 *output_pos) +{ + u32 size; + u32 pos = 0; + /* ID3 VV F SIZE = 10 bytes + * ID3 string + * VV = Version + * F = Flags + * SIZE = 32bits size with first Most Significant bit set to 0 -> 28 bits + * Size starts AFTER this header, meaning we have to add 10 bytes + */ + if (data[pos] == 'I' && data[pos+1] == 'D' && data[pos+2] == '3') { + //u16 version = (data[pos+3]<<8)+data[pos+4]; + //Bool unsync_flag = ((data[pos+5]>>7 & 0x1) ? GF_TRUE: GF_FALSE); + Bool extended_header_flag = ((data[pos+5]>>6 & 0x1) ? GF_TRUE: GF_FALSE); + //Bool experimental_flag = ((data[pos+5]>>5 & 0x1) ? GF_TRUE: GF_FALSE); + if (data[pos+5] & 0x1F) { + return GF_NOT_SUPPORTED; + } else { + size = 10 + ((data[pos+9] & 0x7f) + ((data[pos+8] & 0x7f) << 7) + ((data[pos+7] & 0x7f) << 14) + ((data[pos+6] & 0x7f) << 21)); + } + pos += 10; + + if (extended_header_flag) { + u32 extended_size = 4 + ((data[pos]) + ((data[pos+1]) << 8) + ((data[pos+2]) << 16) + ((data[pos+3]) << 24)); + pos += extended_size; + } + + while (pos < size) { + GF_ID3v2FrameType type; + u32 frame_size, frame_pos; + Bool compression_flag, encryption_flag/*, tag_alter_preservation_flag, file_alter_preservation_flag, readonly_flag, grouping_flag*/; + frame_pos = pos; + /* parsing a frame */ + type = (GF_ID3v2FrameType)(((data[pos+3]) + ((data[pos+2]) << 8) + ((data[pos+1]) << 16) + ((data[pos]) << 24))); + pos+=4; + frame_size = 10 + ((data[pos+3]) + ((data[pos+2]) << 8) + ((data[pos+1]) << 16) + ((data[pos]) << 24)); + pos+=4; + //tag_alter_preservation_flag = ((data[pos]>>7 & 0x1) ? GF_TRUE: GF_FALSE); + //file_alter_preservation_flag = ((data[pos]>>6 & 0x1) ? GF_TRUE: GF_FALSE); + //readonly_flag = ((data[pos]>>5 & 0x1) ? GF_TRUE: GF_FALSE); + pos++; + compression_flag = ((data[pos]>>7 & 0x1) ? GF_TRUE: GF_FALSE); + encryption_flag = ((data[pos]>>6 & 0x1) ? GF_TRUE: GF_FALSE); + //grouping_flag = ((data[pos]>>5 & 0x1) ?
GF_TRUE: GF_FALSE); + if (compression_flag || encryption_flag) { + /* unsupported, skip */ + pos = frame_pos + frame_size; + } else { + switch (type) { + case ID3V2_FRAME_TXXX: + add_text(output, output_size, output_pos, data+pos+3, frame_size-10-3); + pos = frame_pos + frame_size; + break; + default: + /* unsupported, skip */ + pos = frame_pos + frame_size; + } + } + } + + return GF_OK; + } else { + return GF_NOT_SUPPORTED; + } +} + +static u32 gf_m2ts_reframe_id3_pes(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, Bool same_pts, unsigned char *data, u32 data_len, GF_M2TS_PESHeader *pes_hdr) +{ + char frame_header[256]; + char *output_text = NULL; + u32 output_len = 0; + u32 pos = 0; + GF_M2TS_PES_PCK pck; + pck.flags = 0; + if (pes->rap) pck.flags |= GF_M2TS_PES_PCK_RAP; + if (!same_pts) pck.flags |= GF_M2TS_PES_PCK_AU_START; + pck.DTS = pes->DTS; + pck.PTS = pes->PTS; + sprintf(frame_header, LLU" --> NEXT\n", pes->PTS); + add_text(&output_text, &output_len, &pos, frame_header, (u32)strlen(frame_header)); + id3_parse_tag((char *)data, data_len, &output_text, &output_len, &pos); + add_text(&output_text, &output_len, &pos, "\n\n", 2); + pck.data = (char *)output_text; + pck.data_len = pos; + pck.stream = pes; + ts->on_event(ts, GF_M2TS_EVT_PES_PCK, &pck); + gf_free(output_text); + /*we consumed all data*/ + return 0; +} static u32 gf_m2ts_sync(GF_M2TS_Demuxer *ts, Bool simple_check) { @@ -972,9 +1224,10 @@ void gf_m2ts_es_del(GF_M2TS_ES *es, GF_M2TS_Demuxer *ts) if ((pes->flags & GF_M2TS_INHERIT_PCR) && ts->ess[es->program->pcr_pid]==es) ts->ess[es->program->pcr_pid] = NULL; - if (pes->data) gf_free(pes->data); + if (pes->pck_data) gf_free(pes->pck_data); if (pes->prev_data) gf_free(pes->prev_data); if (pes->buf) gf_free(pes->buf); + if (pes->temi_tc_desc) gf_free(pes->temi_tc_desc); } if (es->slcfg) gf_free(es->slcfg); gf_free(es); @@ -993,6 +1246,15 @@ static void gf_m2ts_reset_sdt(GF_M2TS_Demuxer *ts) static void gf_m2ts_section_complete(GF_M2TS_Demuxer *ts, GF_M2TS_SectionFilter *sec, GF_M2TS_SECTION_ES *ses) { + //seek mode, only process PAT and PMT + if (ts->start_range && (sec->section[0] != GF_M2TS_TABLE_ID_PAT) && (sec->section[0] != GF_M2TS_TABLE_ID_PMT)) { + /*clean-up (including broken sections)*/ + if (sec->section) gf_free(sec->section); + sec->section = NULL; + sec->length = sec->received = 0; + return; + } + if (!sec->process_section) { if ((ts->on_event && (sec->section[0]==GF_M2TS_TABLE_ID_AIT)) ) { #ifdef GPAC_ENABLE_DSMCC @@ -1566,6 +1828,117 @@ error_exit: return; } +static GF_M2TS_MetadataPointerDescriptor *gf_m2ts_read_metadata_pointer_descriptor(GF_BitStream *bs, u32 length) +{ + u32 size; + GF_M2TS_MetadataPointerDescriptor *d; + GF_SAFEALLOC(d, GF_M2TS_MetadataPointerDescriptor); + d->application_format = gf_bs_read_u16(bs); + size = 2; + if (d->application_format == 0xFFFF) { + d->application_format_identifier = gf_bs_read_u32(bs); + size += 4; + } + d->format = gf_bs_read_u8(bs); + size += 1; + if (d->format == 0xFF) { + d->format_identifier = gf_bs_read_u32(bs); + size += 4; + } + d->service_id = gf_bs_read_u8(bs); + d->locator_record_flag = (gf_bs_read_int(bs, 1) ? 
GF_TRUE : GF_FALSE); + d->carriage_flag = (enum metadata_carriage)gf_bs_read_int(bs, 2); + gf_bs_read_int(bs, 5); /*reserved */ + size += 2; + if (d->locator_record_flag) { + d->locator_length = gf_bs_read_u8(bs); + d->locator_data = (char *)gf_malloc(d->locator_length); + size += 1 + d->locator_length; + gf_bs_read_data(bs, d->locator_data, d->locator_length); + } + if (d->carriage_flag != 3) { + d->program_number = gf_bs_read_u16(bs); + size += 2; + } + if (d->carriage_flag == 1) { + d->ts_location = gf_bs_read_u16(bs); + d->ts_id = gf_bs_read_u16(bs); + size += 4; + } + if (length-size > 0) { + d->data_size = length-size; + d->data = (char *)gf_malloc(d->data_size); + gf_bs_read_data(bs, d->data, d->data_size); + } + return d; +} + +void gf_m2ts_metadata_pointer_descriptor_del(GF_M2TS_MetadataPointerDescriptor *metapd) +{ + if (metapd) { + if (metapd->locator_data) gf_free(metapd->locator_data); + if (metapd->data) gf_free(metapd->data); + gf_free(metapd); + } +} + +static GF_M2TS_MetadataDescriptor *gf_m2ts_read_metadata_descriptor(GF_BitStream *bs, u32 length) +{ + u32 size; + GF_M2TS_MetadataDescriptor *d; + GF_SAFEALLOC(d, GF_M2TS_MetadataDescriptor); + d->application_format = gf_bs_read_u16(bs); + size = 2; + if (d->application_format == 0xFFFF) { + d->application_format_identifier = gf_bs_read_u32(bs); + size += 4; + } + d->format = gf_bs_read_u8(bs); + size += 1; + if (d->format == 0xFF) { + d->format_identifier = gf_bs_read_u32(bs); + size += 4; + } + d->service_id = gf_bs_read_u8(bs); + d->decoder_config_flags = gf_bs_read_int(bs, 3); + d->dsmcc_flag = (gf_bs_read_int(bs, 1) ? GF_TRUE : GF_FALSE); + gf_bs_read_int(bs, 4); /* reserved */ + size += 2; + if (d->dsmcc_flag) { + d->service_id_record_length = gf_bs_read_u8(bs); + d->service_id_record = (char *)gf_malloc(d->service_id_record_length); + size += 1 + d->service_id_record_length; + gf_bs_read_data(bs, d->service_id_record, d->service_id_record_length); + } + if (d->decoder_config_flags == 1) { + d->decoder_config_length = gf_bs_read_u8(bs); + d->decoder_config = (char *)gf_malloc(d->decoder_config_length); + size += 1 + d->decoder_config_length; + gf_bs_read_data(bs, d->decoder_config, d->decoder_config_length); + } + if (d->decoder_config_flags == 3) { + d->decoder_config_id_length = gf_bs_read_u8(bs); + d->decoder_config_id = (char *)gf_malloc(d->decoder_config_id_length); + size += 1 + d->decoder_config_id_length; + gf_bs_read_data(bs, d->decoder_config_id, d->decoder_config_id_length); + } + if (d->decoder_config_flags == 4) { + d->decoder_config_service_id = gf_bs_read_u8(bs); + size++; + } + return d; +} + +void gf_m2ts_metadata_descriptor_del(GF_M2TS_MetadataDescriptor *metad) +{ + if (metad) { + if (metad->service_id_record) gf_free(metad->service_id_record); + if (metad->decoder_config) gf_free(metad->decoder_config); + if (metad->decoder_config_id) gf_free(metad->decoder_config_id); + gf_free(metad); + } +} + static void gf_m2ts_process_pmt(GF_M2TS_Demuxer *ts, GF_M2TS_SECTION_ES *pmt, GF_List *sections, u8 table_id, u16 ex_table_id, u8 version_number, u8 last_section_number, u32 status) { u32 info_length, pos, desc_len, evt_type, nb_es,i; @@ -1621,6 +1994,21 @@ static void gf_m2ts_process_pmt(GF_M2TS_Demuxer *ts, GF_M2TS_SECTION_ES *pmt, GF gf_odf_desc_del((GF_Descriptor *)pmt->program->pmt_iod); pmt->program->pmt_iod = NULL; } + } else if (tag == GF_M2TS_METADATA_POINTER_DESCRIPTOR) { + GF_BitStream *metadatapd_bs; + GF_M2TS_MetadataPointerDescriptor *metapd; + metadatapd_bs = gf_bs_new((char *)data+6, len, 
GF_BITSTREAM_READ); + metapd = gf_m2ts_read_metadata_pointer_descriptor(metadatapd_bs, len); + gf_bs_del(metadatapd_bs); + if (metapd->application_format_identifier == GF_4CC('I', 'D', '3', ' ') && + metapd->format_identifier == GF_4CC('I', 'D', '3', ' ') && + metapd->carriage_flag == METADATA_CARRIAGE_SAME_TS) { + /*HLS ID3 Metadata */ + pmt->program->metadata_pointer_descriptor = metapd; + } else { + /* don't know what to do with it for now, delete */ + gf_m2ts_metadata_pointer_descriptor_del(metapd); + } } else { #else { @@ -1677,6 +2065,7 @@ static void gf_m2ts_process_pmt(GF_M2TS_Demuxer *ts, GF_M2TS_SECTION_ES *pmt, GF case GF_M2TS_AUDIO_AC3: case GF_M2TS_AUDIO_DTS: case GF_M2TS_SUBTITLE_DVB: + case GF_M2TS_METADATA_PES: GF_SAFEALLOC(pes, GF_M2TS_PES); pes->cc = -1; pes->flags = GF_M2TS_ES_IS_PES; @@ -1775,10 +2164,10 @@ static void gf_m2ts_process_pmt(GF_M2TS_Demuxer *ts, GF_M2TS_SECTION_ES *pmt, GF reg_desc_format = GF_4CC(data[2], data[3], data[4], data[5]); /*cf http://www.smpte-ra.org/mpegreg/mpegreg.html*/ switch (reg_desc_format) { - case GF_4CC(0x41, 0x43, 0x2D, 0x33): + case GF_4CC('A', 'C', '-', '3'): es->stream_type = GF_M2TS_AUDIO_AC3; break; - case GF_4CC(0x56, 0x43, 0x2D, 0x31): + case GF_4CC('V', 'C', '-', '1'): es->stream_type = GF_M2TS_VIDEO_VC1; break; } @@ -1820,6 +2209,25 @@ static void gf_m2ts_process_pmt(GF_M2TS_Demuxer *ts, GF_M2TS_SECTION_ES *pmt, GF if (pes) pes->depends_on_pid = (data[4] & 0x3F) + es->program->pmt_pid; break; + case GF_M2TS_METADATA_DESCRIPTOR: + { + GF_BitStream *metadatad_bs; + GF_M2TS_MetadataDescriptor *metad; + metadatad_bs = gf_bs_new((char *)data+2, len, GF_BITSTREAM_READ); + metad = gf_m2ts_read_metadata_descriptor(metadatad_bs, len); + gf_bs_del(metadatad_bs); + if (metad->application_format_identifier == GF_4CC('I', 'D', '3', ' ') && + metad->format_identifier == GF_4CC('I', 'D', '3', ' ')) { + /*HLS ID3 Metadata */ + pes->metadata_descriptor = metad; + pes->stream_type = GF_M2TS_METADATA_ID3_HLS; + } else { + /* don't know what to do with it for now, delete */ + gf_m2ts_metadata_descriptor_del(metad); + } + } + break; + default: GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS] skipping descriptor (0x%x) not supported\n", tag)); break; @@ -2075,24 +2483,53 @@ void gf_m2ts_pes_header(GF_M2TS_PES *pes, unsigned char *data, u32 data_size, GF } } +static void gf_m2ts_flush_temi(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes) +{ + GF_M2TS_TemiTimecodeDescriptor temi_tc; + GF_BitStream *bs = gf_bs_new(pes->temi_tc_desc, pes->temi_tc_desc_len, GF_BITSTREAM_READ); + u32 has_timestamp = gf_bs_read_int(bs, 2); + u32 has_ntp = gf_bs_read_int(bs, 1); + u32 has_ptp = gf_bs_read_int(bs, 1); + u32 has_timecode = gf_bs_read_int(bs, 2); + + memset(&temi_tc, 0, sizeof(GF_M2TS_TemiTimecodeDescriptor)); + temi_tc.force_reload = gf_bs_read_int(bs, 1); + temi_tc.is_paused = gf_bs_read_int(bs, 1); + temi_tc.is_discontinuity = gf_bs_read_int(bs, 1); + gf_bs_read_int(bs, 7); + temi_tc.timeline_id = gf_bs_read_int(bs, 8); + if (has_timestamp) { + temi_tc.media_timescale = gf_bs_read_u32(bs); + if (has_timestamp==2) + temi_tc.media_timestamp = gf_bs_read_u64(bs); + else + temi_tc.media_timestamp = gf_bs_read_u32(bs); + } + temi_tc.pes_pts = pes->PTS; + gf_bs_del(bs); + pes->temi_tc_desc_len = 0; + ts->on_event(ts, GF_M2TS_EVT_TEMI_TIMECODE, &temi_tc); +} + static void gf_m2ts_flush_pes(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes) { GF_M2TS_PESHeader pesh; /*we need at least a full, valid start code !!*/ - if ((pes->data_len >= 4) && !pes->data[0] && !pes->data[1] && 
(pes->data[2]==0x1)) { + if ((pes->pck_data_len >= 4) && !pes->pck_data[0] && !pes->pck_data[1] && (pes->pck_data[2]==0x1)) { u32 len; - u32 stream_id = pes->data[3] | 0x100; + u32 stream_id = pes->pck_data[3] | 0x100; if ((stream_id >= 0x1c0 && stream_id <= 0x1df) || (stream_id >= 0x1e0 && stream_id <= 0x1ef) || (stream_id == 0x1bd) || + (stream_id == 0x10d) || /*SL-packetized*/ - ((u8) pes->data[3]==0xfa) + ((u8) pes->pck_data[3]==0xfa) ) { Bool same_pts = 0; /*OK read header*/ - gf_m2ts_pes_header(pes, pes->data+3, pes->data_len-3, &pesh); + gf_m2ts_pes_header(pes, pes->pck_data+3, pes->pck_data_len-3, &pesh); /*send PES timing*/ if (ts->notify_pes_timing) { @@ -2141,52 +2578,56 @@ static void gf_m2ts_flush_pes(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes) /*3-byte start-code + 6 bytes header + hdr extensions*/ len = 9 + pesh.hdr_data_len; - if ((u8) pes->data[3]==0xfa) { + if ((u8) pes->pck_data[3]==0xfa) { GF_M2TS_SL_PCK sl_pck; GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS] SL Packet in PES for %d - ES ID %d\n", pes->pid, pes->mpeg4_es_id)); - if (pes->data_len > len) { - sl_pck.data = (char *)pes->data + len; - sl_pck.data_len = pes->data_len - len; + if (pes->pck_data_len > len) { + sl_pck.data = (char *)pes->pck_data + len; + sl_pck.data_len = pes->pck_data_len - len; sl_pck.stream = (GF_M2TS_ES *)pes; if (ts->on_event) ts->on_event(ts, GF_M2TS_EVT_SL_PCK, &sl_pck); } else { - GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[MPEG-2 TS] Bad SL Packet size: (%d indicated < %d header)\n", pes->pid, pes->data_len, len)); + GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[MPEG-2 TS] Bad SL Packet size: (%d indicated < %d header)\n", pes->pid, pes->pck_data_len, len)); } } else if (pes->reframe) { - u32 remain; + u32 remain = 0; u32 offset = len; - if (pesh.pck_len && (pesh.pck_len-3-pesh.hdr_data_len != pes->data_len-len)) { - GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d PES payload size %d but received %d bytes\n", pes->pid, (u32) ( pesh.pck_len-3-pesh.hdr_data_len), pes->data_len-len)); + if (pesh.pck_len && (pesh.pck_len-3-pesh.hdr_data_len != pes->pck_data_len-len)) { + GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d PES payload size %d but received %d bytes\n", pes->pid, (u32) ( pesh.pck_len-3-pesh.hdr_data_len), pes->pck_data_len-len)); } - + //copy over the remaining of previous PES payload before start of this PES payload if (pes->prev_data_len) { assert(pes->prev_data_len < len); offset = len - pes->prev_data_len; - memcpy(pes->data + offset, pes->prev_data, pes->prev_data_len); + memcpy(pes->pck_data + offset, pes->prev_data, pes->prev_data_len); } - remain = pes->reframe(ts, pes, same_pts, pes->data+offset, pes->data_len-offset); + if (pes->temi_tc_desc_len) + gf_m2ts_flush_temi(ts, pes); + + if (! 
ts->start_range) + remain = pes->reframe(ts, pes, same_pts, pes->pck_data+offset, pes->pck_data_len-offset, &pesh); + + //CLEANUP alloc stuff if (pes->prev_data) gf_free(pes->prev_data); pes->prev_data = NULL; pes->prev_data_len = 0; if (remain) { pes->prev_data = gf_malloc(sizeof(char)*remain); - memcpy(pes->prev_data, pes->data + pes->data_len - remain, remain); + memcpy(pes->prev_data, pes->pck_data + pes->pck_data_len - remain, remain); pes->prev_data_len = remain; } } } else { GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS] PES %d: unknown stream ID %08X\n", pes->pid, stream_id)); } - } else if (pes->data) { + } else if (pes->pck_data_len) { GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[MPEG-2 TS] PES %d: Bad PES Header, discarding packet (maybe stream is encrypted ?)\n", pes->pid)); } - if (pes->data) gf_free(pes->data); - pes->data = NULL; - pes->data_len = 0; + pes->pck_data_len = 0; pes->pes_len = 0; pes->rap = 0; } @@ -2215,16 +2656,14 @@ static void gf_m2ts_process_pes(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, GF_M2TS_H disc = 0; if (disc) { if (hdr->payload_start) { - if (pes->data) { + if (pes->pck_data_len) { GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[MPEG-2 TS] PES %d: Packet discontinuity (%d expected - got %d) - may have lost end of previous PES\n", pes->pid, expect_cc, hdr->continuity_counter)); } } else { - if (pes->data) { + if (pes->pck_data_len) { GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[MPEG-2 TS] PES %d: Packet discontinuity (%d expected - got %d) - trashing PES packet\n", pes->pid, expect_cc, hdr->continuity_counter)); - gf_free(pes->data); - pes->data = NULL; } - pes->data_len = 0; + pes->pck_data_len = 0; pes->pes_len = 0; pes->cc = -1; return; @@ -2241,44 +2680,44 @@ static void gf_m2ts_process_pes(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, GF_M2TS_H pes->before_last_pcr_value_pck_number = pes->program->before_last_pcr_value_pck_number; pes->last_pcr_value = pes->program->last_pcr_value; pes->last_pcr_value_pck_number = pes->program->last_pcr_value_pck_number; - } else if (pes->pes_len && (pes->data_len + data_size == pes->pes_len + 6)) { + } else if (pes->pes_len && (pes->pck_data_len + data_size == pes->pes_len + 6)) { /* 6 = startcode+stream_id+length*/ /*reassemble pes*/ - if (pes->data) pes->data = (u8*)gf_realloc(pes->data, pes->data_len+data_size); - else pes->data = (u8*)gf_malloc(data_size); - memcpy(pes->data+pes->data_len, data, data_size); - pes->data_len += data_size; + if (pes->pck_data_len + data_size > pes->pck_alloc_len) { + pes->pck_alloc_len = pes->pck_data_len + data_size; + pes->pck_data = (u8*)gf_realloc(pes->pck_data, pes->pck_alloc_len); + } + memcpy(pes->pck_data+pes->pck_data_len, data, data_size); + pes->pck_data_len += data_size; /*force discard*/ data_size = 0; flush_pes = 1; } /*PES first fragment: flush previous packet*/ - if (flush_pes && pes->data) { + if (flush_pes && pes->pck_data_len) { gf_m2ts_flush_pes(ts, pes); if (!data_size) return; } /*we need to wait for first packet of PES*/ - if (!pes->data_len && !hdr->payload_start) { + if (!pes->pck_data_len && !hdr->payload_start) { GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d: Waiting for PES header, trashing data\n", hdr->pid)); return; } /*reassemble*/ - if (pes->data){ - pes->data = (u8*)gf_realloc(pes->data, pes->data_len+data_size); - //fprintf(stderr, "[MPEG-2 TS] REALLOC \n"); - }else{ - pes->data = (u8*)gf_malloc(data_size); + if (pes->pck_data_len + data_size > pes->pck_alloc_len ){ + pes->pck_alloc_len = pes->pck_data_len + data_size; + pes->pck_data = 
(u8*)gf_realloc(pes->pck_data, pes->pck_alloc_len); } - memcpy(pes->data+pes->data_len, data, data_size); - pes->data_len += data_size; + memcpy(pes->pck_data + pes->pck_data_len, data, data_size); + pes->pck_data_len += data_size; if (paf && paf->random_access_indicator) pes->rap = 1; - if (hdr->payload_start && !pes->pes_len && (pes->data_len>=6)) { - pes->pes_len = (pes->data[4]<<8) | pes->data[5]; + if (hdr->payload_start && !pes->pes_len && (pes->pck_data_len>=6)) { + pes->pes_len = (pes->pck_data[4]<<8) | pes->pck_data[5]; GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d: Got PES packet len %d\n", pes->pid, pes->pes_len)); - if (pes->pes_len + 6 == pes->data_len) { + if (pes->pes_len + 6 == pes->pck_data_len) { gf_m2ts_flush_pes(ts, pes); } } @@ -2287,7 +2726,7 @@ static void gf_m2ts_process_pes(GF_M2TS_Demuxer *ts, GF_M2TS_PES *pes, GF_M2TS_H static void gf_m2ts_get_adaptation_field(GF_M2TS_Demuxer *ts, GF_M2TS_AdaptationField *paf, unsigned char *data, u32 size, u32 pid) { - char *af_extension; + unsigned char *af_extension; paf->discontinuity_indicator = (data[0] & 0x80) ? 1 : 0; paf->random_access_indicator = (data[0] & 0x40) ? 1 : 0; paf->priority_indicator = (data[0] & 0x20) ? 1 : 0; @@ -2342,7 +2781,6 @@ static void gf_m2ts_get_adaptation_field(GF_M2TS_Demuxer *ts, GF_M2TS_Adaptation if (! af_desc_not_present) { while (afext_bytes) { - char URL[255]; GF_BitStream *bs; char *desc; u8 desc_tag = af_extension[0]; @@ -2351,21 +2789,23 @@ static void gf_m2ts_get_adaptation_field(GF_M2TS_Demuxer *ts, GF_M2TS_Adaptation GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d: Bad Adaptation Descriptor found (tag %d) size is %d but only %d bytes available\n", pid, desc_tag, desc_len, afext_bytes)); break; } - desc = af_extension+2; + desc = (char *) af_extension+2; bs = gf_bs_new(desc, desc_len, GF_BITSTREAM_READ); switch (desc_tag) { case GF_M2TS_AFDESC_LOCATION_DESCRIPTOR: { - //u32 timeline_id; - Bool external_url, use_base_temi_url; - /*Bool force_reload = */gf_bs_read_int(bs, 1); - /*Bool is_announcement = */gf_bs_read_int(bs, 1); - /*Bool splicing_flag = */gf_bs_read_int(bs, 1); + Bool external_url , use_base_temi_url; + char URL[255]; + GF_M2TS_TemiLocationDescriptor temi_loc; + memset(&temi_loc, 0, sizeof(GF_M2TS_TemiLocationDescriptor) ); + temi_loc.reload_external = gf_bs_read_int(bs, 1); + temi_loc.is_announce = gf_bs_read_int(bs, 1); + temi_loc.is_splicing = gf_bs_read_int(bs, 1); external_url = gf_bs_read_int(bs, 1); use_base_temi_url = gf_bs_read_int(bs, 1); gf_bs_read_int(bs, 3); //reserved - /*timeline_id = */gf_bs_read_int(bs, 8); + temi_loc.timeline_id = gf_bs_read_int(bs, 8); if (!external_url) { if (!use_base_temi_url) { char *_url = URL; @@ -2384,13 +2824,24 @@ static void gf_m2ts_get_adaptation_field(GF_M2TS_Demuxer *ts, GF_M2TS_Adaptation gf_bs_read_data(bs, _url, url_len); _url[url_len] = 0; } + temi_loc.external_URL = URL; } + GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d AF Location descriptor found - URL %s\n", pid, URL)); + if (ts->on_event) ts->on_event(ts, GF_M2TS_EVT_TEMI_LOCATION, &temi_loc); } - GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d AF Location descriptor found - URL %s\n", pid, URL)); break; case GF_M2TS_AFDESC_TIMELINE_DESCRIPTOR: - - GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d AF Timeline descriptor found\n", pid)); + if (ts->ess[pid] && (ts->ess[pid]->flags & GF_M2TS_ES_IS_PES)) { + GF_M2TS_PES *pes = (GF_M2TS_PES *) ts->ess[pid]; + if (pes->temi_tc_desc_alloc_size < desc_len) { 
+ pes->temi_tc_desc = gf_realloc(pes->temi_tc_desc, desc_len); + pes->temi_tc_desc_alloc_size = desc_len; + } + memcpy(pes->temi_tc_desc, desc, desc_len); + pes->temi_tc_desc_len = desc_len; + + GF_LOG(GF_LOG_INFO, GF_LOG_CONTAINER, ("[MPEG-2 TS] PID %d AF Timeline descriptor found\n", pid)); + } break; } gf_bs_del(bs); @@ -2667,9 +3118,7 @@ void gf_m2ts_reset_parsers(GF_M2TS_Demuxer *ts) if (!pes || (pes->pid==pes->program->pmt_pid)) continue; pes->cc = -1; pes->frame_state = 0; - if (pes->data) gf_free(pes->data); - pes->data = NULL; - pes->data_len = 0; + pes->pck_data_len = 0; if (pes->prev_data) gf_free(pes->prev_data); pes->prev_data = NULL; pes->prev_data_len = 0; @@ -2677,7 +3126,10 @@ void gf_m2ts_reset_parsers(GF_M2TS_Demuxer *ts) pes->pes_len = pes->pes_end_packet_number = pes->pes_start_packet_number = 0; if (pes->buf) gf_free(pes->buf); pes->buf = NULL; - pes->buf_len = 0; + if (pes->temi_tc_desc) gf_free(pes->temi_tc_desc); + pes->temi_tc_desc = NULL; + pes->temi_tc_desc_len = pes->temi_tc_desc_alloc_size = 0; + pes->before_last_pcr_value = pes->before_last_pcr_value_pck_number = 0; pes->last_pcr_value = pes->last_pcr_value_pck_number = 0; if (pes->program->pcr_pid==pes->pid) { @@ -2730,6 +3182,7 @@ GF_Err gf_m2ts_set_pes_framing(GF_M2TS_PES *pes, u32 mode) case GF_M2TS_PES_FRAMING_SKIP_NO_RESET: pes->reframe = NULL; break; + case GF_M2TS_PES_FRAMING_DEFAULT_NAL: case GF_M2TS_PES_FRAMING_DEFAULT: default: switch (pes->stream_type) { @@ -2745,10 +3198,12 @@ GF_Err gf_m2ts_set_pes_framing(GF_M2TS_PES *pes, u32 mode) case GF_M2TS_VIDEO_H264: case GF_M2TS_VIDEO_SVC: pes->reframe = gf_m2ts_reframe_avc_h264; + pes->single_nal_mode = (mode==GF_M2TS_PES_FRAMING_DEFAULT_NAL) ? 1 : 0; break; case GF_M2TS_VIDEO_HEVC: case GF_M2TS_VIDEO_SHVC: pes->reframe = gf_m2ts_reframe_hevc; + pes->single_nal_mode = (mode==GF_M2TS_PES_FRAMING_DEFAULT_NAL) ? 
1 : 0; break; case GF_M2TS_AUDIO_AAC: pes->reframe = gf_m2ts_reframe_aac_adts; @@ -2760,6 +3215,10 @@ GF_Err gf_m2ts_set_pes_framing(GF_M2TS_PES *pes, u32 mode) case GF_M2TS_PRIVATE_DATA: /* TODO: handle DVB subtitle streams */ + break; + case GF_M2TS_METADATA_ID3_HLS: + pes->reframe = gf_m2ts_reframe_id3_pes; + break; default: pes->reframe = gf_m2ts_reframe_default; break; @@ -3115,23 +3574,27 @@ static u32 gf_m2ts_demuxer_run(void *_p) u32 pos = 0; GF_BitStream *ts_bs = NULL; - if (ts->start_range && ts->duration) { - Double perc = ts->start_range / (1000 * ts->duration); - pos = (u32) (s64) (perc * ts->file_size); - /*align to TS packet size*/ - while (pos%188) pos++; - if (pos>=ts->file_size) { - ts->start_range = 0; - pos = 0; - } - } - if (ts->file) ts_bs = gf_bs_from_file(ts->file, GF_BITSTREAM_READ); else ts_bs = gf_bs_new(ts->ts_data_chunk, ts->ts_data_chunk_size, GF_BITSTREAM_READ); while (ts->run_state && gf_bs_available(ts_bs) && !ts->force_file_refresh) { + + if (ts->start_range && ts->duration) { + Double perc = ts->start_range / (1000 * ts->duration); + pos = (u32) (s64) (perc * ts->file_size); + /*align to TS packet size*/ + pos/=188; + pos*=188; + + if (pos>=ts->file_size) { + pos = 0; + } + ts->start_range = 0; + gf_bs_seek(ts_bs, pos); + } + /*m2ts chunks by chunks*/ size = gf_bs_read_data(ts_bs, data, 188); if (!size && (ts->loop_demux == 1)) { @@ -3588,7 +4051,7 @@ Bool gf_m2ts_probe_file(const char *fileName) u32 size; u8 *mem_address; if (sscanf(fileName, "gmem://%d@%p", &size, &mem_address) != 2) { - return GF_URL_ERROR; + return GF_FALSE; } while (size>188 && count) { if (mem_address[0] != 0x47) @@ -3597,7 +4060,7 @@ Bool gf_m2ts_probe_file(const char *fileName) size-=188; count--; } - return 1; + return GF_TRUE; } t = gf_f64_open(fileName, "rb"); @@ -3614,7 +4077,7 @@ Bool gf_m2ts_probe_file(const char *fileName) else count--; } if (t) fclose(t); - return count ? 0 : 1; + return count ? 
GF_FALSE : GF_TRUE; } static void rewrite_pts_dts(unsigned char *ptr, u64 TS) diff --git a/src/media_tools/reedsolomon.c b/src/media_tools/reedsolomon.c index d6f3f5f..af40457 100644 --- a/src/media_tools/reedsolomon.c +++ b/src/media_tools/reedsolomon.c @@ -322,7 +322,7 @@ Find_Roots (void) if (sum == 0) { ErrorLocs[NErrors] = (255-r); NErrors++; - if (DEBUG) fprintf(stderr, "Root found at r = %d, (255-r) = %d\n", r, (255-r)); + if (RS_DEBUG) fprintf(stderr, "Root found at r = %d, (255-r) = %d\n", r, (255-r)); } } } @@ -363,7 +363,7 @@ correct_errors_erasures (unsigned char codeword[], /* first check for illegal error locs */ for (r = 0; r < NErrors; r++) { if (ErrorLocs[r] >= csize) { - if (DEBUG) fprintf(stderr, "Error loc i=%d outside of codeword length %d\n", i, csize); + if (RS_DEBUG) fprintf(stderr, "Error loc i=%d outside of codeword length %d\n", i, csize); return(0); } } @@ -384,14 +384,14 @@ correct_errors_erasures (unsigned char codeword[], } err = gmult(num, ginv(denom)); - if (DEBUG) fprintf(stderr, "Error magnitude %#x at loc %d\n", err, csize-i); + if (RS_DEBUG) fprintf(stderr, "Error magnitude %#x at loc %d\n", err, csize-i); codeword[csize-i-1] ^= err; } return(1); } else { - if (DEBUG && NErrors) fprintf(stderr, "Uncorrectable codeword\n"); + if (RS_DEBUG && NErrors) fprintf(stderr, "Uncorrectable codeword\n"); return(0); } } @@ -413,7 +413,7 @@ correct_errors_erasures (unsigned char codeword[], /* generator polynomial */ int genPoly[MAXDEG*2]; - int DEBUG = FALSE; + int RS_DEBUG = FALSE; static void compute_genpoly (int nbytes, int genpoly[]); diff --git a/src/media_tools/webvtt.c b/src/media_tools/webvtt.c index 81b1017..3c8867e 100644 --- a/src/media_tools/webvtt.c +++ b/src/media_tools/webvtt.c @@ -90,6 +90,7 @@ void vtcu_del(GF_Box *s) if (box->id) gf_isom_box_del((GF_Box *)box->id); if (box->settings) gf_isom_box_del((GF_Box *)box->settings); if (box->payload) gf_isom_box_del((GF_Box *)box->payload); + gf_free(s); } void vtte_del(GF_Box *s) @@ -587,6 +588,7 @@ static GF_WebVTTCue *gf_webvtt_cue_new() return cue; } +GF_EXPORT void gf_webvtt_cue_del(GF_WebVTTCue * cue) { if (cue) { @@ -1245,6 +1247,7 @@ GF_Err gf_webvtt_dump_iso_sample(FILE *dump, u32 timescale, GF_ISOSample *iso_sa return GF_OK; } +GF_EXPORT GF_List *gf_webvtt_parse_iso_cues(GF_ISOSample *iso_sample, u64 start) { return gf_webvtt_parse_cues_from_data(iso_sample->data, iso_sample->dataLength, start); @@ -1409,7 +1412,7 @@ static GF_Err gf_webvtt_parser_dump_finalize(GF_WebVTTParser *parser, u64 durati return GF_OK; } -void gf_webvtt_dump_cue(void *user, GF_WebVTTCue *cue) +static void gf_webvtt_dump_cue(void *user, GF_WebVTTCue *cue) { FILE *dump = (FILE *)user; if (!cue || !dump) return; @@ -1431,7 +1434,7 @@ void gf_webvtt_dump_cue(void *user, GF_WebVTTCue *cue) fprintf(dump, "\n"); } -GF_Err gf_webvtt_dump_cues(FILE *dump, GF_List *cues) +static GF_Err gf_webvtt_dump_cues(FILE *dump, GF_List *cues) { u32 i; for (i = 0; i < gf_list_count(cues); i++) { diff --git a/src/scene_manager/loader_bt.c b/src/scene_manager/loader_bt.c index 1b9405c..c5a86a6 100644 --- a/src/scene_manager/loader_bt.c +++ b/src/scene_manager/loader_bt.c @@ -3493,7 +3493,7 @@ static GF_Err gf_sm_load_bt_initialize(GF_SceneLoader *load, const char *str, Bo parser->initialized = 0; return GF_OK; } - strncpy(BOM, str, 5); + strncpy((char *) BOM, str, 5); } /*0: no unicode, 1: UTF-16BE, 2: UTF-16LE*/ diff --git a/src/scene_manager/scene_dump.c b/src/scene_manager/scene_dump.c index 6653309..bde2e69 100644 --- 
a/src/scene_manager/scene_dump.c +++ b/src/scene_manager/scene_dump.c @@ -560,7 +560,7 @@ static void gf_dump_vrml_sffield(GF_SceneDumper *sdump, u32 type, void *ptr, Boo u16 *uniLine; str = (char*)((SFScript *)ptr)->script_text; len = (u32)strlen(str); - uniLine = (u16*)gf_malloc(sizeof(short) * len); + uniLine = (u16*)gf_malloc(sizeof(short) * (len+1)); _len = gf_utf8_mbstowcs(uniLine, len, (const char **) &str); if (_len != (size_t) -1) { len = (u32) _len; diff --git a/src/scene_manager/swf_parse.c b/src/scene_manager/swf_parse.c index 4211478..7425d84 100644 --- a/src/scene_manager/swf_parse.c +++ b/src/scene_manager/swf_parse.c @@ -105,7 +105,7 @@ static void swf_init_decompress(SWFReader *read) memset(dst, 0, sizeof(char)*8); gf_bs_read_data(read->bs, src, size); dst_size -= 8; - uncompress(dst+8, (uLongf *)&dst_size, src, size); + uncompress((Bytef *) dst+8, (uLongf *)&dst_size, (Bytef *) src, size); dst_size += 8; gf_free(src); read->mem = dst; @@ -2105,7 +2105,7 @@ static GF_Err swf_def_bits_jpeg(SWFReader *read, u32 version) osize = w*h; dst = gf_malloc(sizeof(char)*osize); - uncompress(dst, (uLongf *) &osize, buf, AlphaPlaneSize); + uncompress((Bytef *) dst, (uLongf *) &osize, buf, AlphaPlaneSize); /*write alpha channel*/ for (j=0; jsgprivate->UserCallback) node->sgprivate->UserCallback(node, NULL, 1); + + if (node->sgprivate->scenegraph && node->sgprivate->scenegraph->NodeCallback) + node->sgprivate->scenegraph->NodeCallback(node->sgprivate->scenegraph->userpriv, GF_SG_CALLBACK_NODE_DESTROY, node, NULL); if (node->sgprivate->interact) { if (node->sgprivate->interact->routes) { diff --git a/src/scenegraph/dom_events.c b/src/scenegraph/dom_events.c index aa1d1de..8d362f5 100644 --- a/src/scenegraph/dom_events.c +++ b/src/scenegraph/dom_events.c @@ -357,14 +357,15 @@ static void dom_event_process(GF_Node *listen, GF_DOM_Event *event, GF_Node *obs } GF_EXPORT -Bool sg_fire_dom_event(GF_DOMEventTarget *et, GF_DOM_Event *event, GF_SceneGraph *sg, GF_Node *n) +Bool gf_sg_fire_dom_event(GF_DOMEventTarget *et, GF_DOM_Event *event, GF_SceneGraph *sg, GF_Node *n) { if (et) { if (et->ptr_type==GF_DOM_EVENT_TARGET_NODE || et->ptr_type == GF_DOM_EVENT_TARGET_DOCUMENT || et->ptr_type == GF_DOM_EVENT_TARGET_XHR || et->ptr_type == GF_DOM_EVENT_TARGET_MSE_MEDIASOURCE || - et->ptr_type == GF_DOM_EVENT_TARGET_HTML_MEDIA) { + et->ptr_type == GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFER || + et->ptr_type == GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFERLIST ) { GF_Node *observer = NULL; u32 i, count, post_count; if (et->ptr_type==GF_DOM_EVENT_TARGET_NODE) { @@ -465,7 +466,7 @@ static void gf_sg_dom_event_bubble(GF_Node *node, GF_DOM_Event *event, GF_List * if (!parent) { /*top of the graph, use Document*/ if (node->sgprivate->scenegraph->RootNode==node) - sg_fire_dom_event(node->sgprivate->scenegraph->dom_evt, event, node->sgprivate->scenegraph, NULL); + gf_sg_fire_dom_event(node->sgprivate->scenegraph->dom_evt, event, node->sgprivate->scenegraph, NULL); return; } if (cur_par_idx) { @@ -477,7 +478,7 @@ static void gf_sg_dom_event_bubble(GF_Node *node, GF_DOM_Event *event, GF_List * else cur_par_idx = 0; /*if no events attached,bubble by default*/ if (parent->sgprivate->interact) { - Bool can_bubble = sg_fire_dom_event(parent->sgprivate->interact->dom_evt, event, node->sgprivate->scenegraph, parent); + Bool can_bubble = gf_sg_fire_dom_event(parent->sgprivate->interact->dom_evt, event, node->sgprivate->scenegraph, parent); if (!can_bubble) { return; } @@ -489,7 +490,7 @@ static void gf_sg_dom_event_bubble(GF_Node 
*node, GF_DOM_Event *event, GF_List * /*if no events attached,bubble by default*/ if (parent->sgprivate->interact) { Bool can_bubble; - can_bubble = sg_fire_dom_event(parent->sgprivate->interact->dom_evt, event, node->sgprivate->scenegraph, parent); + can_bubble = gf_sg_fire_dom_event(parent->sgprivate->interact->dom_evt, event, node->sgprivate->scenegraph, parent); if(!can_bubble) return; } gf_sg_dom_event_bubble(parent, event, use_stack, cur_par_idx); @@ -544,7 +545,7 @@ Bool gf_dom_event_fire_ex(GF_Node *node, GF_DOM_Event *event, GF_List *use_stack for (i=0; isgprivate->interact) - sg_fire_dom_event(n->sgprivate->interact->dom_evt, event, node->sgprivate->scenegraph, n); + gf_sg_fire_dom_event(n->sgprivate->interact->dom_evt, event, node->sgprivate->scenegraph, n); /*event has been canceled*/ if (event->event_phase & (GF_DOM_EVENT_PHASE_CANCEL|GF_DOM_EVENT_PHASE_CANCEL_ALL) ) { @@ -571,7 +572,7 @@ Bool gf_dom_event_fire_ex(GF_Node *node, GF_DOM_Event *event, GF_List *use_stack sg->abort_bubbling = GF_FALSE; if (node->sgprivate->interact) { - can_bubble = sg_fire_dom_event(node->sgprivate->interact->dom_evt, event, node->sgprivate->scenegraph, node); + can_bubble = gf_sg_fire_dom_event(node->sgprivate->interact->dom_evt, event, node->sgprivate->scenegraph, node); } if ( (!node->sgprivate->interact || can_bubble) && event->bubbles) { /*bubbling phase*/ @@ -920,6 +921,23 @@ void gf_dom_event_target_del(GF_DOMEventTarget *target) gf_free(target); } +GF_DOMEventTarget *gf_dom_event_get_target_from_node(GF_Node *n) +{ + GF_DOMEventTarget *target = NULL; + //GF_HTML_MediaElement *me = html_media_element_get_from_node(c, n); + //*target = me->evt_target; + + if (!n->sgprivate->interact) { + GF_SAFEALLOC(n->sgprivate->interact, struct _node_interactive_ext); + } + if (!n->sgprivate->interact->dom_evt) { + n->sgprivate->interact->dom_evt = gf_dom_event_target_new(GF_DOM_EVENT_TARGET_NODE, n); + } + target = n->sgprivate->interact->dom_evt; + + return target; +} + #endif //GPAC_DISABLE_SVG diff --git a/src/scenegraph/dom_smjs.c b/src/scenegraph/dom_smjs.c index 315d1bd..f60032c 100644 --- a/src/scenegraph/dom_smjs.c +++ b/src/scenegraph/dom_smjs.c @@ -231,6 +231,7 @@ typedef enum { EVENT_JSPROPERTY_TRANSLATIONY = -37, EVENT_JSPROPERTY_TYPE3D = -38, EVENT_JSPROPERTY_ERROR = -39, + EVENT_JSPROPERTY_DYNAMIC_SCENE = -40, } GF_DOMEventJSProperty; typedef enum { @@ -611,7 +612,7 @@ static SMJS_FUNC_PROP_GET( dom_nodelist_getProperty) DOMNodeList *nl; u32 count; - u32 idx; + s32 idx; if (!GF_JS_InstanceOf(c, obj, &dom_rt->domNodeListClass, NULL)) { return JS_TRUE; } @@ -684,10 +685,16 @@ static JSBool sg_js_get_event_target(JSContext *c, JSObject *obj, GF_EventType e if (gf_dom_event_get_category(evtType) == GF_DOM_EVENT_MEDIA) { void gf_html_media_get_event_target(JSContext *c, JSObject *obj, GF_DOMEventTarget **target, GF_SceneGraph **sg); gf_html_media_get_event_target(c, obj, target, sg); - } else if (gf_dom_event_get_category(evtType) == GF_DOM_EVENT_MEDIASOURCE) { + if (*target && *sg) return JS_TRUE; + } + + if (gf_dom_event_get_category(evtType) == GF_DOM_EVENT_MEDIASOURCE) { void gf_mse_get_event_target(JSContext *c, JSObject *obj, GF_DOMEventTarget **target, GF_SceneGraph **sg); gf_mse_get_event_target(c, obj, target, sg); - } else if (GF_JS_InstanceOf(c, obj, &dom_rt->domDocumentClass, NULL) || is_svg_document_class(c, obj)) { + if (*target && *sg) return JS_TRUE; + } + + if (GF_JS_InstanceOf(c, obj, &dom_rt->domDocumentClass, NULL) || is_svg_document_class(c, obj)) { /*document interface*/ 
*sg = dom_get_doc(c, obj); if (*sg) { @@ -699,7 +706,7 @@ static JSBool sg_js_get_event_target(JSContext *c, JSObject *obj, GF_EventType e } else { return JS_TRUE; } - } else if (GF_JS_InstanceOf(c, obj, &dom_rt->domElementClass, NULL) || is_svg_element_class(c, obj)) { + } else if (GF_JS_InstanceOf(c, obj, &dom_rt->domElementClass, NULL) || is_svg_element_class(c, obj) || vrml_node) { /*Element interface*/ if (vrml_node) { *n = vrml_node; @@ -943,7 +950,13 @@ JSBool SMJS_FUNCTION_EXT(gf_sg_js_event_remove_listener, GF_Node *vrml_node) hdl = (SVG_handlerElement *) ((XMLRI*)info.far_ptr)->target; if (!hdl) continue; if (! JSVAL_IS_NULL(funval) ) { +#if (JS_VERSION>=185) + JSBool res = JS_FALSE; + if (! JS_StrictlyEqual(c, funval, *(jsval *)&hdl->js_fun_val, &res)) + continue; +#else if (funval != *(jsval *)&hdl->js_fun_val) continue; +#endif } else if (hdl->children) { txt = (GF_DOMText *) hdl->children->node; if (txt->sgprivate->tag != TAG_DOMText) continue; @@ -2394,6 +2407,12 @@ static SMJS_FUNC_PROP_GET( event_getProperty) case GF_DOM_EVENT_TARGET_MSE_MEDIASOURCE: *vp = OBJECT_TO_JSVAL(((GF_HTML_MediaSource *)evt->target)->_this); break; + case GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFER: + *vp = OBJECT_TO_JSVAL(((GF_HTML_SourceBuffer *)evt->target)->_this); + break; + case GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFERLIST: + *vp = OBJECT_TO_JSVAL(((GF_HTML_SourceBufferList *)evt->target)->_this); + break; default: break; } @@ -2410,6 +2429,12 @@ static SMJS_FUNC_PROP_GET( event_getProperty) case GF_DOM_EVENT_TARGET_MSE_MEDIASOURCE: *vp = OBJECT_TO_JSVAL(((GF_HTML_MediaSource *)evt->target)->_this); break; + case GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFER: + *vp = OBJECT_TO_JSVAL(((GF_HTML_SourceBuffer *)evt->target)->_this); + break; + case GF_DOM_EVENT_TARGET_MSE_SOURCEBUFFERLIST: + *vp = OBJECT_TO_JSVAL(((GF_HTML_SourceBufferList *)evt->target)->_this); + break; default: break; } @@ -2525,6 +2550,8 @@ static SMJS_FUNC_PROP_GET( event_getProperty) *vp = INT_TO_JSVAL(evt->detail); return JS_TRUE; case EVENT_JSPROPERTY_ERROR: *vp = INT_TO_JSVAL(evt->error_state); return JS_TRUE; + case EVENT_JSPROPERTY_DYNAMIC_SCENE: + *vp = INT_TO_JSVAL(evt->key_flags ? 1 : 0); return JS_TRUE; default: return JS_TRUE; } @@ -2834,14 +2861,14 @@ static DECL_FINALIZE(xml_http_finalize) if (!GF_JS_InstanceOf(c, obj, &dom_rt->xmlHTTPRequestClass, NULL) ) return; ctx = (XMLHTTPContext *)SMJS_GET_PRIVATE(c, obj); if (ctx) { - if (ctx->onabort) gf_js_remove_root(c, &(ctx->onabort), GF_JSGC_VAL); - if (ctx->onerror) gf_js_remove_root(c, &(ctx->onerror), GF_JSGC_VAL); - if (ctx->onload) gf_js_remove_root(c, &(ctx->onload), GF_JSGC_VAL); - if (ctx->onloadend) gf_js_remove_root(c, &(ctx->onloadend), GF_JSGC_VAL); - if (ctx->onloadstart) gf_js_remove_root(c, &(ctx->onloadstart), GF_JSGC_VAL); - if (ctx->onprogress) gf_js_remove_root(c, &(ctx->onprogress), GF_JSGC_VAL); - if (ctx->onreadystatechange) gf_js_remove_root(c, &(ctx->onreadystatechange), GF_JSGC_VAL); - if (ctx->ontimeout) gf_js_remove_root(c, &(ctx->ontimeout), GF_JSGC_VAL); + if (! JSVAL_IS_NULL(ctx->onabort)) gf_js_remove_root(c, &(ctx->onabort), GF_JSGC_VAL); + if (! JSVAL_IS_NULL(ctx->onerror)) gf_js_remove_root(c, &(ctx->onerror), GF_JSGC_VAL); + if (! JSVAL_IS_NULL(ctx->onload)) gf_js_remove_root(c, &(ctx->onload), GF_JSGC_VAL); + if (! JSVAL_IS_NULL(ctx->onloadend)) gf_js_remove_root(c, &(ctx->onloadend), GF_JSGC_VAL); + if (! JSVAL_IS_NULL(ctx->onloadstart)) gf_js_remove_root(c, &(ctx->onloadstart), GF_JSGC_VAL); + if (! 
JSVAL_IS_NULL(ctx->onprogress)) gf_js_remove_root(c, &(ctx->onprogress), GF_JSGC_VAL); + if (! JSVAL_IS_NULL(ctx->onreadystatechange)) gf_js_remove_root(c, &(ctx->onreadystatechange), GF_JSGC_VAL); + if (! JSVAL_IS_NULL(ctx->ontimeout)) gf_js_remove_root(c, &(ctx->ontimeout), GF_JSGC_VAL); xml_http_reset(ctx); gf_dom_event_target_del(ctx->event_target); ctx->event_target = NULL; @@ -2871,7 +2898,7 @@ static void xml_http_fire_event(XMLHTTPContext *ctx, GF_EventType evtType) xhr_evt.type = evtType; xhr_evt.target = ctx->event_target->ptr; xhr_evt.target_type = ctx->event_target->ptr_type; - sg_fire_dom_event(ctx->event_target, &xhr_evt, ctx->owning_graph, NULL); + gf_sg_fire_dom_event(ctx->event_target, &xhr_evt, ctx->owning_graph, NULL); } static void xml_http_state_change(XMLHTTPContext *ctx) @@ -2881,7 +2908,7 @@ static void xml_http_state_change(XMLHTTPContext *ctx) jsval rval; gf_sg_lock_javascript(ctx->c, GF_TRUE); - if (ctx->onreadystatechange) + if (! JSVAL_IS_NULL(ctx->onreadystatechange)) JS_CallFunctionValue(ctx->c, ctx->_this, ctx->onreadystatechange, 0, NULL, &rval); gf_sg_lock_javascript(ctx->c, GF_FALSE); @@ -2966,7 +2993,7 @@ static JSBool SMJS_FUNCTION(xml_http_open) ctx->readyState = XHR_READYSTATE_OPENED; xml_http_state_change(ctx); xml_http_fire_event(ctx, GF_EVENT_MEDIA_LOAD_START); - if (ctx->onloadstart) { + if (! JSVAL_IS_NULL(ctx->onloadstart) ) { jsval rval; return JS_CallFunctionValue(ctx->c, ctx->_this, ctx->onloadstart, 0, NULL, &rval); } @@ -3085,11 +3112,11 @@ static void xml_http_terminate(XMLHTTPContext *ctx, GF_Err error) xml_http_state_change(ctx); xml_http_fire_event(ctx, GF_EVENT_LOAD); xml_http_fire_event(ctx, GF_EVENT_MEDIA_LOAD_DONE); - if (ctx->onload) { + if (! JSVAL_IS_NULL(ctx->onload)) { jsval rval; JS_CallFunctionValue(ctx->c, ctx->_this, ctx->onload, 0, NULL, &rval); } - if (ctx->onloadend) { + if (! JSVAL_IS_NULL(ctx->onloadend)) { jsval rval; JS_CallFunctionValue(ctx->c, ctx->_this, ctx->onloadend, 0, NULL, &rval); } @@ -3132,7 +3159,7 @@ static void xml_http_on_data(void *usr_cbk, GF_NETIO_Parameter *parameter) ctx->readyState = XHR_READYSTATE_HEADERS_RECEIVED; xml_http_state_change(ctx); xml_http_fire_event(ctx, GF_EVENT_MEDIA_PROGRESS); - if (ctx->onprogress) { + if (! JSVAL_IS_NULL(ctx->onprogress) ) { jsval rval; JS_CallFunctionValue(ctx->c, ctx->_this, ctx->onprogress, 0, NULL, &rval); } @@ -3148,7 +3175,7 @@ static void xml_http_on_data(void *usr_cbk, GF_NETIO_Parameter *parameter) ctx->readyState = XHR_READYSTATE_HEADERS_RECEIVED; xml_http_state_change(ctx); xml_http_fire_event(ctx, GF_EVENT_MEDIA_PROGRESS); - if (ctx->onprogress) { + if (! JSVAL_IS_NULL(ctx->onprogress) ) { jsval rval; JS_CallFunctionValue(ctx->c, ctx->_this, ctx->onprogress, 0, NULL, &rval); } @@ -3246,7 +3273,7 @@ static GF_Err xml_http_process_local(XMLHTTPContext *ctx) ctx->html_status = 404; GF_LOG(GF_LOG_ERROR, GF_LOG_SCRIPT, ("[XmlHttpRequest] cannot open local file %s\n", ctx->url)); xml_http_fire_event(ctx, GF_EVENT_ERROR); - if (ctx->onerror) { + if (! JSVAL_IS_NULL(ctx->onerror) ) { jsval rval; JS_CallFunctionValue(ctx->c, ctx->_this, ctx->onerror, 0, NULL, &rval); } @@ -3291,6 +3318,9 @@ static GF_Err xml_http_process_local(XMLHTTPContext *ctx) par.msg_type = GF_NETIO_DATA_TRANSFERED; xml_http_on_data(ctx, &par); + if (!ctx->async) { + xml_http_terminate(ctx, GF_OK); + } return GF_OK; } @@ -3382,7 +3412,7 @@ static JSBool SMJS_FUNCTION(xml_http_abort) if (sess) gf_dm_sess_del(sess); xml_http_fire_event(ctx, GF_EVENT_ABORT); - if (ctx->onabort) { + if (! 
JSVAL_IS_NULL(ctx->onabort)) { jsval rval; return JS_CallFunctionValue(ctx->c, ctx->_this, ctx->onabort, 0, NULL, &rval); } @@ -3504,42 +3534,42 @@ static SMJS_FUNC_PROP_GET(xml_http_getProperty) *vp = JSVAL_VOID; switch (SMJS_ID_TO_INT(id)) { case XHR_ONABORT: - if (ctx->onabort) { + if (! JSVAL_IS_NULL(ctx->onabort)) { *vp = ctx->onabort; } return JS_TRUE; case XHR_ONERROR: - if (ctx->onerror) { + if (! JSVAL_IS_NULL(ctx->onerror)) { *vp = ctx->onerror; } return JS_TRUE; case XHR_ONLOAD: - if (ctx->onload) { + if (! JSVAL_IS_NULL(ctx->onload)) { *vp = ctx->onload; } return JS_TRUE; case XHR_ONLOADSTART: - if (ctx->onloadstart) { + if (! JSVAL_IS_NULL(ctx->onloadstart) ) { *vp = ctx->onloadstart; } return JS_TRUE; case XHR_ONLOADEND: - if (ctx->onloadend) { + if (! JSVAL_IS_NULL(ctx->onloadend)) { *vp = ctx->onloadend; } return JS_TRUE; case XHR_ONPROGRESS: - if (ctx->onprogress) { + if (! JSVAL_IS_NULL(ctx->onprogress) ) { *vp = ctx->onprogress; } return JS_TRUE; case XHR_ONREADYSTATECHANGE: - if (ctx->onreadystatechange) { + if (! JSVAL_IS_NULL(ctx->onreadystatechange)) { *vp = ctx->onreadystatechange; } return JS_TRUE; case XHR_ONTIMEOUT: - if (ctx->ontimeout) { + if (! JSVAL_IS_NULL(ctx->ontimeout)) { *vp = ctx->ontimeout; } return JS_TRUE; @@ -3682,7 +3712,7 @@ static SMJS_FUNC_PROP_GET(xml_http_getProperty) JSBool gf_set_js_eventhandler(JSContext *c, jsval vp, jsval *callbackfuncval) { if (!callbackfuncval) return JS_FALSE; - if (*callbackfuncval) { + if (! JSVAL_IS_NULL( *callbackfuncval )) { gf_js_remove_root(c, callbackfuncval, GF_JSGC_VAL); } if (JSVAL_IS_VOID(vp)) { @@ -3696,7 +3726,7 @@ JSBool gf_set_js_eventhandler(JSContext *c, jsval vp, jsval *callbackfuncval) { } else if (JSVAL_IS_OBJECT(vp)) { *callbackfuncval = vp; } - if (*callbackfuncval) { + if (! 
JSVAL_IS_NULL( *callbackfuncval )) { gf_js_add_root(c, callbackfuncval, GF_JSGC_VAL); } return JS_TRUE; @@ -4046,8 +4076,10 @@ static JSBool SMJS_FUNCTION(dcci_search_property) static SMJS_FUNC_PROP_GET( storage_getProperty) /*avoids gcc warning*/ - if (!id) id=0; - if (!GF_JS_InstanceOf(c, obj, &dom_rt->storageClass, NULL) ) return JS_TRUE; +#ifndef GPAC_CONFIG_DARWIN + if (!id) id=0; +#endif + if (!GF_JS_InstanceOf(c, obj, &dom_rt->storageClass, NULL) ) return JS_TRUE; *vp = JSVAL_VOID; return JS_TRUE; } @@ -4055,7 +4087,9 @@ static SMJS_FUNC_PROP_GET( storage_getProperty) static SMJS_FUNC_PROP_SET_NOVP( storage_setProperty) /*avoids gcc warning*/ - if (!id) id=0; +#ifndef GPAC_CONFIG_DARWIN + if (!id) id=0; +#endif if (!GF_JS_InstanceOf(c, obj, &dom_rt->storageClass, NULL) ) return JS_TRUE; return JS_TRUE; } @@ -4335,6 +4369,7 @@ void dom_js_load(GF_SceneGraph *scene, JSContext *c, JSObject *global) SMJS_PROPERTY_SPEC("translation_y", EVENT_JSPROPERTY_TRANSLATIONY,JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED | JSPROP_READONLY, 0, 0), SMJS_PROPERTY_SPEC("type3d", EVENT_JSPROPERTY_TYPE3D, JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED | JSPROP_READONLY, 0, 0), SMJS_PROPERTY_SPEC("error", EVENT_JSPROPERTY_ERROR, JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED | JSPROP_READONLY, 0, 0), + SMJS_PROPERTY_SPEC("dynamic_scene", EVENT_JSPROPERTY_DYNAMIC_SCENE, JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED | JSPROP_READONLY, 0, 0), SMJS_PROPERTY_SPEC(0, 0, 0, 0, 0), }; diff --git a/src/scenegraph/html5_media_smjs.c b/src/scenegraph/html5_media_smjs.c index 3648eda..d94b170 100644 --- a/src/scenegraph/html5_media_smjs.c +++ b/src/scenegraph/html5_media_smjs.c @@ -156,11 +156,11 @@ typedef enum { GF_Node *n = (GF_Node *)SMJS_GET_PRIVATE(c, obj); \ if (!n || (n->sgprivate->tag != TAG_SVG_video && n->sgprivate->tag != TAG_SVG_audio)) \ { \ - return JS_TRUE; \ + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); \ } \ me = (GF_HTML_MediaElement *)html_media_element_get_from_node(c, n); \ if (!me) { \ - return JS_TRUE; \ + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); \ } #define HTML_MEDIA_JS_START HTML_MEDIA_JS_CHECK @@ -240,15 +240,15 @@ static void gf_html_media_controller_init_js(GF_HTML_MediaController *mc, JSCont mc->c = c; mc->_this = JS_NewObject(c, &html_media_rt->mediaControllerClass._class, NULL, NULL); SMJS_SET_PRIVATE(c, mc->_this, mc); - mc->buffered.c = c; - mc->buffered._this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, mc->_this); - SMJS_SET_PRIVATE(c, mc->buffered._this, &mc->buffered); - mc->played.c = c; - mc->played._this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, mc->_this); - SMJS_SET_PRIVATE(c, mc->played._this, &mc->played); - mc->seekable.c = c; - mc->seekable._this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, mc->_this); - SMJS_SET_PRIVATE(c, mc->seekable._this, &mc->seekable); + mc->buffered->c = c; + mc->buffered->_this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, mc->_this); + SMJS_SET_PRIVATE(c, mc->buffered->_this, mc->buffered); + mc->played->c = c; + mc->played->_this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, mc->_this); + SMJS_SET_PRIVATE(c, mc->played->_this, mc->played); + mc->seekable->c = c; + mc->seekable->_this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, mc->_this); + SMJS_SET_PRIVATE(c, mc->seekable->_this, mc->seekable); } */ @@ -274,26 +274,19 @@ static void 
gf_html_media_element_init_js(GF_HTML_MediaElement *me, JSContext *c me->textTracks._this = JS_NewObject(c, &html_media_rt->textTrackListClass._class, NULL, me->_this); SMJS_SET_PRIVATE(c, me->textTracks._this, &me->textTracks); - me->buffered.c = c; - me->buffered._this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, me->_this); - SMJS_SET_PRIVATE(c, me->buffered._this, &me->buffered); + me->buffered->c = c; + me->buffered->_this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, me->_this); + SMJS_SET_PRIVATE(c, me->buffered->_this, me->buffered); - me->played.c = c; - me->played._this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, me->_this); - SMJS_SET_PRIVATE(c, me->played._this, &me->played); + me->played->c = c; + me->played->_this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, me->_this); + SMJS_SET_PRIVATE(c, me->played->_this, me->played); - me->seekable.c = c; - me->seekable._this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, me->_this); - SMJS_SET_PRIVATE(c, me->seekable._this, &me->seekable); + me->seekable->c = c; + me->seekable->_this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, me->_this); + SMJS_SET_PRIVATE(c, me->seekable->_this, me->seekable); } -/* - * TODO : Unused, create warnings on debian -static void html_media_script_error(JSContext *c, const char *msg, JSErrorReport *jserr) -{ - GF_LOG(GF_LOG_ERROR, GF_LOG_SCRIPT, ("[JavaScript] Error: %s - line %d (%s)", msg, jserr->lineno, jserr->linebuf)); -}*/ - /* Function to browse the tracks in the MediaObject associated with the Media Element and to create appropriate HTML Track objects * * \param c The JavaScript Context to create the new JS object @@ -364,6 +357,34 @@ static void html_media_element_populate_tracks(JSContext *c, GF_HTML_MediaElemen } } +/* Used to retrieve the structure implementing the GF_HTML_MediaElement interface associated with this node + * Usually this is done with the private stack of the node (see gf_node_get_private), but in this case, + * the stack already contains the rendering stack SVG_video_stack. 
+ * So, we store the structure implementing the GF_HTML_MediaElement interface in the JavaScript context of this node, + * as a non enumeratable property named 'gpac_me_impl' + * + * \param c the global JavaScript context + * \param n the audio or video node + * \return the GF_HTML_MediaElement associated with this node in the given context + */ +static GF_HTML_MediaElement *html_media_element_get_from_node(JSContext *c, GF_Node *n) +{ + jsval vp; + JSObject *me_obj; + JSObject *node_obj; + GF_HTML_MediaElement *me = NULL; + + if ((n->sgprivate->tag == TAG_SVG_video || n->sgprivate->tag == TAG_SVG_audio) && n->sgprivate->interact && n->sgprivate->interact->js_binding) { + node_obj = (JSObject *)n->sgprivate->interact->js_binding->node; + if (node_obj) { + JS_GetProperty(c, node_obj, "gpac_me_impl", &vp); + me_obj = JSVAL_TO_OBJECT(vp); + me = (GF_HTML_MediaElement *)SMJS_GET_PRIVATE(c, me_obj); + } + } + return me; +} + static JSBool SMJS_FUNCTION(html_media_load) { SMJS_OBJ @@ -371,9 +392,19 @@ static JSBool SMJS_FUNCTION(html_media_load) GF_JS_InstanceOf(c, obj, &html_media_rt->htmlVideoElementClass, NULL) || GF_JS_InstanceOf(c, obj, &html_media_rt->htmlMediaElementClass, NULL)) { - /* mo->odm->net_service */ - } - return JS_TRUE; + MFURL mfurl; + GF_Node *n = (GF_Node *)SMJS_GET_PRIVATE(c, obj); + GF_HTML_MediaElement *me = html_media_element_get_from_node(c, n); + mfurl.count = 1; + mfurl.vals = (SFURL *)gf_malloc(sizeof(SFURL)); + mfurl.vals[0].url = me->currentSrc; + mfurl.vals[0].OD_ID = GF_MEDIA_EXTERNAL_ID; + gf_mo_register(n, &mfurl, GF_FALSE, GF_FALSE); + gf_free(mfurl.vals); + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static JSBool SMJS_FUNCTION(html_media_canPlayType) @@ -383,8 +414,10 @@ static JSBool SMJS_FUNCTION(html_media_canPlayType) GF_JS_InstanceOf(c, obj, &html_media_rt->htmlVideoElementClass, NULL) || GF_JS_InstanceOf(c, obj, &html_media_rt->htmlMediaElementClass, NULL)) { - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static JSBool SMJS_FUNCTION(html_media_fastSeek) @@ -394,13 +427,23 @@ static JSBool SMJS_FUNCTION(html_media_fastSeek) GF_JS_InstanceOf(c, obj, &html_media_rt->htmlVideoElementClass, NULL) || GF_JS_InstanceOf(c, obj, &html_media_rt->htmlMediaElementClass, NULL)) { - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static JSBool SMJS_FUNCTION(html_media_addTextTrack) { - return JS_TRUE; + SMJS_OBJ + if (GF_JS_InstanceOf(c, obj, &html_media_rt->htmlAudioElementClass, NULL) || + GF_JS_InstanceOf(c, obj, &html_media_rt->htmlVideoElementClass, NULL) || + GF_JS_InstanceOf(c, obj, &html_media_rt->htmlMediaElementClass, NULL)) + { + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } void *html_get_element_class(GF_Node *n) @@ -417,35 +460,6 @@ void *html_get_element_class(GF_Node *n) } } -/* Used to retrieve the structure implementing the GF_HTML_MediaElement interface associated with this node - * Usually this is done with the private stack of the node (see gf_node_get_private), but in this case, - * the stack already contains the rendering stack SVG_video_stack. 
- * So, we store the structure implementing the GF_HTML_MediaElement interface in the JavaScript context of this node, - * as a non enumeratable property named 'gpac_me_impl' - * - * \param c the global JavaScript context - * \param n the audio or video node - * \return the GF_HTML_MediaElement associated with this node in the given context - */ -static GF_HTML_MediaElement *html_media_element_get_from_node(JSContext *c, GF_Node *n) -{ - jsval vp; - JSObject *me_obj; - JSObject *node_obj; - GF_HTML_MediaElement *me = NULL; - - if ((n->sgprivate->tag == TAG_SVG_video || n->sgprivate->tag == TAG_SVG_audio) && n->sgprivate->interact && n->sgprivate->interact->js_binding) { - node_obj = (JSObject *)n->sgprivate->interact->js_binding->node; - if (node_obj) - { - JS_GetProperty(c, node_obj, "gpac_me_impl", &vp); - me_obj = JSVAL_TO_OBJECT(vp); - me = (GF_HTML_MediaElement *)SMJS_GET_PRIVATE(c, me_obj); - } - } - return me; -} - /* Creates the GF_HTML_MediaElement structure for this node * Store it in the JavaScript context of this node, as a non enumeratable property named 'gpac_me_impl' * see \ref html_media_element_get_from_node for retrieving it @@ -456,8 +470,7 @@ static GF_HTML_MediaElement *html_media_element_get_from_node(JSContext *c, GF_N */ void html_media_element_js_init(JSContext *c, JSObject *node_obj, GF_Node *n) { - if (n->sgprivate->tag == TAG_SVG_video || n->sgprivate->tag == TAG_SVG_audio) - { + if (n->sgprivate->tag == TAG_SVG_video || n->sgprivate->tag == TAG_SVG_audio) { GF_HTML_MediaElement *me; me = gf_html_media_element_new(n, NULL); gf_html_media_element_init_js(me, c, node_obj); @@ -490,7 +503,7 @@ void gf_html_media_get_event_target(JSContext *c, JSObject *obj, GF_DOMEventTarg if (GF_JS_InstanceOf(c, obj, &html_media_rt->htmlVideoElementClass, NULL) || GF_JS_InstanceOf(c, obj, &html_media_rt->htmlAudioElementClass, NULL) ) { GF_Node *n = (GF_Node *)SMJS_GET_PRIVATE(c, obj); - *target = gf_html_media_get_event_target_from_node(n); + *target = gf_dom_event_get_target_from_node(n); *sg = n->sgprivate->scenegraph; } else { *target = NULL; @@ -523,8 +536,10 @@ static SMJS_FUNC_PROP_GET(html_media_error_get_code) *vp = INT_TO_JSVAL(error->code); return JS_TRUE; } - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_error) @@ -549,8 +564,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_src) if (JSVAL_CHECK_STRING(*vp)) { char *str = SMJS_CHARS(c, *vp); gf_svg_set_attributeNS(n, GF_XMLNS_XLINK, "href", str); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_cors) @@ -569,8 +586,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_cors) if (JSVAL_CHECK_STRING(*vp)) { char *str = SMJS_CHARS(c, *vp); gf_svg_set_attributeNS(n, GF_XMLNS_SVG, "crossorigin", str); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } void media_event_collect_info(GF_ClientService *net, GF_ObjectManager *odm, GF_DOMMediaEvent *media_event, u32 *min_time, u32 *min_buffer); @@ -673,9 +692,11 @@ static SMJS_FUNC_PROP_GET(html_media_get_const) default: return JS_TRUE; } - } - *vp = INT_TO_JSVAL( v ); - return JS_TRUE; + *vp = INT_TO_JSVAL( v ); + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_preload) @@ -694,13 +715,15 @@ static 
SMJS_FUNC_PROP_SET(html_media_set_preload) if (JSVAL_CHECK_STRING(*vp)) { char *str = SMJS_CHARS(c, *vp); gf_svg_set_attributeNS(n, GF_XMLNS_SVG, "preload", str); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_buffered) HTML_MEDIA_JS_START - *vp = OBJECT_TO_JSVAL( me->buffered._this ); + *vp = OBJECT_TO_JSVAL( me->buffered->_this ); return JS_TRUE; } @@ -736,15 +759,12 @@ static SMJS_FUNC_PROP_SET(html_media_set_current_time) double d; GF_JSAPIParam par; HTML_MEDIA_JS_START - if (!JSVAL_IS_NUMBER(*vp)) - { - return JS_TRUE; + if (!JSVAL_IS_NUMBER(*vp)) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } JS_ValueToNumber(c, *vp, &d); par.time = d; - if (ScriptAction(n->sgprivate->scenegraph, GF_JSAPI_OP_SET_TIME, (GF_Node *)n, &par)) { - return JS_TRUE; - } + ScriptAction(n->sgprivate->scenegraph, GF_JSAPI_OP_SET_TIME, (GF_Node *)n, &par); return JS_TRUE; } @@ -785,6 +805,9 @@ static SMJS_FUNC_PROP_GET(html_media_get_default_playback_rate) static SMJS_FUNC_PROP_SET(html_media_set_default_playback_rate) jsdouble d; HTML_MEDIA_JS_START + if (!JSVAL_IS_NUMBER(*vp)) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } JS_ValueToNumber(c, *vp, &d); me->defaultPlaybackRate = d; return JS_TRUE; @@ -802,8 +825,12 @@ static SMJS_FUNC_PROP_GET(html_media_get_playback_rate) static SMJS_FUNC_PROP_SET(html_media_set_playback_rate) jsdouble d; Fixed speed; - GF_Node *n = (GF_Node *)SMJS_GET_PRIVATE(c, obj); - GF_MediaObject *mo = gf_html_media_object(n); + GF_MediaObject *mo; + HTML_MEDIA_JS_START + mo = gf_html_media_object(n); + if (!JSVAL_IS_NUMBER(*vp)) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } JS_ValueToNumber(c, *vp, &d); speed = FLT2FIX(d); gf_mo_set_speed(mo, speed); @@ -812,19 +839,20 @@ static SMJS_FUNC_PROP_SET(html_media_set_playback_rate) static SMJS_FUNC_PROP_GET(html_media_get_played) HTML_MEDIA_JS_START - *vp =( OBJECT_TO_JSVAL( me->played._this ) ); + *vp =( OBJECT_TO_JSVAL( me->played->_this ) ); return JS_TRUE; } static SMJS_FUNC_PROP_GET(html_media_get_seekable) HTML_MEDIA_JS_START - *vp =( OBJECT_TO_JSVAL( me->seekable._this ) ); + *vp =( OBJECT_TO_JSVAL( me->seekable->_this ) ); return JS_TRUE; } static SMJS_FUNC_PROP_GET(html_media_get_ended) - GF_Node *n = (GF_Node *)SMJS_GET_PRIVATE(c, obj); - GF_MediaObject *mo = gf_html_media_object(n); + GF_MediaObject *mo; + HTML_MEDIA_JS_START + mo = gf_html_media_object(n); *vp = BOOLEAN_TO_JSVAL( gf_mo_is_done(mo) ? 
JS_TRUE : JS_FALSE); return JS_TRUE; } @@ -849,8 +877,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_autoplay) if (JSVAL_CHECK_STRING(*vp)) { char *str = SMJS_CHARS(c, *vp); gf_svg_set_attributeNS(n, GF_XMLNS_SVG, "autoplay", str); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_loop) @@ -875,8 +905,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_loop) char *str = SMJS_CHARS(c, *vp); gf_svg_set_attributeNS(n, GF_XMLNS_SVG, "loop", str); //TODO: use gf_mo_get_loop - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_mediagroup) @@ -895,8 +927,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_mediagroup) if (JSVAL_CHECK_STRING(*vp)) { char *str = SMJS_CHARS(c, *vp); gf_svg_set_attributeNS(n, GF_XMLNS_SVG, "mediagroup", str); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_controller) @@ -911,8 +945,12 @@ static SMJS_FUNC_PROP_GET(html_media_get_controller) static SMJS_FUNC_PROP_SET(html_media_set_controller) HTML_MEDIA_JS_START - me->controller = (GF_HTML_MediaController *)SMJS_GET_PRIVATE(c, JSVAL_TO_OBJECT(*vp)); - return JS_TRUE; + if (JSVAL_IS_OBJECT(*vp)) { + me->controller = (GF_HTML_MediaController *)SMJS_GET_PRIVATE(c, JSVAL_TO_OBJECT(*vp)); + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_controls) @@ -935,8 +973,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_controls) if (JSVAL_CHECK_STRING(*vp)) { char *str = SMJS_CHARS(c, *vp); gf_svg_set_attributeNS(n, GF_XMLNS_SVG, "controls", str); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SVG_audio_stack *html_media_get_audio_stack(GF_Node *n) { @@ -973,8 +1013,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_volume) if (audio_stack) { JS_ValueToNumber(c, *vp, &volume); audio_stack->input.intensity = FLT2FIX(volume); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_muted) @@ -995,8 +1037,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_muted) audio_stack = html_media_get_audio_stack(n); if (audio_stack) { audio_stack->input.is_muted = (JSVAL_TO_BOOLEAN(*vp) == JS_TRUE ? 
GF_TRUE : GF_FALSE); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_default_muted) @@ -1021,8 +1065,10 @@ static SMJS_FUNC_PROP_SET(html_media_set_default_muted) if (JSVAL_CHECK_STRING(*vp)) { char *str = SMJS_CHARS(c, *vp); gf_svg_set_attributeNS(n, GF_XMLNS_SVG, "muted", str); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_media_get_audio_tracks) @@ -1044,7 +1090,11 @@ static SMJS_FUNC_PROP_GET(html_media_get_text_tracks) } static SMJS_FUNC_PROP_GET(html_time_ranges_get_length) - GF_HTML_MediaTimeRanges *timeranges = (GF_HTML_MediaTimeRanges *)SMJS_GET_PRIVATE(c, obj); + GF_HTML_MediaTimeRanges *timeranges; + if (!GF_JS_InstanceOf(c, obj, &html_media_rt->timeRangesClass, NULL)) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } + timeranges = (GF_HTML_MediaTimeRanges *)SMJS_GET_PRIVATE(c, obj); *vp = INT_TO_JSVAL( gf_list_count(timeranges->times)/2); return JS_TRUE; } @@ -1055,22 +1105,23 @@ static JSBool SMJS_FUNCTION(html_time_ranges_start) SMJS_OBJ SMJS_ARGS if ((argc!=1) || !GF_JS_InstanceOf(c, obj, &html_media_rt->timeRangesClass, NULL)) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } timeranges = (GF_HTML_MediaTimeRanges *)SMJS_GET_PRIVATE(c, obj); if (!timeranges) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } if (JSVAL_IS_INT(argv[0])) { u32 i = JSVAL_TO_INT(argv[0]); - double *start_value = (double *)gf_list_get(timeranges->times, 2*i); + u64 *start_value = (u64 *)gf_list_get(timeranges->times, 2*i); if (!start_value) { - dom_throw_exception(c, GF_DOM_EXC_WRONG_DOCUMENT_ERR); - return JS_FALSE; + return dom_throw_exception(c, GF_DOM_EXC_INDEX_SIZE_ERR); } else { - SMJS_SET_RVAL(DOUBLE_TO_JSVAL(JS_NewDouble(c, *start_value))); + SMJS_SET_RVAL(DOUBLE_TO_JSVAL(JS_NewDouble(c, (*start_value)*1.0/timeranges->timescale))); } - } + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } return JS_TRUE; } @@ -1080,22 +1131,23 @@ static JSBool SMJS_FUNCTION(html_time_ranges_end) SMJS_OBJ SMJS_ARGS if ((argc!=1) || !GF_JS_InstanceOf(c, obj, &html_media_rt->timeRangesClass, NULL)) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } timeranges = (GF_HTML_MediaTimeRanges *)SMJS_GET_PRIVATE(c, obj); if (!timeranges) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } if (JSVAL_IS_INT(argv[0])) { u32 i = JSVAL_TO_INT(argv[0]); - double *start_value = (double *)gf_list_get(timeranges->times, 2*i+1); - if (!start_value) { - dom_throw_exception(c, GF_DOM_EXC_WRONG_DOCUMENT_ERR); - return JS_FALSE; + u64 *end_value = (u64 *)gf_list_get(timeranges->times, 2*i+1); + if (!end_value) { + return dom_throw_exception(c, GF_DOM_EXC_INDEX_SIZE_ERR); } else { - SMJS_SET_RVAL(DOUBLE_TO_JSVAL(JS_NewDouble(c, *start_value))); + SMJS_SET_RVAL(DOUBLE_TO_JSVAL(JS_NewDouble(c, (*end_value)*1.0/timeranges->timescale))); } - } + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } return JS_TRUE; } @@ -1110,9 +1162,15 @@ static SMJS_FUNC_PROP_GET(html_track_list_get_length) GF_HTML_TrackList *tracklist; if (html_is_track_list(c, obj)) { tracklist = (GF_HTML_TrackList *)SMJS_GET_PRIVATE(c, obj); - *vp = INT_TO_JSVAL( gf_list_count(tracklist->tracks) ); + if (tracklist) { + *vp = INT_TO_JSVAL( 
gf_list_count(tracklist->tracks) ); + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } - return JS_TRUE; } static JSBool SMJS_FUNCTION(html_track_list_get_track_by_id) @@ -1129,9 +1187,13 @@ static JSBool SMJS_FUNCTION(html_track_list_get_track_by_id) SMJS_FREE(c, str); if (track) { SMJS_SET_RVAL(OBJECT_TO_JSVAL(track->_this)); + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } - return JS_TRUE; } static SMJS_FUNC_PROP_GET(html_track_list_get_property) @@ -1145,28 +1207,34 @@ static SMJS_FUNC_PROP_GET(html_track_list_get_property) GF_HTML_Track *track = (GF_HTML_Track *)gf_list_get(tracklist->tracks, (u32)index); *vp = OBJECT_TO_JSVAL(track->_this); } + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } - return JS_TRUE; } static SMJS_FUNC_PROP_GET(html_track_list_get_selected_index) if (html_is_track_list(c, obj)) { GF_HTML_TrackList *tracklist = (GF_HTML_TrackList *)SMJS_GET_PRIVATE(c, obj); *vp = INT_TO_JSVAL(tracklist->selected_index); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_track_list_get_onchange) if (html_is_track_list(c, obj)) { GF_HTML_TrackList *tracklist = (GF_HTML_TrackList *)SMJS_GET_PRIVATE(c, obj); - if (tracklist->onchange) { + if (! JSVAL_IS_NULL(tracklist->onchange)) { *vp = tracklist->onchange; } else { *vp = JSVAL_NULL; } - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } JSBool gf_set_js_eventhandler(JSContext *c, jsval vp, jsval *callbackfuncval); @@ -1175,55 +1243,65 @@ static SMJS_FUNC_PROP_SET(html_track_list_set_onchange) if (html_is_track_list(c, obj)) { GF_HTML_TrackList *tracklist = (GF_HTML_TrackList *)SMJS_GET_PRIVATE(c, obj); gf_set_js_eventhandler(c, *vp, &tracklist->onchange); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_track_list_get_onaddtrack) if (html_is_track_list(c, obj)) { GF_HTML_TrackList *tracklist = (GF_HTML_TrackList *)SMJS_GET_PRIVATE(c, obj); - if (tracklist->onaddtrack) { + if (! JSVAL_IS_NULL(tracklist->onaddtrack)) { *vp = tracklist->onaddtrack; } else { *vp = JSVAL_NULL; } - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_SET(html_track_list_set_onaddtrack) if (html_is_track_list(c, obj)) { GF_HTML_TrackList *tracklist = (GF_HTML_TrackList *)SMJS_GET_PRIVATE(c, obj); gf_set_js_eventhandler(c, *vp, &tracklist->onaddtrack); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_track_list_get_onremovetrack) if (html_is_track_list(c, obj)) { GF_HTML_TrackList *tracklist = (GF_HTML_TrackList *)SMJS_GET_PRIVATE(c, obj); - if (tracklist->onremovetrack) { + if (! 
JSVAL_IS_NULL(tracklist->onremovetrack) ) { *vp = tracklist->onremovetrack; } else { *vp = JSVAL_NULL; } - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_SET(html_track_list_set_onremovetrack) if (html_is_track_list(c, obj)) { GF_HTML_TrackList *tracklist = (GF_HTML_TrackList *)SMJS_GET_PRIVATE(c, obj); gf_set_js_eventhandler(c, *vp, &tracklist->onremovetrack); - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_track_get_property) if (html_is_track_list(c, obj)) { GF_HTML_Track *track = (GF_HTML_Track *)SMJS_GET_PRIVATE(c, obj); if (!SMJS_ID_IS_INT(id)) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } switch (SMJS_ID_TO_INT(id)) { case HTML_TRACK_PROP_ID: @@ -1254,15 +1332,17 @@ static SMJS_FUNC_PROP_GET(html_track_get_property) } return JS_TRUE; } - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_SET(html_track_set_property) if (html_is_track_list(c, obj)) { GF_HTML_Track *track = (GF_HTML_Track *)SMJS_GET_PRIVATE(c, obj); if (!SMJS_ID_IS_INT(id)) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } switch (SMJS_ID_TO_INT(id)) { case HTML_TRACK_PROP_SELECTED: @@ -1278,8 +1358,10 @@ static SMJS_FUNC_PROP_SET(html_track_set_property) } return JS_TRUE; } - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_GET(html_video_get_property) @@ -1292,7 +1374,7 @@ static SMJS_FUNC_PROP_GET(html_video_get_property) video = (SVG_video_stack *)n->sgprivate->UserPrivate; if (!SMJS_ID_IS_INT(id)) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } switch (SMJS_ID_TO_INT(id)) { case HTML_VIDEO_PROP_WIDTH: @@ -1323,8 +1405,10 @@ static SMJS_FUNC_PROP_GET(html_video_get_property) } return JS_TRUE; } - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static SMJS_FUNC_PROP_SET(html_video_set_property) @@ -1332,7 +1416,7 @@ static SMJS_FUNC_PROP_SET(html_video_set_property) { GF_Node *n = (GF_Node *)SMJS_GET_PRIVATE(c, obj); if (!SMJS_ID_IS_INT(id)) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } switch (SMJS_ID_TO_INT(id)) { case HTML_VIDEO_PROP_WIDTH: @@ -1349,8 +1433,10 @@ static SMJS_FUNC_PROP_SET(html_video_set_property) return JS_TRUE; } } - } - return JS_TRUE; + return JS_TRUE; + } else { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } } static JSBool SMJS_FUNCTION(html_media_event_add_listener) diff --git a/src/scenegraph/html5_mse_smjs.c b/src/scenegraph/html5_mse_smjs.c index b5ef336..6ecd526 100644 --- a/src/scenegraph/html5_mse_smjs.c +++ b/src/scenegraph/html5_mse_smjs.c @@ -106,8 +106,8 @@ static void mediasource_sourceBuffer_initjs(JSContext *c, JSObject *ms_obj, GF_H sb->_this = JS_NewObject(c, &html_media_rt->sourceBufferClass._class, 0, 0); //gf_js_add_root(c, &sb->_this, GF_JSGC_OBJECT); SMJS_SET_PRIVATE(c, sb->_this, sb); - sb->buffered._this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, sb->_this); - SMJS_SET_PRIVATE(c, sb->buffered._this, &sb->buffered); + sb->buffered->_this = JS_NewObject(c, &html_media_rt->timeRangesClass._class, NULL, sb->_this); + SMJS_SET_PRIVATE(c, 
sb->buffered->_this, sb->buffered); } #include @@ -118,16 +118,20 @@ static JSBool SMJS_FUNCTION(mediasource_is_type_supported) SMJS_ARGS GF_SceneGraph *sg; GF_JSAPIParam par; - Bool isSupported; + Bool isSupported = GF_TRUE; char *mime; - if (!argc || !JSVAL_CHECK_STRING(argv[0]) ) - { - return JS_TRUE; + if (!argc || !JSVAL_CHECK_STRING(argv[0])) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } mime = SMJS_CHARS(c, argv[0]); sg = mediasource_get_scenegraph(c); - sg->script_action(sg->script_action_cbck, GF_JSAPI_OP_GET_TERM, NULL, &par); - isSupported = gf_term_is_type_supported((GF_Terminal *)par.term, mime); + assert(sg); + if (!strlen(mime)) { + isSupported = GF_FALSE; + } else { + sg->script_action(sg->script_action_cbck, GF_JSAPI_OP_GET_TERM, NULL, &par); + isSupported = gf_term_is_type_supported((GF_Terminal *)par.term, mime); + } SMJS_SET_RVAL(BOOLEAN_TO_JSVAL(isSupported ? JS_TRUE : JS_FALSE)); SMJS_FREE(c, mime); return JS_TRUE; @@ -141,25 +145,27 @@ static JSBool SMJS_FUNCTION(mediasource_addSourceBuffer) GF_HTML_MediaSource *ms; const char *mime; GF_Err e; + u32 exception = 0; e = GF_OK; if (!GF_JS_InstanceOf(c, obj, &html_media_rt->mediaSourceClass, NULL) ) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } if (!argc || !JSVAL_CHECK_STRING(argv[0])) { return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } mime = SMJS_CHARS(c, argv[0]); - if (!strlen(mime)) - { - return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + if (!strlen(mime)) { + exception = GF_DOM_EXC_INVALID_ACCESS_ERR; + goto exit; } ms = (GF_HTML_MediaSource *)SMJS_GET_PRIVATE(c, obj); - if (ms->readyState != MEDIA_SOURCE_READYSTATE_OPEN) - { - dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); - e = GF_BAD_PARAM; + if (!ms) { + exception = GF_DOM_EXC_INVALID_ACCESS_ERR; + goto exit; + } else if (ms->readyState != MEDIA_SOURCE_READYSTATE_OPEN) { + exception = GF_DOM_EXC_INVALID_STATE_ERR; goto exit; } assert(ms->service); @@ -171,34 +177,57 @@ static JSBool SMJS_FUNCTION(mediasource_addSourceBuffer) } */ sb = gf_mse_source_buffer_new(ms); + assert(sb); e = gf_mse_source_buffer_load_parser(sb, mime); if (e == GF_OK) { - gf_mse_add_source_buffer(ms, sb); + gf_mse_mediasource_add_source_buffer(ms, sb); mediasource_sourceBuffer_initjs(c, obj, sb); SMJS_SET_RVAL( OBJECT_TO_JSVAL(sb->_this) ); } else { gf_mse_source_buffer_del(sb); - dom_throw_exception(c, GF_DOM_EXC_NOT_SUPPORTED_ERR); + exception = GF_DOM_EXC_NOT_SUPPORTED_ERR; } exit: - SMJS_FREE(c, (void *)mime); - if (e == GF_OK) { - return JS_TRUE; + if (mime) { + SMJS_FREE(c, (void *)mime); + } + if (exception) { + return dom_throw_exception(c, exception); } else { - return JS_FALSE; + return JS_TRUE; } } static JSBool SMJS_FUNCTION(mediasource_removeSourceBuffer) { SMJS_OBJ -// SMJS_ARGS -// GF_HTML_MediaSource *ms; + SMJS_ARGS + GF_HTML_MediaSource *ms; + GF_HTML_SourceBuffer *sb; + JSObject *sb_obj; if (!GF_JS_InstanceOf(c, obj, &html_media_rt->mediaSourceClass, NULL) ) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } -// ms = (GF_HTML_MediaSource *)SMJS_GET_PRIVATE(c, obj); - /* TODO */ + ms = (GF_HTML_MediaSource *)SMJS_GET_PRIVATE(c, obj); + if (!ms) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } + if (!argc || JSVAL_IS_NULL(argv[0]) || !JSVAL_IS_OBJECT(argv[0])) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } + sb_obj = JSVAL_TO_OBJECT(argv[0]); + if (!GF_JS_InstanceOf(c, sb_obj, 
&html_media_rt->sourceBufferClass, NULL) ) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } + sb = (GF_HTML_SourceBuffer *)SMJS_GET_PRIVATE(c, sb_obj); + if (!sb) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } else { + GF_Err e = gf_mse_remove_source_buffer(ms, sb); + if (e == GF_NOT_FOUND) { + return dom_throw_exception(c, GF_DOM_EXC_NOT_FOUND_ERR); + } + } return JS_TRUE; } @@ -209,34 +238,32 @@ static JSBool SMJS_FUNCTION(mediasource_endOfStream) GF_HTML_MediaSource *ms; u32 i; if (!GF_JS_InstanceOf(c, obj, &html_media_rt->mediaSourceClass, NULL) ) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } ms = (GF_HTML_MediaSource *)SMJS_GET_PRIVATE(c, obj); + if (!ms) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } if (ms->readyState != MEDIA_SOURCE_READYSTATE_OPEN) { - dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); - return JS_FALSE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); } for (i = 0; i < gf_list_count(ms->sourceBuffers.list); i++) { GF_HTML_SourceBuffer *sb = (GF_HTML_SourceBuffer *)gf_list_get(ms->sourceBuffers.list, i); if (sb->updating) { - dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); - return JS_FALSE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); } } - if (argc > 0) - { + if (argc > 0) { char *error = NULL; if (!JSVAL_CHECK_STRING(argv[0])) { - dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); - return JS_FALSE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } error = SMJS_CHARS(c, argv[0]); if (strcmp(error, "decode") && strcmp(error, "network")) { SMJS_FREE(c, error); - dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); - return JS_FALSE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } - SMJS_FREE(c, error); + SMJS_FREE(c, error); } gf_mse_mediasource_end(ms); return JS_TRUE; @@ -275,10 +302,12 @@ static DECL_FINALIZE(media_source_finalize) static SMJS_FUNC_PROP_GET(media_source_get_source_buffers) GF_HTML_MediaSource *p; if (!GF_JS_InstanceOf(c, obj, &html_media_rt->mediaSourceClass, NULL) ) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } p = (GF_HTML_MediaSource *)SMJS_GET_PRIVATE(c, obj); - if (p) { + if (!p) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } else { *vp = OBJECT_TO_JSVAL(p->sourceBuffers._this); return JS_TRUE; } @@ -301,10 +330,12 @@ static SMJS_FUNC_PROP_GET(media_source_get_active_source_buffers) static SMJS_FUNC_PROP_GET(media_source_get_ready_state) GF_HTML_MediaSource *p; if (!GF_JS_InstanceOf(c, obj, &html_media_rt->mediaSourceClass, NULL) ) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } p = (GF_HTML_MediaSource *)SMJS_GET_PRIVATE(c, obj); - if (p) { + if (!p) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } else { switch (p->readyState) { case MEDIA_SOURCE_READYSTATE_CLOSED: @@ -317,7 +348,6 @@ static SMJS_FUNC_PROP_GET(media_source_get_ready_state) *vp = STRING_TO_JSVAL( JS_NewStringCopyZ(c, "ended")); break; } - return JS_TRUE; } return JS_TRUE; } @@ -342,13 +372,47 @@ static SMJS_FUNC_PROP_GET(media_source_get_duration) } static SMJS_FUNC_PROP_SET(media_source_set_duration) + GF_HTML_MediaSource *ms; + if (!GF_JS_InstanceOf(c, obj, &html_media_rt->mediaSourceClass, NULL)) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } + ms = (GF_HTML_MediaSource *)SMJS_GET_PRIVATE(c, obj); + if (!ms) { + return dom_throw_exception(c, 
GF_DOM_EXC_INVALID_ACCESS_ERR); + } else { + if (ms->readyState != MEDIA_SOURCE_READYSTATE_OPEN) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); + } else if (!JSVAL_IS_NUMBER(*vp)) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } else { + u32 i, count; + count = gf_list_count(ms->sourceBuffers.list); + for (i = 0; i < count; i++) { + GF_HTML_SourceBuffer *sb = (GF_HTML_SourceBuffer *)gf_list_get(ms->sourceBuffers.list, i); + if (sb->updating) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); + } + } + { + jsdouble durationValue; + JS_ValueToNumber(c, *vp, &durationValue); + if (durationValue < 0) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } else { + ms->duration = durationValue; + ms->durationType = DURATION_VALUE; + /* TODO: call the run duration algorithm */ + } + } + } + } return JS_TRUE; } static SMJS_FUNC_PROP_GET( sourcebufferlist_getProperty) GF_HTML_SourceBufferList *p; u32 count; - u32 idx; + s32 idx; if (!GF_JS_InstanceOf(c, obj, &html_media_rt->sourceBufferListClass, NULL) ) { return JS_TRUE; @@ -385,21 +449,25 @@ static SMJS_FUNC_PROP_GET( sourcebufferlist_getProperty) GF_HTML_SourceBuffer *sb; \ if (!GF_JS_InstanceOf(c, obj, &html_media_rt->sourceBufferClass, NULL) ) \ { \ - return dom_throw_exception(c, GF_DOM_EXC_TYPE_MISMATCH_ERR); \ + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); \ }\ sb = (GF_HTML_SourceBuffer *)SMJS_GET_PRIVATE(c, obj);\ - /* check if this source buffer is still in the list of source buffers */\ - if (!sb || gf_list_find(sb->mediasource->sourceBuffers.list, sb) < 0)\ + if (!sb)\ {\ - return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); \ + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); \ } #define SB_UPDATING_CHECK \ SB_BASIC_CHECK \ + /* check if this source buffer is still in the list of source buffers */\ + if (gf_list_find(sb->mediasource->sourceBuffers.list, sb) < 0)\ + {\ + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); \ + } \ if (sb->updating)\ {\ return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); \ - }\ + } /* FIXME : Function not used, generates warning on debian static DECL_FINALIZE(sourcebuffer_finalize) @@ -428,8 +496,7 @@ static JSBool SMJS_FUNCTION(sourcebuffer_appendBuffer) gf_mse_mediasource_open(sb->mediasource, NULL); } - if (!argc || JSVAL_IS_NULL(argv[0]) || !JSVAL_IS_OBJECT(argv[0])) - { + if (!argc || JSVAL_IS_NULL(argv[0]) || !JSVAL_IS_OBJECT(argv[0])) { return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } js_ab = JSVAL_TO_OBJECT(argv[0]); @@ -459,11 +526,13 @@ static JSBool SMJS_FUNCTION(sourcebuffer_appendStream) static JSBool SMJS_FUNCTION(sourcebuffer_abort) { SMJS_OBJ -// SMJS_ARGS SB_BASIC_CHECK + if (gf_list_find(sb->mediasource->sourceBuffers.list, sb) < 0) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); + } if (sb->mediasource->readyState != MEDIA_SOURCE_READYSTATE_OPEN) { - return JS_TRUE; - } + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); + } if (gf_mse_source_buffer_abort(sb) != GF_OK) { return JS_TRUE; } @@ -477,21 +546,17 @@ static JSBool SMJS_FUNCTION(sourcebuffer_remove) jsdouble start, end; SB_UPDATING_CHECK if (argc < 2 || !JSVAL_IS_NUMBER(argv[0]) || !JSVAL_IS_NUMBER(argv[1])) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } JS_ValueToNumber(c, argv[0], &start); JS_ValueToNumber(c, argv[1], &end); if (start < 0 /* || start > sb->duration */ || start >= end) { - return JS_TRUE; + return 
dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } if (sb->mediasource->readyState != MEDIA_SOURCE_READYSTATE_OPEN) { - return JS_TRUE; - } - sb->updating = GF_TRUE; - if (!sb->remove_thread) { - sb->remove_thread = gf_th_new(NULL); + gf_mse_mediasource_open(sb->mediasource, NULL); } - gf_th_run(sb->remove_thread, gf_mse_source_buffer_remove, sb); + gf_mse_remove(sb, start, end); return JS_TRUE; } @@ -507,32 +572,39 @@ static SMJS_FUNC_PROP_GET(sourceBuffer_get_mode) static SMJS_FUNC_PROP_SET(sourceBuffer_set_mode) char *smode = NULL; + GF_HTML_MediaSource_AppendMode mode; SB_BASIC_CHECK if (!JSVAL_CHECK_STRING(*vp)) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } smode = SMJS_CHARS(c, *vp); if (stricmp(smode, "segments") && stricmp(smode, "sequence")) { - return JS_TRUE; - } - if (sb->updating) { - return JS_TRUE; + SMJS_FREE(c, smode); + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } + if (!stricmp(smode, "segments")) { + mode = MEDIA_SOURCE_APPEND_MODE_SEGMENTS; + } else if (!stricmp(smode, "sequence")) { + mode = MEDIA_SOURCE_APPEND_MODE_SEQUENCE; + } + SMJS_FREE(c, smode); + if (gf_list_find(sb->mediasource->sourceBuffers.list, sb) < 0) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); + } + if (sb->updating) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); + } if (sb->mediasource->readyState == MEDIA_SOURCE_READYSTATE_ENDED) { gf_mse_mediasource_open(sb->mediasource, NULL); } if (sb->append_state == MEDIA_SOURCE_APPEND_STATE_PARSING_MEDIA_SEGMENT) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); } - if (!stricmp(smode, "segments")) { - sb->append_mode = MEDIA_SOURCE_APPEND_MODE_SEGMENTS; - } else if (!stricmp(smode, "sequence")) { - sb->append_mode = MEDIA_SOURCE_APPEND_MODE_SEQUENCE; - } + sb->append_mode = mode; if (sb->append_mode == MEDIA_SOURCE_APPEND_MODE_SEQUENCE) { - /* TODO */ + sb->group_start_timestamp_flag = GF_TRUE; + sb->group_start_timestamp = sb->group_end_timestamp; } - SMJS_FREE(c, smode); return JS_TRUE; } @@ -544,15 +616,21 @@ static SMJS_FUNC_PROP_GET(sourceBuffer_get_updating) static SMJS_FUNC_PROP_GET(sourceBuffer_get_timestampOffset) SB_BASIC_CHECK - *vp = DOUBLE_TO_JSVAL(JS_NewDouble(c, sb->timestampOffset)); + *vp = DOUBLE_TO_JSVAL(JS_NewDouble(c, sb->timestampOffset*1.0/sb->timescale)); return JS_TRUE; } static SMJS_FUNC_PROP_SET(sourceBuffer_set_timestampOffset) jsdouble d; - SB_BASIC_CHECK + SB_UPDATING_CHECK + if (sb->mediasource->readyState == MEDIA_SOURCE_READYSTATE_ENDED) { + gf_mse_mediasource_open(sb->mediasource, NULL); + } + if (sb->append_state == MEDIA_SOURCE_APPEND_STATE_PARSING_MEDIA_SEGMENT) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_STATE_ERR); + } JS_ValueToNumber(c, *vp, &d); - sb->timestampOffset = d; + gf_mse_source_buffer_set_timestampOffset(sb, d); return JS_TRUE; } @@ -564,12 +642,12 @@ static SMJS_FUNC_PROP_GET(sourceBuffer_get_timescale) static SMJS_FUNC_PROP_SET(sourceBuffer_set_timescale) SB_BASIC_CHECK - sb->timescale = JSVAL_TO_INT(*vp); + gf_mse_source_buffer_set_timescale(sb, JSVAL_TO_INT(*vp)); return JS_TRUE; } static SMJS_FUNC_PROP_GET(sourceBuffer_get_appendWindowStart) - SB_UPDATING_CHECK + SB_BASIC_CHECK *vp = DOUBLE_TO_JSVAL(JS_NewDouble(c, sb->appendWindowStart)); return JS_TRUE; } @@ -577,26 +655,36 @@ static SMJS_FUNC_PROP_GET(sourceBuffer_get_appendWindowStart) static SMJS_FUNC_PROP_SET(sourceBuffer_set_appendWindowStart) jsdouble d; SB_UPDATING_CHECK + if (!JSVAL_IS_NUMBER(*vp)) { 
+ return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } JS_ValueToNumber(c, *vp, &d); if (d < 0 || d >= sb->appendWindowEnd) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } sb->appendWindowStart = d; return JS_TRUE; } static SMJS_FUNC_PROP_GET(sourceBuffer_get_appendWindowEnd) - SB_UPDATING_CHECK - *vp = DOUBLE_TO_JSVAL(JS_NewDouble(c, sb->appendWindowEnd)); + SB_BASIC_CHECK + if (sb->appendWindowEnd == GF_MAX_DOUBLE) { + *vp = JS_GetPositiveInfinityValue(c); + } else { + *vp = DOUBLE_TO_JSVAL(JS_NewDouble(c, sb->appendWindowEnd)); + } return JS_TRUE; } static SMJS_FUNC_PROP_SET(sourceBuffer_set_appendWindowEnd) jsdouble d; SB_UPDATING_CHECK + if (!JSVAL_IS_NUMBER(*vp)) { + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); + } JS_ValueToNumber(c, *vp, &d); if (d <= sb->appendWindowStart) { - return JS_TRUE; + return dom_throw_exception(c, GF_DOM_EXC_INVALID_ACCESS_ERR); } sb->appendWindowEnd = d; return JS_TRUE; @@ -605,12 +693,13 @@ static SMJS_FUNC_PROP_SET(sourceBuffer_set_appendWindowEnd) static SMJS_FUNC_PROP_GET(sourceBuffer_get_buffered) SB_BASIC_CHECK gf_mse_source_buffer_update_buffered(sb); - *vp = OBJECT_TO_JSVAL(sb->buffered._this); + *vp = OBJECT_TO_JSVAL(sb->buffered->_this); return JS_TRUE; } static SMJS_FUNC_PROP_GET(sourceBuffer_get_tracks) SB_BASIC_CHECK + /* TODO */ return JS_TRUE; } diff --git a/src/scenegraph/svg_attributes.c b/src/scenegraph/svg_attributes.c index 517728e..5c6a9ea 100644 --- a/src/scenegraph/svg_attributes.c +++ b/src/scenegraph/svg_attributes.c @@ -422,7 +422,7 @@ const char *gf_dom_get_key_name(u32 key_identifier) GF_KeyCode gf_dom_get_key_type(char *key_name) { if (strlen(key_name) == 1) { - unsigned char c[2]; + char c[2]; c[0] = key_name[0]; c[1] = 0; strupr(c); @@ -432,7 +432,7 @@ GF_KeyCode gf_dom_get_key_type(char *key_name) if (c[0] >= '0' && c[0] <= '9') return ( GF_KEY_0 + (c[0] - '0') ); - switch (c[0]) { + switch ((u8) c[0]) { case '@': return GF_KEY_AT; case '*': return GF_KEY_STAR; case '#': return GF_KEY_NUMBER; diff --git a/src/scenegraph/vrml_smjs.c b/src/scenegraph/vrml_smjs.c index 370da22..1a3504a 100644 --- a/src/scenegraph/vrml_smjs.c +++ b/src/scenegraph/vrml_smjs.c @@ -229,7 +229,7 @@ Bool gf_js_remove_root(JSContext *cx, void *rp, u32 type) if (!cx) JS_RemoveValueRootRT(js_rt->js_runtime, rp); else #endif - JS_RemoveValueRoot(cx, rp); + JS_RemoveValueRoot(cx, (jsval *) rp); break; default: if (cx) JS_RemoveGCThingRoot(cx, rp); @@ -2887,8 +2887,10 @@ static SMJS_FUNC_PROP_SET( array_setLength) GF_JSField *ptr = (GF_JSField *) SMJS_GET_PRIVATE(c, obj); if (!JSVAL_IS_INT(*vp) || JSVAL_TO_INT(*vp) < 0) return JS_FALSE; /*avoids gcc warning*/ +#ifndef GPAC_CONFIG_DARWIN if (!id) id=0; - len = JSVAL_TO_INT(*vp); +#endif + len = JSVAL_TO_INT(*vp); if (!len) { @@ -2968,8 +2970,9 @@ static SMJS_FUNC_PROP_GET( array_getLength) jsuint len; GF_JSField *ptr = (GF_JSField *) SMJS_GET_PRIVATE(c, obj); /*avoids gcc warning*/ +#ifndef GPAC_CONFIG_DARWIN if (!id) id=0; - +#endif if (ptr->field.fieldType==GF_SG_VRML_MFNODE) { len = gf_node_list_get_count(*(GF_ChildNodeItem **)ptr->field.far_ptr); ret = JS_TRUE; diff --git a/src/terminal/channel.c b/src/terminal/channel.c index a4c097f..1193b21 100644 --- a/src/terminal/channel.c +++ b/src/terminal/channel.c @@ -264,8 +264,8 @@ void Channel_WaitRAP(GF_Channel *ch) { ch->pck_sn = 0; - /*if using RAP signal and codec not resilient, wait for rap. 
If RAP isn't signaled DON'T wait for it :)*/ - if (!ch->codec_resilient) + /*if using RAP signal and codec not resilient, wait for rap. If RAP isn't signaled, this will be ignored*/ + if (ch->codec_resilient != GF_CODEC_RESILIENT_ALWAYS) ch->stream_state = 2; if (ch->buffer) gf_free(ch->buffer); ch->buffer = NULL; @@ -350,11 +350,14 @@ static Bool Channel_NeedsBuffering(GF_Channel *ch, u32 ForRebuffering) if (ch->BufferTime < (s32) ch->MaxBuffer) { /*check last AU time*/ u32 now = gf_term_get_time(ch->odm->term); - /*if more than half sec since last AU don't buffer and prevent rebuffering on short streams - this will also work for channels ignoring timing*/ - if (now>ch->last_au_time + MAX(ch->BufferTime, 500) ) { + /*if more than MaxBuffer sec since last AU don't buffer and prevent rebuffering on short streams + this will also work for channels ignoring timing + we use MaxBuffer as some transport protocols (HTTP streaming, DVB-H) will work in burst modes of MaxBuffer + */ + if (now > ch->last_au_time + 2*ch->MaxBuffer ) { /*this can be safely seen as a stream with very few updates (likely only one)*/ - if (!ch->AU_buffer_first && ch->first_au_fetched) ch->MinBuffer = 0; + if (!ch->AU_buffer_first && ch->first_au_fetched) + ch->MinBuffer = 0; return 0; } return 1; @@ -616,7 +619,7 @@ static void Channel_DispatchAU(GF_Channel *ch, u32 duration) ch->au_duration = 0; if (duration) ch->au_duration = (u32) ((u64)1000 * duration / ch->ts_res); - GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d - Dispatch AU DTS %d - CTS %d - RAP %d - size %d time %d Buffer %d Nb AUs %d - First AU relative timing %d\n", ch->esd->ESID, au->DTS, au->CTS, au->flags&1, au->dataLength, gf_clock_real_time(ch->clock), ch->BufferTime, ch->AU_Count, ch->AU_buffer_first ? ch->AU_buffer_first->DTS - gf_clock_time(ch->clock) : 0 )); + GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d (%s) - Dispatch AU DTS %d - CTS %d - RAP %d - size %d time %d Buffer %d Nb AUs %d - First AU relative timing %d\n", ch->esd->ESID, ch->odm->net_service->url, au->DTS, au->CTS, au->flags&1, au->dataLength, gf_clock_real_time(ch->clock), ch->BufferTime, ch->AU_Count, ch->AU_buffer_first ? 
ch->AU_buffer_first->DTS - gf_clock_time(ch->clock) : 0 )); /*little optimisation: if direct dispatching is possible, try to decode the AU we must lock the media scheduler to avoid deadlocks with other codecs accessing the scene or @@ -961,7 +964,7 @@ void gf_es_receive_sl_packet(GF_ClientService *serv, GF_Channel *ch, char *paylo /*get RAP*/ if (ch->esd->slConfig->hasRandomAccessUnitsOnlyFlag) { hdr.randomAccessPointFlag = 1; - } else if ((ch->carousel_type!=GF_ESM_CAROUSEL_MPEG2) && (!ch->esd->slConfig->useRandomAccessPointFlag || ch->codec_resilient) ) { + } else if ((ch->carousel_type!=GF_ESM_CAROUSEL_MPEG2) && (!ch->esd->slConfig->useRandomAccessPointFlag || (ch->codec_resilient==GF_CODEC_RESILIENT_ALWAYS) ) ) { ch->stream_state = 0; } @@ -1014,6 +1017,23 @@ void gf_es_receive_sl_packet(GF_ClientService *serv, GF_Channel *ch, char *paylo init_ts = 1; } + + /*if we had a previous buffer, add or discard it, depending on codec resilience*/ + if (hdr.accessUnitStartFlag && ch->buffer) { + if (ch->esd->slConfig->useAccessUnitEndFlag) { + GF_LOG(GF_LOG_WARNING, GF_LOG_SYNC, ("[SyncLayer] ES%d: missed end of AU (DTS %d)\n", ch->esd->ESID, ch->DTS)); + } + if (ch->codec_resilient) { + if (!ch->IsClockInit && !ch->skip_time_check_for_pending) gf_es_check_timing(ch); + Channel_DispatchAU(ch, 0); + } else { + gf_free(ch->buffer); + ch->buffer = NULL; + ch->AULength = 0; + ch->len = ch->allocSize = 0; + } + } + if (init_ts) { /*Get CTS */ if (ch->esd->slConfig->useTimestampsFlag) { @@ -1026,8 +1046,7 @@ void gf_es_receive_sl_packet(GF_ClientService *serv, GF_Channel *ch, char *paylo /*until clock is not init check seed ts*/ if (!ch->IsClockInit && (ch->net_dts < ch->seed_ts)) ch->seed_ts = ch->net_dts; -#endif - +#endif if (ch->net_ctsseed_ts) { u64 diff = ch->seed_ts - ch->net_cts; @@ -1046,6 +1065,11 @@ void gf_es_receive_sl_packet(GF_ClientService *serv, GF_Channel *ch, char *paylo ch->DTS = (u32) (ch->ts_offset + (s64) (ch->net_dts) * 1000 / ch->ts_res); } + if (ch->odm->parentscene && ch->odm->parentscene->root_od->addon) { + ch->DTS = (u32) gf_scene_adjust_timestamp_for_addon(ch->odm->parentscene, ch->DTS, ch->odm->parentscene->root_od->addon); + ch->CTS = (u32) gf_scene_adjust_timestamp_for_addon(ch->odm->parentscene, ch->CTS, ch->odm->parentscene->root_od->addon); + } + if (ch->clock->probe_ocr && gf_es_owns_clock(ch)) { s32 diff_ts = ch->DTS; diff_ts -= ch->clock->init_time; @@ -1188,23 +1212,6 @@ void gf_es_receive_sl_packet(GF_ClientService *serv, GF_Channel *ch, char *paylo } } - - /*if we had a previous buffer, add or discard it, depending on codec resilience*/ - if (hdr.accessUnitStartFlag && ch->buffer) { - if (ch->esd->slConfig->useAccessUnitEndFlag) { - GF_LOG(GF_LOG_WARNING, GF_LOG_SYNC, ("[SyncLayer] ES%d: missed end of AU (DTS %d)\n", ch->esd->ESID, ch->DTS)); - } - if (ch->codec_resilient) { - if (!ch->IsClockInit && !ch->skip_time_check_for_pending) gf_es_check_timing(ch); - Channel_DispatchAU(ch, 0); - } else { - gf_free(ch->buffer); - ch->buffer = NULL; - ch->AULength = 0; - ch->len = ch->allocSize = 0; - } - } - /*update the RAP marker on a packet base (to cope with AVC/H264 NALU->AU reconstruction)*/ if (hdr.randomAccessPointFlag) ch->IsRap = 1; @@ -1344,7 +1351,9 @@ GF_DBUnit *gf_es_get_au(GF_Channel *ch) if (ch->es_state != GF_ESM_ES_RUNNING) return NULL; if (!ch->is_pulling) { - if (!ch->AU_buffer_first) { + gf_mx_p(ch->mx); + + if (!ch->AU_buffer_first || (ch->BufferTime < (s32) ch->MaxBuffer/2) ) { /*query buffer level, don't sleep if too low*/ GF_NetworkCommand 
com; com.command_type = GF_NET_SERVICE_FLUSH_DATA; @@ -1354,7 +1363,9 @@ GF_DBUnit *gf_es_get_au(GF_Channel *ch) /*we must update buffering before fetching in order to stop buffering for streams with very few updates (especially streams with one update, like most of OD streams)*/ - if (ch->BufferOn) Channel_UpdateBuffering(ch, 0); + if (ch->BufferOn && ch->AU_buffer_first) Channel_UpdateBuffering(ch, 0); + gf_mx_v(ch->mx); + if (ch->BufferOn) { if (ch->first_au_fetched || !ch->AU_buffer_first || !ch->AU_buffer_first->next) return NULL; @@ -1443,7 +1454,7 @@ GF_DBUnit *gf_es_get_au(GF_Channel *ch) return NULL; } } - GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d - Dispatch Pull AU DTS %d - CTS %d - size %d time %d - UTC "LLU" ms\n", ch->esd->ESID, ch->DTS, ch->CTS, ch->AU_buffer_pull->dataLength, gf_clock_real_time(ch->clock), gf_net_get_utc() )); + GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ES%d (%s) - Dispatch Pull AU DTS %d - CTS %d - size %d time %d - UTC "LLU" ms\n", ch->esd->ESID, ch->odm->net_service->url, ch->DTS, ch->CTS, ch->AU_buffer_pull->dataLength, gf_clock_real_time(ch->clock), gf_net_get_utc() )); } /*this may happen in file streaming when data has not arrived yet, in which case we discard the AU*/ @@ -1459,10 +1470,13 @@ GF_DBUnit *gf_es_get_au(GF_Channel *ch) if (ch->pull_forced_buffer) { assert(ch->BufferOn); - gf_term_service_media_event(ch->odm, GF_EVENT_MEDIA_PLAYING); ch->pull_forced_buffer=0; gf_es_buffer_off(ch); + Channel_UpdateBuffering(ch, 1); + } else if (is_new_data && !ch->first_au_fetched) { + Channel_UpdateBuffering(ch, 1); } + return ch->AU_buffer_pull; } diff --git a/src/terminal/clock.c b/src/terminal/clock.c index 65b0c19..a8e4521 100644 --- a/src/terminal/clock.c +++ b/src/terminal/clock.c @@ -194,7 +194,8 @@ void gf_clock_set_time(GF_Clock *ck, u32 TS) void gf_clock_pause(GF_Clock *ck) { gf_mx_p(ck->mx); - if (!ck->Paused) ck->PauseTime = gf_term_get_time(ck->term); + if (!ck->Paused) + ck->PauseTime = gf_term_get_time(ck->term); ck->Paused += 1; gf_mx_v(ck->mx); } @@ -203,6 +204,9 @@ void gf_clock_resume(GF_Clock *ck) { gf_mx_p(ck->mx); assert(ck->Paused); + if (!ck->Paused) { + assert(!ck->Buffering); + } ck->Paused -= 1; if (!ck->Paused) ck->StartTime += gf_term_get_time(ck->term) - ck->PauseTime; diff --git a/src/terminal/decoder.c b/src/terminal/decoder.c index eaa1767..de45583 100644 --- a/src/terminal/decoder.c +++ b/src/terminal/decoder.c @@ -39,6 +39,24 @@ GF_Err gf_codec_process_raw_media_pull(GF_Codec *codec, u32 TimeAvailable); GF_Codec *gf_codec_new(GF_ObjectManager *odm, GF_ESD *base_layer, s32 PL, GF_Err *e) { GF_Codec *tmp; + + //this is an addon, we must check if it's scalable stream or not ... + //if so, do not create any new codec + if (odm->parentscene && odm->parentscene->root_od->addon) { + switch (base_layer->decoderConfig->objectTypeIndication) { + case GPAC_OTI_VIDEO_SHVC: + case GPAC_OTI_VIDEO_SVC: + odm->scalable_addon = 1; + odm->parentscene->root_od->addon->scalable_type = 1; + *e = GF_OK; + //fixme - we need a way to signal dependencies accross services!! + base_layer->dependsOnESID = 0xFFFF; + return NULL; + default: + break; + } + } + GF_SAFEALLOC(tmp, GF_Codec); if (! 
tmp) { *e = GF_OUT_OF_MEM; @@ -60,7 +78,13 @@ GF_Codec *gf_codec_new(GF_ObjectManager *odm, GF_ESD *base_layer, s32 PL, GF_Err tmp->Status = GF_ESM_CODEC_STOP; if (tmp->type==GF_STREAM_PRIVATE_MEDIA) tmp->type = GF_STREAM_VISUAL; - + + if (tmp->type==GF_STREAM_VISUAL) { + GF_CodecCapability cap; + cap.CapCode = GF_CODEC_DISPLAY_BPP; + cap.cap.valueInt = odm->term->compositor->video_out->max_screen_bpp; + gf_codec_set_capability(tmp, cap); + } tmp->Priority = base_layer->streamPriority ? base_layer->streamPriority : 1; GF_LOG(GF_LOG_INFO, GF_LOG_CODEC, ("[Codec] Found decoder %s for stream type %s\n", tmp->decio ? tmp->decio->module_name : "RAW", gf_esd_get_textual_description(base_layer) )); @@ -203,6 +227,12 @@ GF_Err gf_codec_add_channel(GF_Codec *codec, GF_Channel *ch) cap.CapCode = GF_CODEC_REORDER; if (gf_codec_get_capability(codec, &cap) == GF_OK) codec->is_reordering = cap.cap.valueInt; + + codec->trusted_cts = 0; + cap.CapCode = GF_CODEC_TRUSTED_CTS; + if (gf_codec_get_capability(codec, &cap) == GF_OK) + codec->trusted_cts = cap.cap.valueInt; + } if (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA) { @@ -266,7 +296,6 @@ GF_Err gf_codec_add_channel(GF_Codec *codec, GF_Channel *ch) } } - /*assign the first base layer as the codec clock by default, or current channel clock if no clock set Also assign codec priority here*/ if (!ch->esd->dependsOnESID || !codec->ck) { @@ -291,6 +320,14 @@ GF_Err gf_codec_add_channel(GF_Codec *codec, GF_Channel *ch) } } +Bool gf_codec_is_scene_or_image(GF_Codec *codec) +{ + if (!codec) return GF_TRUE; + if (!codec->CB) return GF_TRUE; + if (codec->CB->Capacity>1 || codec->CB->no_allocation) return GF_FALSE; + return GF_TRUE; +} + Bool gf_codec_remove_channel(GF_Codec *codec, struct _es_channel *ch) { s32 i; @@ -311,7 +348,7 @@ Bool gf_codec_remove_channel(GF_Codec *codec, struct _es_channel *ch) } -static void codec_update_stats(GF_Codec *codec, u32 dataLength, u32 dec_time, u32 DTS) +static void codec_update_stats(GF_Codec *codec, u32 dataLength, u64 dec_time, u32 DTS) { codec->total_dec_time += dec_time; codec->last_frame_time = gf_sys_clock(); @@ -344,18 +381,22 @@ static void MediaDecoder_GetNextAU(GF_Codec *codec, GF_Channel **activeChannel, { GF_Channel *ch; GF_DBUnit *AU; + GF_List *src_channels = codec->inChannels; + GF_ObjectManager *current_odm = codec->odm; u32 count, curCTS, i; - count = gf_list_count(codec->inChannels); + *nextAU = NULL; *activeChannel = NULL; + curCTS = 0; + +browse_scalable: + count = gf_list_count(src_channels); if (!count) return; - curCTS = 0; - /*browse from base to top layer*/ for (i=0;iinChannels, i); + ch = (GF_Channel*)gf_list_get(src_channels, i); if ((codec->type==GF_STREAM_OCR) && ch->IsClockInit) { /*check duration - we assume that scalable OCR streams are just pure nonsense...*/ @@ -378,7 +419,7 @@ refetch_AU: //gf_es_drop_au(ch); continue; } - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d selected as first layer (DTS %d)\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS, AU->DTS)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d (%s) AU DTS %d (size %d) selected as first layer (CTS %d)\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, ch->odm->net_service->url, AU->DTS, AU->dataLength, AU->CTS)); *nextAU = AU; *activeChannel = ch; curCTS = AU->CTS; @@ -389,7 +430,7 @@ refetch_AU: baseAU->data = gf_realloc(baseAU->data, baseAU->dataLength + AU->dataLength); memcpy(baseAU->data + baseAU->dataLength , AU->data, 
AU->dataLength); baseAU->dataLength += AU->dataLength; - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d reaggregated on base layer %d\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS, (*activeChannel)->esd->ESID)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d (%s) AU DTS %d reaggregated on base layer %d\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, ch->odm->net_service->url, AU->DTS, (*activeChannel)->esd->ESID)); gf_es_drop_au(ch); ch->first_au_fetched = 1; } @@ -413,35 +454,37 @@ refetch_AU: // AU found with the same CTS as the current base, we either had a drop on the base or some temporal scalability - aggregate from current channel. else { //we cannot tell whether this is a loss or temporal scalable, don't attempt to discard the AU - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d doesn't have the same CTS as the base (%d)- selected as first layer\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS, (*nextAU)->CTS)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d (%s) AU CTS %d doesn't have the same CTS as the base (%d)- selected as first layer\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, ch->odm->net_service->url, AU->CTS, (*nextAU)->CTS)); *nextAU = AU; *activeChannel = ch; - (*activeChannel)->prev_aggregated_dts = (*nextAU)->DTS; curCTS = AU->CTS; } } //we can rely on DTS - if DTS is earlier on the enhencement, this is a loss or temporal scalability else if (AU->DTS < (*nextAU)->DTS) { //Sample with the same DTS of this AU has been decoded. This is a loss, we need to drop it and re-fetch this channel - if (AU->DTS < (*activeChannel)->prev_aggregated_dts) { - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d: loss detected - re-fetch channel\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS)); + if (AU->DTS <= codec->last_unit_dts) + { + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d %s AU DTS %d but base DTS %d: loss detected - re-fetch channel\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, ch->odm->net_service->url, AU->DTS, (*nextAU)->DTS)); gf_es_drop_au(ch); goto refetch_AU; } //This is a temporal scalability so we re-aggregate from the enhencement else { - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d AU CTS %d selected as first layer\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, AU->CTS)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d (%s) AU DTS %d selected as first layer (CTS %d)\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, ch->odm->net_service->url, AU->DTS, AU->CTS)); *nextAU = AU; *activeChannel = ch; - (*activeChannel)->prev_aggregated_dts = (*nextAU)->DTS; curCTS = AU->CTS; } } } } - - if (*nextAU) - (*activeChannel)->prev_aggregated_dts = (*nextAU)->DTS; + //scalable addon, browse channels in scalable object + if (current_odm->scalable_odm) { + current_odm = current_odm->scalable_odm; + src_channels = current_odm->channels; + goto browse_scalable; + } if (codec->is_reordering && *nextAU && codec->first_frame_dispatched) { if ((*activeChannel)->esd->slConfig->no_dts_signaling) { @@ -465,6 +508,7 @@ refetch_AU: (*nextAU)->CTS = (*nextAU)->DTS; } } + } /*scalable browsing of input channels: find the AU with the lowest DTS on all input channels*/ @@ -514,7 +558,8 @@ static GF_Err 
SystemCodec_Process(GF_Codec *codec, u32 TimeAvailable) { GF_DBUnit *AU; GF_Channel *ch; - u32 now, obj_time, mm_level, au_time, cts; + u32 obj_time, mm_level, au_time, cts; + u64 now; GF_Scene *scene_locked; Bool check_next_unit; GF_SceneDecoder *sdec = (GF_SceneDecoder *)codec->decio; @@ -617,15 +662,15 @@ check_unit: updates in time*/ codec->odm->current_time = gf_clock_time(codec->ck); - now = gf_term_get_time(codec->odm->term); + now = gf_sys_clock_high_res(); if (codec->odm->term->bench_mode==2) { e = GF_OK; } else { e = sdec->ProcessData(sdec, AU->data, AU->dataLength, ch->esd->ESID, au_time, mm_level); } - now = gf_term_get_time(codec->odm->term) - now; + now = gf_sys_clock_high_res() - now; - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d at %d decoded AU TS %d in %d ms\n", sdec->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, codec->odm->current_time, AU->CTS, now)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d#CH%d at %d decoded AU TS %d in "LLU" us\n", sdec->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, codec->odm->current_time, AU->CTS, now)); codec_update_stats(codec, AU->dataLength, now, AU->DTS); codec->prev_au_size = AU->dataLength; @@ -671,7 +716,7 @@ exit: /*special handling of decoders not using ESM*/ static GF_Err PrivateScene_Process(GF_Codec *codec, u32 TimeAvailable) { - u32 now; + u64 now; GF_Channel *ch; GF_Scene *scene_locked; GF_SceneDecoder *sdec = (GF_SceneDecoder *)codec->decio; @@ -715,13 +760,13 @@ static GF_Err PrivateScene_Process(GF_Codec *codec, u32 TimeAvailable) if (!gf_mx_try_lock(scene_locked->root_od->term->compositor->mx)) return GF_OK; - now = gf_term_get_time(codec->odm->term); + now = gf_sys_clock_high_res(); if (codec->odm->term->bench_mode == 2) { e = GF_OK; } else { e = sdec->ProcessData(sdec, NULL, 0, ch->esd->ESID, codec->odm->current_time, GF_CODEC_LEVEL_NORMAL); } - now = gf_term_get_time(codec->odm->term) - now; + now = gf_sys_clock_high_res() - now; codec->last_unit_dts ++; /*resume on error*/ if (e && (codec->last_unit_dts<2) ) { @@ -759,17 +804,7 @@ static GFINLINE GF_Err LockCompositionUnit(GF_Codec *dec, u32 CU_TS, GF_CMUnit * static GFINLINE GF_Err UnlockCompositionUnit(GF_Codec *dec, GF_CMUnit *CU, u32 cu_size) { - - /*temporal scalability disabling: if we already rendered this, no point getting further*/ -/* - if (CU->TS < dec->CB->LastRenderedTS) { - GF_LOG(GF_LOG_INFO, GF_LOG_CODEC, ("[ODM] CU (TS %d) later than last frame drawn (TS %d) - droping\n", CU->TS, dec->CB->LastRenderedTS)); - cu_size = 0; - } - -*/ - - if (dec->is_reordering) { + if (dec->is_reordering && !dec->trusted_cts) { /*first dispatch from decoder, store CTS*/ if (!dec->first_frame_dispatched) { dec->recomputed_cts = CU->TS; @@ -839,8 +874,9 @@ static GF_Err MediaCodec_Process(GF_Codec *codec, u32 TimeAvailable) GF_DBUnit *AU; GF_Channel *ch, *prev_ch; Bool drop_late_frames = 0; + u64 now, entryTime; u32 mmlevel, cts; - u32 first, entryTime, now, obj_time, unit_size; + u32 first, obj_time, unit_size; GF_MediaDecoder *mdec = (GF_MediaDecoder*)codec->decio; GF_Err e = GF_OK; CU = NULL; @@ -849,13 +885,16 @@ static GF_Err MediaCodec_Process(GF_Codec *codec, u32 TimeAvailable) if audio codec muted we dispatch to keep sync in place*/ if (codec->Muted && (codec->type==GF_STREAM_VISUAL) ) return GF_OK; - entryTime = gf_term_get_time(codec->odm->term); + //cannot output frame, do nothing (we force a channel query before for pull mode) + if (codec->CB->Capacity == codec->CB->UnitCount) { + if (codec->CB->UnitCount > 1) 
return GF_OK; + else if (codec->direct_vout) return GF_OK; + } + + entryTime = gf_sys_clock_high_res(); if (!codec->odm->term->bench_mode && (codec->odm->term->flags & GF_TERM_DROP_LATE_FRAMES)) drop_late_frames = 1; - //cannot output frame, do nothing (we force a channel query before for pull mode) - if ( (codec->CB->UnitCount > 1) && (codec->CB->Capacity == codec->CB->UnitCount) ) - return GF_OK; /*fetch next AU in DTS order for this codec*/ MediaDecoder_GetNextAU(codec, &ch, &AU); @@ -871,7 +910,7 @@ static GF_Err MediaCodec_Process(GF_Codec *codec, u32 TimeAvailable) assert( CU ); unit_size = 0; if (codec->odm->term->bench_mode != 2) { - e = mdec->ProcessData(mdec, NULL, 0, 0, CU->data, &unit_size, 0, 0); + e = mdec->ProcessData(mdec, NULL, 0, 0, &CU->TS, CU->data, &unit_size, 0, 0); if (e==GF_OK) { e = UnlockCompositionUnit(codec, CU, unit_size); if (unit_size) return GF_OK; @@ -884,7 +923,7 @@ static GF_Err MediaCodec_Process(GF_Codec *codec, u32 TimeAvailable) } } /*if no data, and channel not buffering, ABORT CB buffer (data timeout or EOS not detectable)*/ - else if (ch && !ch->BufferOn && !ch->last_au_was_seek) + else if (ch && !ch->is_pulling && !ch->BufferOn && !ch->last_au_was_seek) gf_cm_abort_buffering(codec->CB); //GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d: No data in decoding buffer\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID)); @@ -893,6 +932,7 @@ static GF_Err MediaCodec_Process(GF_Codec *codec, u32 TimeAvailable) /*get the object time*/ obj_time = gf_clock_time(codec->ck); + /*Media Time for media codecs is updated in the CB*/ if (!codec->CB) { @@ -1001,7 +1041,7 @@ static GF_Err MediaCodec_Process(GF_Codec *codec, u32 TimeAvailable) scalable_retry: - now = gf_term_get_time(codec->odm->term); + now = gf_sys_clock_high_res(); assert( CU ); if (!CU->data && unit_size && !codec->CB->no_allocation) { @@ -1011,9 +1051,9 @@ scalable_retry: gf_cm_abort_buffering(codec->CB); } else { GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d ES%d at %d decoding frame DTS %d CTS %d size %d (%d in channels)\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, gf_clock_real_time(ch->clock), AU->DTS, AU->CTS, AU->dataLength, ch->AU_Count)); - e = mdec->ProcessData(mdec, AU->data, AU->dataLength, ch->esd->ESID, CU->data, &unit_size, AU->PaddingBits, mmlevel); + e = mdec->ProcessData(mdec, AU->data, AU->dataLength, ch->esd->ESID, &CU->TS, CU->data, &unit_size, AU->PaddingBits, mmlevel); } - now = gf_term_get_time(codec->odm->term) - now; + now = gf_sys_clock_high_res() - now; if (codec->Status == GF_ESM_CODEC_STOP) { GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Exit decode loop because codec has been stopped\n", codec->decio->module_name)); return GF_OK; @@ -1046,7 +1086,7 @@ scalable_retry: } e = UnlockCompositionUnit(codec, CU, unit_size); - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d ES%d at %d decoded packed frame TS %d in %d ms\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, gf_clock_real_time(ch->clock), AU->CTS, now)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d ES%d at %d decoded packed frame TS %d in "LLU" us\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, gf_clock_real_time(ch->clock), AU->CTS, now)); if (ch->skip_sl) { if (codec->bytes_per_sec) { codec->cur_audio_bytes += unit_size; @@ -1071,18 +1111,23 @@ scalable_retry: processing a scalable stream*/ case GF_OK: if (unit_size) { - GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d ES%d at %d decoded frame 
TS %d in %d ms (DTS %d - size %d) - %d in CB\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, gf_clock_real_time(ch->clock), AU->CTS, now, AU->DTS, AU->dataLength, codec->CB->UnitCount + 1)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] ODM%d ES%d at %d decoded frame TS %d in "LLU" us (DTS %d - size %d) - %d in CB\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, gf_clock_real_time(ch->clock), AU->CTS, now, AU->DTS, AU->dataLength, codec->CB->UnitCount + 1)); if (codec->direct_vout) { e = mdec->GetOutputBuffer(mdec, ch->esd->ESID, &codec->CB->pY, &codec->CB->pU, &codec->CB->pV); + if (e==GF_OK) { + gf_sc_set_video_pending_frame(codec->odm->term->compositor); + } } } - /*if no size the decoder is not using the composition memory - if the object is in intitial buffering resume it!!*/ - else if (codec->CB->Status == CB_BUFFER) { +#if 0 + /*if no size and the decoder is not using the composition memory - if the object is in intitial buffering resume it!!*/ + else if ( (!codec->CB->UnitSize && !codec->CB->Capacity) && (codec->CB->Status == CB_BUFFER)) { codec->nb_dispatch_skipped++; if (codec->nb_dispatch_skipped==codec->CB->UnitCount) gf_cm_abort_buffering(codec->CB); } +#endif codec_update_stats(codec, AU->dataLength, now, AU->DTS); if (ch->skip_sl) { @@ -1106,7 +1151,7 @@ scalable_retry: unit_size = 0; /*error - if the object is in intitial buffering resume it!!*/ gf_cm_abort_buffering(codec->CB); - GF_LOG(GF_LOG_INFO, GF_LOG_CODEC, ("[%s] ODM%d ES%d At %d (frame TS %d - %d ms ): decoded error %s\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, gf_clock_real_time(ch->clock), AU->CTS, now, gf_error_to_string(e) )); + GF_LOG(GF_LOG_INFO, GF_LOG_CODEC, ("[%s] ODM%d ES%d At %d (frame TS %d - "LLU" us ): decoded error %s\n", codec->decio->module_name, codec->odm->OD->objectDescriptorID, ch->esd->ESID, gf_clock_real_time(ch->clock), AU->CTS, now, gf_error_to_string(e) )); e = GF_OK; break; } @@ -1121,9 +1166,9 @@ scalable_retry: #ifndef GPAC_DISABLE_LOG if (unit_size) { if (ch->is_pulling) { - GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[%s] at %d decoded frame CTS %d in %d ms\n", codec->decio->module_name, gf_clock_real_time(ch->clock), AU->CTS, now)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[%s] at %d decoded frame CTS %d in "LLU" us\n", codec->decio->module_name, gf_clock_real_time(ch->clock), AU->CTS, now)); } else { - GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[%s] at %d decoded frame CTS %d in %d ms - %d AU in channel\n", codec->decio->module_name, gf_clock_real_time(ch->clock), AU->CTS, now, ch->AU_Count)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[%s] at %d decoded frame CTS %d in "LLU" us - %d AU in channel\n", codec->decio->module_name, gf_clock_real_time(ch->clock), AU->CTS, now, ch->AU_Count)); } } #endif @@ -1162,19 +1207,19 @@ scalable_retry: GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Exit decode loop because no more input data\n", codec->decio->module_name)); return GF_OK; } - now = gf_term_get_time(codec->odm->term) - entryTime; + now = gf_sys_clock_high_res() - entryTime; /*escape from decoding loop only if above critical limit - this is to avoid starvation on audio*/ if (!ch->esd->dependsOnESID && (codec->CB->UnitCount > codec->CB->Min)) { if (now >= TimeAvailable) { GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Exit decode loop because time is up: %d vs %d available\n", codec->decio->module_name, now, TimeAvailable)); return GF_OK; } - } else if (now >= 10*TimeAvailable) { - GF_LOG(GF_LOG_DEBUG, 
GF_LOG_CODEC, ("[%s] Exit decode loop because running for too long: %d vs %d available\n", codec->decio->module_name, now, TimeAvailable)); + } else if (now >= 10000*TimeAvailable) { + GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Exit decode loop because running for too long: %d vs %d available\n", codec->decio->module_name, now/1000, TimeAvailable)); + return GF_OK; + } else if (codec->odm->term->bench_mode) { return GF_OK; } - MediaDecoder_GetNextAU(codec, &ch, &AU); - if (!ch || !AU) return GF_OK; } return GF_OK; } @@ -1356,7 +1401,8 @@ void gf_codec_set_status(GF_Codec *codec, u32 Status) codec->prev_au_size = 0; codec->Status = Status; codec->last_stat_start = codec->cur_bit_size = codec->max_bit_rate = codec->avg_bit_rate = 0; - codec->nb_dec_frames = codec->total_dec_time = codec->max_dec_time = 0; + codec->nb_dec_frames = 0; + codec->total_dec_time = codec->max_dec_time = 0; codec->cur_audio_bytes = codec->cur_video_frames = 0; codec->nb_droped = 0; codec->nb_repeted_frames = 0; diff --git a/src/terminal/media_control.c b/src/terminal/media_control.c index 1df75e8..2612298 100644 --- a/src/terminal/media_control.c +++ b/src/terminal/media_control.c @@ -390,6 +390,7 @@ void RenderMediaControl(GF_Node *node, void *rs, Bool is_destroy) stack->media_start = stack->control->mediaStartTime; stack->media_stop = stack->control->mediaStopTime; stack->is_init = 1; + stack->paused = 0; /*the object has already been started, and media start time is not 0, restart*/ if (stack->stream->num_open) { if ( (stack->media_start > 0) || (gf_list_count(stack->seg)>0 ) ) { @@ -467,7 +468,8 @@ void MC_Modified(GF_Node *node) if (!stack) return; if (stack->changed!=2) { /*check URL*/ - if (MC_URLChanged(&stack->url, &stack->control->url)) stack->changed = 2; + if (MC_URLChanged(&stack->url, &stack->control->url)) + stack->changed = 2; /*check speed (play/pause)*/ else if (stack->media_speed != stack->control->mediaSpeed) stack->changed = 1; diff --git a/src/terminal/media_manager.c b/src/terminal/media_manager.c index d480c5d..9598962 100644 --- a/src/terminal/media_manager.c +++ b/src/terminal/media_manager.c @@ -270,7 +270,7 @@ Bool gf_term_find_codec(GF_Terminal *term, GF_Codec *codec) return 0; } -static u32 MM_SimulationStep_Decoder(GF_Terminal *term) +static u32 MM_SimulationStep_Decoder(GF_Terminal *term, u32 *nb_active_decs) { CodecEntry *ce; GF_Err e; @@ -295,10 +295,10 @@ static u32 MM_SimulationStep_Decoder(GF_Terminal *term) count = gf_list_count(term->codecs); time_left = term->frame_duration; + *nb_active_decs = 0; if (term->last_codec >= count) term->last_codec = 0; remain = count; - time_taken = 0; /*this is ultra basic a nice scheduling system would be much better*/ while (remain) { ce = (CodecEntry*)gf_list_get(term->codecs, term->last_codec); @@ -313,7 +313,7 @@ static u32 MM_SimulationStep_Decoder(GF_Terminal *term) time_slice = ce->dec->Priority * time_left / term->cumulated_priority; if (ce->dec->PriorityBoost) time_slice *= 2; time_taken = gf_sys_clock(); - + (*nb_active_decs) ++; e = gf_codec_process(ce->dec, time_slice); time_taken = gf_sys_clock() - time_taken; /*avoid signaling errors too often...*/ @@ -364,26 +364,31 @@ u32 MM_Loop(void *par) // GF_LOG(GF_LOG_DEBUG, GF_LOG_RTI, ("(RTI] Terminal Cycle Log\tServices\tDecoders\tCompositor\tSleep\n")); while (term->flags & GF_TERM_RUNNING) { + u32 nb_decs = 0; u32 left = 0; - if (do_codec) left = MM_SimulationStep_Decoder(term); + if (do_codec) left = MM_SimulationStep_Decoder(term, &nb_decs); else left = term->frame_duration; if 
(do_scene) { + u32 ms_until_next=0; u32 time_taken = gf_sys_clock(); - gf_sc_draw_frame(term->compositor); + gf_sc_draw_frame(term->compositor, &ms_until_next); time_taken = gf_sys_clock() - time_taken; - if (left>time_taken) + if (ms_until_next<term->frame_duration/2) { + left = 0; + } else if (left>time_taken) left -= time_taken; else left = 0; } - if (do_regulate) { if (term->bench_mode) { gf_sleep(0); } else { if (left==term->frame_duration) { - gf_sleep(term->frame_duration/2); + //if nothing was done during this pass but we have active decoder, just yield. We don't want to sleep since + //composition memory could be released at any time. We should have a signal here, rather than a wait + gf_sleep(nb_decs ? 0 : term->frame_duration/2); } } } @@ -395,20 +400,20 @@ u32 MM_Loop(void *par) u32 RunSingleDec(void *ptr) { GF_Err e; - u32 time_left; + u64 time_taken; CodecEntry *ce = (CodecEntry *) ptr; GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[MediaDecoder %d] Entering thread ID %d\n", ce->dec->odm->OD->objectDescriptorID, gf_th_id() )); while (ce->flags & GF_MM_CE_RUNNING) { - time_left = gf_sys_clock(); + time_taken = gf_sys_clock_high_res(); if (!ce->dec->force_cb_resize) { gf_mx_p(ce->mx); e = gf_codec_process(ce->dec, ce->dec->odm->term->frame_duration); if (e) gf_term_message(ce->dec->odm->term, ce->dec->odm->net_service->url, "Decoding Error", e); gf_mx_v(ce->mx); } - time_left = gf_sys_clock() - time_left; + time_taken = gf_sys_clock_high_res() - time_taken; /*no priority boost this way for systems codecs, priority is dynamically set by not releasing the @@ -419,10 +424,8 @@ u32 RunSingleDec(void *ptr) /*while on don't sleep*/ if (ce->dec->PriorityBoost) continue; - if (time_left) { + if (time_taken<20) { gf_sleep(1); - } else { - gf_sleep(ce->dec->odm->term->frame_duration/2); } } ce->flags |= GF_MM_CE_DEAD; @@ -437,6 +440,7 @@ void gf_term_start_codec(GF_Codec *codec, Bool is_resume) GF_CodecCapability cap; CodecEntry *ce; GF_Terminal *term = codec->odm->term; + if (!gf_list_count(codec->odm->channels)) return; ce = mm_get_codec(term->codecs, codec); if (!ce) return; @@ -469,6 +473,7 @@ void gf_term_start_codec(GF_Codec *codec, Bool is_resume) } } + /*unlock dec*/ if (ce->mx) gf_mx_v(ce->mx); @@ -650,14 +655,20 @@ void gf_term_set_priority(GF_Terminal *term, s32 Priority) GF_EXPORT u32 gf_term_process_step(GF_Terminal *term) { + u32 nb_decs=0; u32 time_taken = gf_sys_clock(); if (term->flags & GF_TERM_NO_DECODER_THREAD) { - MM_SimulationStep_Decoder(term); + MM_SimulationStep_Decoder(term, &nb_decs); } if (term->flags & GF_TERM_NO_COMPOSITOR_THREAD) { - gf_sc_draw_frame(term->compositor); + u32 ms_until_next; + gf_sc_draw_frame(term->compositor, &ms_until_next); + if (ms_until_next<term->compositor->frame_duration/2) { + time_taken=0; + } + } time_taken = gf_sys_clock() - time_taken; if (time_taken > term->compositor->frame_duration) { @@ -668,7 +679,7 @@ u32 gf_term_process_step(GF_Terminal *term) if (term->bench_mode || (term->user->init_flags & GF_TERM_NO_REGULATION)) return time_taken; if (2*time_taken >= term->compositor->frame_duration) { - gf_sleep(time_taken); + gf_sleep(nb_decs ? 1 : time_taken); } return time_taken; } @@ -693,7 +704,7 @@ GF_Err gf_term_process_flush(GF_Terminal *term) gf_mx_v(term->mm_mx); } - if (!gf_sc_draw_frame(term->compositor)) + if (!gf_sc_draw_frame(term->compositor, NULL)) break; if (!
(term->user->init_flags & GF_TERM_NO_REGULATION)) diff --git a/src/terminal/media_memory.c b/src/terminal/media_memory.c index 9566739..9b74865 100644 --- a/src/terminal/media_memory.c +++ b/src/terminal/media_memory.c @@ -148,10 +148,10 @@ GF_CompositionMemory *gf_cm_new(u32 UnitSize, u32 capacity, Bool no_allocation) void gf_cm_del(GF_CompositionMemory *cb) { gf_odm_lock(cb->odm, 1); - /*may happen when CB is destroyed right after creation in case*/ + /*may happen when CB is destroyed right after creation */ if (cb->Status == CB_BUFFER) { gf_clock_buffer_off(cb->odm->codec->ck); - GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] CB destroy - ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); } if (cb->input){ /*break the loop and destroy*/ @@ -324,6 +324,14 @@ exit: #endif } +static void cb_set_buffer_off(GF_CompositionMemory *cb) +{ + gf_clock_buffer_off(cb->odm->codec->ck); + GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] CB Buffering done ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); + + gf_term_service_media_event(cb->odm->parentscene->root_od, GF_EVENT_MEDIA_CANPLAY); +} + void gf_cm_unlock_input(GF_CompositionMemory *cb, GF_CMUnit *cu, u32 cu_size, Bool codec_reordering) { /*nothing dispatched, ignore*/ @@ -347,16 +355,12 @@ void gf_cm_unlock_input(GF_CompositionMemory *cb, GF_CMUnit *cu, u32 cu_size, Bo cu->dataLength = cu_size; cu->RenderedLength = 0; - /*turn off buffering - this must be done now rather than when fetching first output frame since we're not + /*turn off buffering for audio - this must be done now rather than when fetching first output frame since we're not sure output is fetched (Switch node, ...)*/ - if ( (cb->Status == CB_BUFFER) && (cb->UnitCount >= cb->Capacity) ) { + if ( (cb->Status == CB_BUFFER) && (cb->UnitCount >= cb->Capacity) && (cb->odm->codec->type == GF_STREAM_AUDIO)) { /*done with buffering, signal to the clock (ONLY ONCE !)*/ cb->Status = CB_BUFFER_DONE; - gf_clock_buffer_off(cb->odm->codec->ck); -// cb->odm->codec->ck->data_timeout = 0; - GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); - - gf_term_service_media_event(cb->odm->parentscene->root_od, GF_EVENT_MEDIA_CANPLAY); + cb_set_buffer_off(cb); } //new FPS regulation doesn't need this signaling @@ -532,6 +536,7 @@ GF_CMUnit *gf_cm_get_output(GF_CompositionMemory *cb) /*force update of media time*/ mediasensor_update_timing(cb->odm, 1); #endif + gf_odm_signal_eos(cb->odm); } return NULL; } @@ -556,26 +561,48 @@ GF_CMUnit *gf_cm_get_output(GF_CompositionMemory *cb) } - /*drop the output CU*/ -void gf_cm_drop_output(GF_CompositionMemory *cb) +void gf_cm_output_kept(GF_CompositionMemory *cb) { assert(cb->UnitCount); /*this allows reuse of the CU*/ cb->output->RenderedLength = 0; cb->LastRenderedTS = cb->output->TS; + if (cb->Status==CB_BUFFER) { + cb_set_buffer_off(cb); + cb->Status=CB_PLAY; + } +} + +/*drop the output CU*/ +void gf_cm_drop_output(GF_CompositionMemory *cb) +{ + gf_cm_output_kept(cb); + if 
(cb->Status!=CB_PLAY) { + return; + } + /*WARNING: in RAW mode, we (for the moment) only have one unit - setting output->dataLength to 0 means the input is available for the raw channel - we have to make sure the output is completely reseted before releasing the sema*/ /*on visual streams (except raw oness), always keep the last AU*/ - if (!cb->no_allocation && cb->output->dataLength && (cb->odm->codec->type == GF_STREAM_VISUAL) ) { + if (cb->output->dataLength && (cb->odm->codec->type == GF_STREAM_VISUAL) ) { if ( !cb->output->next->dataLength || (cb->Capacity == 1) ) { - if (cb->odm->raw_frame_sema) { - cb->output->dataLength = 0; - gf_sema_notify(cb->odm->raw_frame_sema, 1); + Bool no_drop = 1; + if (cb->no_allocation ) { + if (cb->odm->term->bench_mode) + no_drop = 0; + else if (gf_clock_time(cb->odm->codec->ck) > cb->output->TS) + no_drop = 0; + } + if (no_drop) { + if (cb->odm->raw_frame_sema) { + cb->output->dataLength = 0; + gf_sema_notify(cb->odm->raw_frame_sema, 1); + } + return; } - return; } } @@ -596,6 +623,9 @@ void gf_cm_drop_output(GF_CompositionMemory *cb) void gf_cm_set_status(GF_CompositionMemory *cb, u32 Status) { + if (cb->Status == Status) + return; + gf_odm_lock(cb->odm, 1); /*if we're asked for play, trigger on buffering*/ if (Status == CB_PLAY) { @@ -619,13 +649,16 @@ void gf_cm_set_status(GF_CompositionMemory *cb, u32 Status) cb->LastRenderedTS = 0; if (cb->Status == CB_BUFFER) { gf_clock_buffer_off(cb->odm->codec->ck); - GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] CB status changed - ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); } if (Status == CB_STOP) { gf_cm_reset(cb); cb->LastRenderedTS = 0; } cb->Status = Status; + if (Status==CB_BUFFER) { + gf_clock_buffer_on(cb->odm->codec->ck); + } } gf_odm_lock(cb->odm, 0); @@ -641,8 +674,7 @@ void gf_cm_set_eos(GF_CompositionMemory *cb) if (cb->Status == CB_BUFFER) { cb->Status = CB_BUFFER_DONE; gf_clock_buffer_off(cb->odm->codec->ck); -// cb->odm->codec->ck->data_timeout = 0; - GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); + GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] CB EOS - ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); } cb->HasSeenEOS = 1; @@ -663,7 +695,6 @@ Bool gf_cm_is_running(GF_CompositionMemory *cb) return !cb->odm->codec->ck->Paused; if ((cb->Status == CB_BUFFER_DONE) && (gf_clock_is_started(cb->odm->codec->ck) || cb->odm->term->play_state) ) { - cb->Status = CB_PLAY; return 1; } @@ -686,8 +717,6 @@ void gf_cm_abort_buffering(GF_CompositionMemory *cb) { if (cb->Status == CB_BUFFER) { cb->Status = CB_BUFFER_DONE; - gf_clock_buffer_off(cb->odm->codec->ck); -// cb->odm->codec->ck->data_timeout = 0; - GF_LOG(GF_LOG_DEBUG, GF_LOG_SYNC, ("[SyncLayer] ODM%d: buffering off at %d (nb buffering on clock: %d)\n", cb->odm->OD->objectDescriptorID, gf_term_get_time(cb->odm->term), cb->odm->codec->ck->Buffering)); + cb_set_buffer_off(cb); } } diff --git a/src/terminal/media_memory.h b/src/terminal/media_memory.h index 
51feb36..6470384 100644 --- a/src/terminal/media_memory.h +++ b/src/terminal/media_memory.h @@ -147,9 +147,10 @@ void gf_cm_rewind_input(GF_CompositionMemory *cb); /*fetch output buffer, NULL if output is empty*/ GF_CMUnit *gf_cm_get_output(GF_CompositionMemory *cb); -/*release the output buffer once rendered - if renderedLength is not equal to dataLength the -output is NOT droped*/ +/*release the output buffer once rendered */ void gf_cm_drop_output(GF_CompositionMemory *cb); +/*notifies the output has not been discarded: sets render length to 0 and check clock resume if needed*/ +void gf_cm_output_kept(GF_CompositionMemory *cb); /*reset the entire memory*/ void gf_cm_reset(GF_CompositionMemory *cb); diff --git a/src/terminal/media_object.c b/src/terminal/media_object.c index f94e639..374ef63 100644 --- a/src/terminal/media_object.c +++ b/src/terminal/media_object.c @@ -359,8 +359,9 @@ void gf_mo_update_caps(GF_MediaObject *mo) } } + GF_EXPORT -char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestamp, u32 *size, s32 *ms_until_pres, u32 *ms_until_next) +char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestamp, u32 *size, s32 *ms_until_pres, s32 *ms_until_next) { GF_Codec *codec; Bool force_decode = GF_FALSE; @@ -369,8 +370,6 @@ char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestam s32 diff; Bool bench_mode; - - *eos = GF_FALSE; *eos = GF_FALSE; *timestamp = mo->timestamp; *size = mo->framesize; @@ -416,9 +415,10 @@ char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestam } } - /*fast forward, force decode if no data is available*/ - if (! *eos && (codec->ck->speed > FIX_ONE)) + /*fast forward, bench mode with composition memory: force decode if no data is available*/ + if (! 
*eos && ((codec->ck->speed > FIX_ONE) || (codec->odm->term->bench_mode && !codec->CB->no_allocation) || (codec->type==GF_STREAM_AUDIO) ) ) force_decode = GF_TRUE; + if (force_decode) { u32 retry=100; @@ -430,8 +430,9 @@ char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestam break; } retry--; + gf_sleep(0); } - if (!retry) { + if (!retry && codec->force_cb_resize) { GF_LOG(GF_LOG_WARNING, GF_LOG_MEDIA, ("[ODM%d] At %d could not resize and decode next frame in one pass - blank frame after TS %d\n", mo->odm->OD->objectDescriptorID, gf_clock_time(codec->ck), mo->timestamp)); } if (!gf_odm_lock_mo(mo)) @@ -449,9 +450,9 @@ char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestam /*note this assert is NOT true when recomputing DTS from CTS on the fly (MPEG1/2 RTP and H264/AVC RTP)*/ //assert(CU->TS >= codec->CB->LastRenderedTS); - if (codec->CB->UnitCount==1) resync = GF_FALSE; + if (codec->CB->UnitCount<=1) resync = GF_FALSE; - if (bench_mode) { + if (bench_mode && resync) { resync = GF_FALSE; if (mo->timestamp == CU->TS) { if (CU->next->dataLength) { @@ -461,11 +462,23 @@ char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestam } } + /*resync*/ obj_time = gf_clock_time(codec->ck); + + //no drop mode: all frames are presented, we discard the current output only if already presented and next frame time is mature + if (!(mo->odm->term->flags & GF_TERM_DROP_LATE_FRAMES) && (mo->type==GF_MEDIA_OBJECT_VIDEO)) { + resync=GF_FALSE; + if (gf_clock_is_started(mo->odm->codec->ck) && (mo->timestamp==CU->TS) && CU->next->dataLength && (CU->next->TS <= obj_time) ) { + gf_cm_drop_output(codec->CB); + CU = gf_cm_get_output(codec->CB); + } + } + if (resync) { u32 nb_droped = 0; while (CU->TS < obj_time) { + u32 diff; if (!CU->next->dataLength) { if (force_decode) { obj_time = gf_clock_time(codec->ck); @@ -481,6 +494,11 @@ char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestam break; } } + diff = CU->next->TS; + diff -= CU->TS; + if (CU->TS + codec->CB->Capacity*diff > obj_time) { + break; + } /*figure out closest time*/ if (CU->next->TS > obj_time) { *eos = GF_FALSE; @@ -525,7 +543,7 @@ char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestam /*signal EOS after rendering last frame, not while rendering it*/ *eos = GF_FALSE; - GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] At OTB %u fetch frame TS %u size %d (previous TS %d) - %d unit in CB - UTC "LLU" ms - %d ms until CTS is due - %d ms until next frame\n", mo->odm->OD->objectDescriptorID, gf_clock_time(codec->ck), CU->TS, mo->framesize, mo->timestamp, codec->CB->UnitCount, gf_net_get_utc(), mo->ms_until_pres, mo->ms_until_next )); + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d (%s)] At OTB %u fetch frame TS %u size %d (previous TS %d) - %d unit in CB - UTC "LLU" ms - %d ms until CTS is due - %d ms until next frame\n", mo->odm->OD->objectDescriptorID, mo->odm->net_service->url, gf_clock_time(codec->ck), CU->TS, mo->framesize, mo->timestamp, codec->CB->UnitCount, gf_net_get_utc(), mo->ms_until_pres, mo->ms_until_next )); } /*also adjust CU time based on consummed bytes in input, since some codecs output very large audio chunks*/ @@ -546,7 +564,7 @@ char *gf_mo_fetch_data(GF_MediaObject *mo, Bool resync, Bool *eos, u32 *timestam gf_term_service_media_event(mo->odm, GF_EVENT_MEDIA_TIME_UPDATE); gf_odm_lock(mo->odm, 0); - if (codec->direct_vout) return codec->CB->pY; + if (codec->direct_vout) return (char *) codec->CB->pY; return mo->frame; } @@ 
-561,7 +579,7 @@ GF_Err gf_mo_get_raw_image_planes(GF_MediaObject *mo, u8 **pY_or_RGB, u8 **pU, u } GF_EXPORT -void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 forceDrop) +void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 drop_mode) { #if 0 u32 obj_time; @@ -577,8 +595,14 @@ void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 forceDrop) gf_odm_lock(mo->odm, 0); return; } + +/* if ((drop_mode==0) && !(mo->odm->term->flags & GF_TERM_DROP_LATE_FRAMES) && (mo->type==GF_MEDIA_OBJECT_VIDEO)) + drop_mode=1; + else +*/ if (mo->odm->codec->CB->no_allocation) - forceDrop = 1; + drop_mode = 1; + /*perform a sanity check on TS since the CB may have changed status - this may happen in temporal scalability only*/ @@ -590,10 +614,10 @@ void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 forceDrop) mo->odm->codec->CB->output->RenderedLength += nb_bytes; } - if (forceDrop<0) { + if (drop_mode<0) { /*only allow for explicit last frame keeping if only one node is using the resource otherwise this would block the composition memory*/ - if (mo->num_open>1) forceDrop=0; + if (mo->num_open>1) drop_mode=0; else { gf_odm_lock(mo->odm, 0); return; @@ -602,26 +626,14 @@ void gf_mo_release_data(GF_MediaObject *mo, u32 nb_bytes, s32 forceDrop) /*discard frame*/ if (mo->odm->codec->CB->output->RenderedLength == mo->odm->codec->CB->output->dataLength) { - if (forceDrop) { + if (drop_mode) { gf_cm_drop_output(mo->odm->codec->CB); - forceDrop--; -// if (forceDrop) mo->odm->codec->nb_droped++; + GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[ODM%d] At OTB %u drop frame TS %u\n", mo->odm->OD->objectDescriptorID, gf_clock_time(mo->odm->codec->ck), mo->timestamp)); } else { /*we cannot drop since we don't know the speed of the playback (which can even be frame by frame)*/ -#if 0 - obj_time = gf_clock_time(mo->odm->codec->ck); - if (mo->odm->codec->CB->output->next->dataLength) { - if (2*obj_time < mo->timestamp + mo->odm->codec->CB->output->next->TS ) { - mo->odm->codec->CB->output->RenderedLength = 0; - } else { - gf_cm_drop_output(mo->odm->codec->CB); - } - } else { - gf_cm_drop_output(mo->odm->codec->CB); - } -#else - mo->odm->codec->CB->output->RenderedLength = 0; -#endif + + //notif CB we kept the output + gf_cm_output_kept(mo->odm->codec->CB); } } } @@ -1213,11 +1225,15 @@ GF_SceneGraph *gf_mo_get_scenegraph(GF_MediaObject *mo) GF_EXPORT GF_DOMEventTarget *gf_mo_event_target_add_node(GF_MediaObject *mo, GF_Node *n) { +#ifndef GPAC_DISABLE_SVG GF_DOMEventTarget *target = NULL; if (!mo ||!n) return NULL; - target = gf_html_media_get_event_target_from_node(n); + target = gf_dom_event_get_target_from_node(n); gf_list_add(mo->evt_targets, target); return target; +#else + return NULL; +#endif } GF_Err gf_mo_event_target_remove(GF_MediaObject *mo, GF_DOMEventTarget *target) @@ -1266,7 +1282,9 @@ GF_Err gf_mo_event_target_remove_by_node(GF_MediaObject *mo, GF_Node *node) GF_DOMEventTarget *target = (GF_DOMEventTarget *)gf_list_get(mo->evt_targets, i); if (target->ptr == node) { gf_list_del_item(mo->evt_targets, target); - return GF_OK; + i--; + count--; + //return GF_OK; } } return GF_BAD_PARAM; @@ -1275,7 +1293,7 @@ GF_Err gf_mo_event_target_remove_by_node(GF_MediaObject *mo, GF_Node *node) GF_EXPORT GF_Node *gf_event_target_get_node(GF_DOMEventTarget *target) { - if (target && (target->ptr_type == GF_DOM_EVENT_TARGET_HTML_MEDIA)) { + if (target && (target->ptr_type == GF_DOM_EVENT_TARGET_NODE)) { return (GF_Node *)target->ptr; } return NULL; diff --git a/src/terminal/mpeg4_inline.c 
b/src/terminal/mpeg4_inline.c index 11f246f..4fbd637 100644 --- a/src/terminal/mpeg4_inline.c +++ b/src/terminal/mpeg4_inline.c @@ -95,7 +95,7 @@ void gf_inline_on_modified(GF_Node *node) if (mo->num_open) { if (!changed) return; - gf_scene_notify_event(scene, GF_EVENT_UNLOAD, node, NULL, GF_OK); + gf_scene_notify_event(scene, GF_EVENT_UNLOAD, node, NULL, GF_OK, GF_TRUE); gf_node_dirty_parents(node); gf_mo_event_target_remove_by_node(mo, node); @@ -148,7 +148,6 @@ void gf_inline_on_modified(GF_Node *node) In such a case we would end up in a deadlock - this needs urgent fixing ... */ - if (ODID) { /*if no parent we must process the url change as we may not be traversed later on (not in the scene tree)*/ if (gf_node_get_parent(node, 0)==NULL) { @@ -238,7 +237,7 @@ static void gf_inline_traverse(GF_Node *n, void *rs, Bool is_destroy) if (!scene) return; mo = scene->root_od ? scene->root_od->mo : NULL; - gf_scene_notify_event(scene, GF_EVENT_UNLOAD, n, NULL, GF_OK); + gf_scene_notify_event(scene, GF_EVENT_UNLOAD, n, NULL, GF_OK, GF_TRUE); if (!mo) return; gf_mo_event_target_remove_by_node(mo, n); @@ -290,6 +289,21 @@ static void gf_inline_traverse(GF_Node *n, void *rs, Bool is_destroy) } } + /*if not attached return (attaching the graph cannot be done in render since render is not called while unattached :) */ + if (!scene->graph_attached) { + /*just like protos, we must invalidate parent graph until attached*/ + gf_node_dirty_set(n, 0, GF_TRUE); + return; + } + /*clear dirty flags for any sub-inlines, bitmaps or protos*/ + gf_node_dirty_clear(n, 0); + + current_url = scene->current_url; + scene->current_url = & ((M_Inline*)n)->url; + gf_sc_traverse_subscene(scene->root_od->term->compositor, n, scene->graph, rs); + scene->current_url = current_url; + + //do we have to restart for next frame ? If so let's do it gf_inline_check_restart(scene); /*if we need to restart, shutdown graph and do it*/ @@ -311,19 +325,6 @@ static void gf_inline_traverse(GF_Node *n, void *rs, Bool is_destroy) return; } - /*if not attached return (attaching the graph cannot be done in render since render is not called while unattached :) */ - if (!scene->graph_attached) { - /*just like protos, we must invalidate parent graph until attached*/ - gf_node_dirty_set(n, 0, GF_TRUE); - return; - } - /*clear dirty flags for any sub-inlines, bitmaps or protos*/ - gf_node_dirty_clear(n, 0); - - current_url = scene->current_url; - scene->current_url = & ((M_Inline*)n)->url; - gf_sc_traverse_subscene(scene->root_od->term->compositor, n, scene->graph, rs); - scene->current_url = current_url; } diff --git a/src/terminal/network_service.c b/src/terminal/network_service.c index 20065a7..bc61081 100644 --- a/src/terminal/network_service.c +++ b/src/terminal/network_service.c @@ -54,7 +54,7 @@ static void term_on_message(void *user_priv, GF_ClientService *service, GF_Err e sprintf(szMsg, "!! 
UDP down (%s) - Retrying with TCP !!\n", message); gf_term_message(term, service->url, szMsg, GF_IP_NETWORK_FAILURE); - /*reload scene*/ + /*reload scene - FIXME this shall work on inline nodes, not on the root !*/ if (term->reload_url) gf_free(term->reload_url); term->reload_state = 1; term->reload_url = gf_strdup(term->root_scene->root_od->net_service->url); @@ -107,7 +107,7 @@ static void term_on_connect(void *user_priv, GF_ClientService *service, LPNETCHA evt.connect.is_connected = 0; gf_term_send_event(term, &evt); } else { - if (root->subscene) gf_scene_notify_event(root->subscene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, err); + if (root->subscene) gf_scene_notify_event(root->subscene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, err, GF_FALSE); /*try to reinsert OD for VRML/X3D with multiple URLs: 1- first remove from parent scene without destroying object, this will trigger a re-setup if other URLs are present @@ -447,6 +447,35 @@ static void term_on_media_add(void *user_priv, GF_ClientService *service, GF_Des if (!no_scene_check && scene->is_dynamic_scene) gf_scene_regenerate(scene); } +static void gather_buffer_level(GF_ObjectManager *odm, GF_ClientService *service, GF_NetworkCommand *com, s32 *max_buffer_time) +{ + u32 j, count = gf_list_count(odm->channels); + for (j=0; j<count; j++) { + GF_Channel *ch = (GF_Channel *)gf_list_get(odm->channels, j); + if (ch->service != service) continue; + if (ch->es_state != GF_ESM_ES_RUNNING) continue; + if (com->base.on_channel && (com->base.on_channel != ch)) continue; + if (/*!ch->MaxBuffer || */ch->dispatch_after_db || ch->bypass_sl_and_db || ch->IsEndOfStream) continue; + //perform buffer management only on base layer -this is because we don't signal which ESs are on/off in the underlying service ... + if (ch->esd->dependsOnESID) continue; + if (ch->MaxBuffer>com->buffer.max) com->buffer.max = ch->MaxBuffer; + if (ch->MinBuffer<com->buffer.min) com->buffer.min = ch->MinBuffer; + if (ch->IsClockInit) { + if (ch->BufferTime > (s32) *max_buffer_time) + *max_buffer_time = ch->BufferTime; + + /*if we don't have more units (compressed or not) than requested max for the composition memory, request more data*/ + if (ch->odm->codec && ch->odm->codec->CB && (odm->codec->CB->UnitCount + ch->AU_Count <= odm->codec->CB->Capacity)) { + com->buffer.occupancy = 0; + } else if ( (u32) ch->BufferTime < com->buffer.occupancy) { + com->buffer.occupancy = ch->BufferTime; + } + } else { + com->buffer.occupancy = 0; + } + } +} + static void term_on_command(void *user_priv, GF_ClientService *service, GF_NetworkCommand *com, GF_Err response) { GF_Channel *ch; @@ -454,7 +483,7 @@ static void term_on_command(void *user_priv, GF_ClientService *service, GF_Netwo if (com->command_type==GF_NET_BUFFER_QUERY) { GF_List *od_list; - u32 i; + u32 i, max_buffer_time; GF_ObjectManager *odm; com->buffer.max = 0; com->buffer.min = com->buffer.occupancy = (u32) -1; @@ -477,37 +506,23 @@ static void term_on_command(void *user_priv, GF_ClientService *service, GF_Netwo /*get exclusive access to media scheduler, to make sure ODs are not being manipulated*/ gf_mx_p(term->mm_mx); + max_buffer_time=0; if (!gf_list_count(od_list)) GF_LOG(GF_LOG_WARNING, GF_LOG_MEDIA, ("[ODM] No object manager found for the scene (URL: %s), buffer occupancy will remain unchanged\n", service->url)); i=0; while ((odm = (GF_ObjectManager*)gf_list_enum(od_list, &i))) { - u32 j, count; if (!odm->codec) continue; - count = gf_list_count(odm->channels); - for (j=0; j<count; j++) { - GF_Channel *ch = (GF_Channel *)gf_list_get(odm->channels, j); - if (ch->service != service) continue; - if
(com->base.on_channel && (com->base.on_channel != ch)) continue; - if (/*!ch->MaxBuffer || */ch->dispatch_after_db || ch->bypass_sl_and_db || ch->IsEndOfStream) continue; - //perform buffer management only on base layer -this is because we don't signal which ESs are on/off in the underlying service ... - if (ch->esd->dependsOnESID) continue; - if (ch->MaxBuffer>com->buffer.max) com->buffer.max = ch->MaxBuffer; - if (ch->MinBuffer<com->buffer.min) com->buffer.min = ch->MinBuffer; - if (ch->IsClockInit) { - /*if we don't have more units (compressed or not) than requested max for the composition memory, request more data*/ - if (ch->odm->codec && ch->odm->codec->CB && (odm->codec->CB->UnitCount + ch->AU_Count <= odm->codec->CB->Capacity)) { - com->buffer.occupancy = 0; -// com->buffer.occupancy = ch->BufferTime; - } else if ( (u32) ch->BufferTime < com->buffer.occupancy) { - com->buffer.occupancy = ch->BufferTime; - } - } - } + gather_buffer_level(odm, service, com, &max_buffer_time); } gf_mx_v(term->mm_mx); -// fprintf(stderr, "Buffer occupancy %d\n", com->buffer.occupancy); if (com->buffer.occupancy==(u32) -1) com->buffer.occupancy = 0; + + //in bench mode return the 1 if one of the buffer is full (eg sleep until all buffers are not full), 0 otherwise + if (term->bench_mode) { + com->buffer.occupancy = (max_buffer_time>com->buffer.max) ? 2 : 0; + com->buffer.max = 1; + com->buffer.min = 0; + } return; } if (com->command_type==GF_NET_SERVICE_INFO) { @@ -517,10 +532,32 @@ static void term_on_command(void *user_priv, GF_ClientService *service, GF_Netwo return; } if (com->command_type==GF_NET_SERVICE_MEDIA_CAP_QUERY) { - gf_sc_get_av_caps(term->compositor, &com->mcaps.width, &com->mcaps.height, &com->mcaps.bpp, &com->mcaps.channels, &com->mcaps.sample_rate); + gf_sc_get_av_caps(term->compositor, &com->mcaps.width, &com->mcaps.height, &com->mcaps.display_bit_depth, &com->mcaps.audio_bpp, &com->mcaps.channels, &com->mcaps.sample_rate); return; } + if (com->command_type==GF_NET_ASSOCIATED_CONTENT_LOCATION) { + GF_Scene *scene; + if (service->owner->subscene) { + scene = service->owner->subscene; + } else if (service->owner->parentscene) { + scene = service->owner->parentscene; + } + gf_scene_register_associated_media(scene, &com->addon_info); + return; + } + if (com->command_type==GF_NET_ASSOCIATED_CONTENT_TIMING) { + GF_Scene *scene; + if (service->owner->subscene) { + scene = service->owner->subscene; + } else if (service->owner->parentscene) { + scene = service->owner->parentscene; + } + gf_scene_notify_associated_media_timeline(scene, &com->addon_time); + return; + } + + if (!com->base.on_channel) return; ch = gf_term_get_channel(service, com->base.on_channel); @@ -610,13 +647,8 @@ static void term_on_command(void *user_priv, GF_ClientService *service, GF_Netwo gf_es_buffer_off(ch); break; case GF_NET_CHAN_BUFFER: - //lock channel before updating buffer info, otherwise we may collect wrong HTML media info - gf_mx_p(ch->mx); - ch->BufferTime = com->buffer.occupancy; - ch->MaxBuffer = com->buffer.max; + ch->BufferTime = 100 * com->buffer.occupancy / com->buffer.max; gf_scene_buffering_info(ch->odm->parentscene ?
ch->odm->parentscene : ch->odm->subscene); - ch->MaxBuffer = 0; - gf_mx_v(ch->mx); break; default: return; @@ -696,7 +728,7 @@ static Bool check_extension(const char *szExtList, char *szExt) } -static GF_InputService *gf_term_can_handle_service(GF_Terminal *term, const char *url, const char *parent_url, Bool no_mime_check, char **out_url, GF_Err *ret_code, GF_DownloadSession **the_session) +static GF_InputService *gf_term_can_handle_service(GF_Terminal *term, const char *url, const char *parent_url, Bool no_mime_check, char **out_url, GF_Err *ret_code, GF_DownloadSession **the_session, char **out_mime_type) { u32 i; GF_Err e; @@ -712,6 +744,7 @@ static GF_InputService *gf_term_can_handle_service(GF_Terminal *term, const char mime_type = NULL; GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Looking for plugin for URL %s\n", url)); *out_url = NULL; + *out_mime_type = NULL; sURL = NULL; if (!url || !strncmp(url, "\\\\", 2) ) { (*ret_code) = GF_URL_ERROR; @@ -766,7 +799,7 @@ static GF_InputService *gf_term_can_handle_service(GF_Terminal *term, const char const char *sPlug = gf_cfg_get_key(term->user->config, "MimeTypes", mime_type); GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Mime type found: %s\n", mime_type)); if (!sPlug) { - gf_free(mime_type); + *out_mime_type = mime_type; mime_type=NULL; } if (sPlug) sPlug = strrchr(sPlug, '"'); @@ -885,13 +918,16 @@ exit: if (the_session && *the_session) { gf_dm_sess_del(*the_session); } + if (mime_type) gf_free(mime_type); + mime_type = NULL; + if (*out_mime_type) gf_free(*out_mime_type); + *out_mime_type = NULL; } else { *out_url = sURL; GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[Terminal] Found input plugin %s for URL %s (%s)\n", ifce->module_name, sURL, mime_type ? mime_type : "no mime type")); } if (mime_type) - gf_free(mime_type); - mime_type = NULL; + *out_mime_type = mime_type; return ifce; } @@ -899,8 +935,9 @@ GF_ClientService *gf_term_service_new(GF_Terminal *term, struct _od_manager *own { GF_DownloadSession *download_session = NULL; char *sURL; + char *mime; GF_ClientService *serv; - GF_InputService *ifce = gf_term_can_handle_service(term, url, parent_url, 0, &sURL, ret_code, &download_session); + GF_InputService *ifce = gf_term_can_handle_service(term, url, parent_url, 0, &sURL, ret_code, &download_session, &mime); if (!ifce) return NULL; GF_SAFEALLOC(serv, GF_ClientService); @@ -908,6 +945,7 @@ GF_ClientService *gf_term_service_new(GF_Terminal *term, struct _od_manager *own serv->owner = owner; serv->ifce = ifce; serv->url = sURL; + serv->mime = mime; serv->Clocks = gf_list_new(); serv->dnloads = gf_list_new(); serv->pending_service_session = download_session; @@ -923,13 +961,15 @@ Bool gf_term_is_supported_url(GF_Terminal *term, const char *fileName, Bool use_ GF_InputService *ifce; GF_Err e; char *sURL; + char *mime=NULL; char *parent_url = NULL; if (use_parent_url && term->root_scene) parent_url = term->root_scene->root_od->net_service->url; - ifce = gf_term_can_handle_service(term, fileName, parent_url, no_mime_check, &sURL, &e, NULL); + ifce = gf_term_can_handle_service(term, fileName, parent_url, no_mime_check, &sURL, &e, NULL, &mime); if (!ifce) return 0; gf_modules_close_interface((GF_BaseInterface *) ifce); gf_free(sURL); + if (mime) gf_free(mime); return 1; } @@ -1024,6 +1064,7 @@ void gf_term_delete_net_service(GF_ClientService *ns) gf_modules_close_interface((GF_BaseInterface *)ns->ifce); gf_free(ns->url); + gf_free(ns->mime); /*delete all the clocks*/ diff --git a/src/terminal/object_browser.c b/src/terminal/object_browser.c 
index 1cc3a66..4fa110b 100644 --- a/src/terminal/object_browser.c +++ b/src/terminal/object_browser.c @@ -255,6 +255,9 @@ GF_Err gf_term_get_object_info(GF_Terminal *term, GF_ObjectManager *odm, GF_Medi if (odm->codec->CB) { info->cb_max_count = odm->codec->CB->Capacity; info->cb_unit_count = odm->codec->CB->UnitCount; + if (odm->codec->direct_vout) { + info->direct_video_memory = 1; + } } } diff --git a/src/terminal/object_manager.c b/src/terminal/object_manager.c index 424c163..4aa2ebe 100644 --- a/src/terminal/object_manager.c +++ b/src/terminal/object_manager.c @@ -25,6 +25,7 @@ #include +#include #include #include "media_memory.h" #include "media_control.h" @@ -596,6 +597,24 @@ GF_Err ODM_ValidateOD(GF_ObjectManager *odm, Bool *hasInline) return GF_OK; } +static Bool gf_odm_should_auto_select(GF_ObjectManager *odm) +{ + u32 i, count; + if (gf_codec_is_scene_or_image(odm->codec)) return GF_TRUE; + + if (odm->parentscene && !odm->parentscene->is_dynamic_scene) return GF_TRUE; + + count = gf_list_count(odm->parentscene->resources); + for (i=0; i<count; i++) { + GF_ObjectManager *an_odm = gf_list_get(odm->parentscene->resources, i); + if (an_odm==odm) continue; + if (!an_odm->codec) continue; + if (an_odm->codec->type != odm->codec->type) continue; + //same type - if the first one has been autumatically activated, do not activate this one + if (an_odm->state == GF_ODM_STATE_PLAY) return GF_FALSE; + } + return GF_TRUE; +} /*connection of OD and setup of streams. The streams are not requested if the OD @@ -724,7 +743,8 @@ void gf_odm_setup_object(GF_ObjectManager *odm, GF_ClientService *serv) if (odm->parentscene) { GF_Event evt; - gf_scene_setup_object(odm->parentscene, odm); + if (!odm->scalable_addon) + gf_scene_setup_object(odm->parentscene, odm); /*setup node decoder*/ if (odm->mo && odm->codec && odm->codec->decio && (odm->codec->decio->InterfaceType==GF_NODE_DECODER_INTERFACE) ) { @@ -732,7 +752,7 @@ void gf_odm_setup_object(GF_ObjectManager *odm, GF_ClientService *serv) GF_Node *n = gf_event_target_get_node(gf_mo_event_target_get(odm->mo, 0)); if (n) ndec->AttachNode(ndec, n); - /*not clear in the spec how the streams attached to AFC are started - default to "right now"*/ + /*not clear in the spec how the streams attached to AFX are started - default to "right now"*/ gf_odm_start(odm, 0); } @@ -775,7 +795,7 @@ void gf_odm_setup_object(GF_ObjectManager *odm, GF_ClientService *serv) have to wait for an entire image carousel period to start filling the buffers, which is sub-optimal we also force a prefetch for object declared outside the OD stream to make sure we don't loose any data before object declaration and play as can be the case with MPEG2 TS (first video packet right after the PMT) - this should be refined*/ - else if ( ((odm->flags & GF_ODM_NO_TIME_CTRL) || (odm->flags & GF_ODM_NOT_IN_OD_STREAM)) && (odm->parentscene->selected_service_id == odm->OD->ServiceID)) { + else if ( ((odm->flags & GF_ODM_NO_TIME_CTRL) || (odm->flags & GF_ODM_NOT_IN_OD_STREAM)) && gf_odm_should_auto_select(odm) && (odm->parentscene->selected_service_id == odm->OD->ServiceID)) { Bool force_play = GF_FALSE; if (odm->state==GF_ODM_STATE_STOP) { odm->flags |= GF_ODM_PREFETCH; @@ -785,6 +805,7 @@ void gf_odm_setup_object(GF_ObjectManager *odm, GF_ClientService *serv) else if ((odm->state==GF_ODM_STATE_PLAY) && (gf_list_del_item(odm->term->media_queue, odm)>=0) ) { force_play = GF_TRUE; } + if (force_play) { odm->flags |= GF_ODM_INITIAL_BROADCAST_PLAY; GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[ODM%d] Inserted from broadcast or input service - forcing play\n",
odm->OD->objectDescriptorID)); @@ -801,14 +822,25 @@ void gf_odm_setup_object(GF_ObjectManager *odm, GF_ClientService *serv) if (odm->OD_PL) { gf_scene_select_object(odm->parentscene, odm); odm->OD_PL = 0; + gf_term_lock_net(odm->term, GF_FALSE); + return; + } + + if (odm->addon) { + gf_term_lock_net(odm->term, GF_FALSE); + + if (! odm->addon->scalable_type) { + gf_scene_select_object(odm->parentscene, odm); + } + return; } + if (odm->parentscene==odm->term->root_scene) { gf_term_lock_net(odm->term, GF_FALSE); evt.type = GF_EVENT_STREAMLIST; gf_term_send_event(odm->term,&evt); - - gf_term_lock_net(odm->term, GF_TRUE); + return; } } @@ -829,7 +861,6 @@ void ODM_CheckChannelService(GF_Channel *ch) GF_EXPORT GF_Err gf_odm_setup_es(GF_ObjectManager *odm, GF_ESD *esd, GF_ClientService *serv, GF_MediaObject *sync_ref) { - GF_CodecCapability cap; GF_Channel *ch; GF_Clock *ck; GF_List *ck_namespace; @@ -999,7 +1030,7 @@ clock_setup: /*we have a media or user-specific codec...*/ if (!odm->codec) { odm->codec = gf_codec_new(odm, esd, (esd->decoderConfig->streamType==GF_STREAM_VISUAL) ? odm->Visual_PL : odm->Audio_PL, &e); - if (!e) gf_term_add_codec(odm->term, odm->codec); + if (!e && odm->codec) gf_term_add_codec(odm->term, odm->codec); } dec = odm->codec; break; @@ -1032,7 +1063,7 @@ clock_setup: } dec = odm->subscene->scene_codec; } else { - /*this is a bit tricky: the scene decoder needs to ba called with the dummy streams of this + /*this is a bit tricky: the scene decoder needs to be called with the dummy streams of this object, so we associate the main decoder to this object*/ odm->codec = dec = gf_codec_use_codec(odm->parentscene->scene_codec, odm); gf_term_add_codec(odm->term, odm->codec); @@ -1042,23 +1073,20 @@ clock_setup: default: if (!odm->codec) { odm->codec = gf_codec_new(odm, esd, odm->OD_PL, &e); - if (!e) gf_term_add_codec(odm->term, odm->codec); + if (!e && odm->codec) gf_term_add_codec(odm->term, odm->codec); } dec = odm->codec; break; } - /*if we have a decoder, set up the channel and co.*/ - if (!dec) { - if (e) { - gf_es_del(ch); - return e; - } + if (!dec && e) { + gf_es_del(ch); + return e; } /*setup scene decoder*/ - if (dec->decio && (dec->decio->InterfaceType==GF_SCENE_DECODER_INTERFACE) ) { + if (dec && dec->decio && (dec->decio->InterfaceType==GF_SCENE_DECODER_INTERFACE) ) { GF_SceneDecoder *sdec = (GF_SceneDecoder *) dec->decio; scene = odm->subscene ? 
odm->subscene : odm->parentscene; if (sdec->AttachScene) { @@ -1075,7 +1103,11 @@ clock_setup: } } } - { + + ch->es_state = GF_ESM_ES_SETUP; + ch->odm = odm; + + if (dec) { GF_CodecCapability cap; cap.CapCode = GF_CODEC_RAW_MEDIA; gf_codec_get_capability(dec, &cap); @@ -1083,13 +1115,8 @@ clock_setup: dec->flags |= GF_ESM_CODEC_IS_RAW_MEDIA; dec->process = gf_codec_process_private_media; } - } - ch->es_state = GF_ESM_ES_SETUP; - ch->odm = odm; - - /*get media padding BEFORE channel setup, since we use it on channel connect ack*/ - if (dec) { + /*get media padding BEFORE channel setup, since we use it on channel connect ack*/ cap.CapCode = GF_CODEC_PADDING_BYTES; gf_codec_get_capability(dec, &cap); ch->media_padding_bytes = cap.cap.valueInt; @@ -1118,7 +1145,7 @@ clock_setup: cs->dec = dec; /*HACK: special case when OD resources are statically described in the ESD itself (ISMA streaming)*/ - if ((ch->esd->decoderConfig->streamType==GF_STREAM_OD) && strstr(ch->esd->URLString, "data:application/mpeg4-od-au;") ) + if (dec && (ch->esd->decoderConfig->streamType==GF_STREAM_OD) && strstr(ch->esd->URLString, "data:application/mpeg4-od-au;") ) dec->flags |= GF_ESM_CODEC_IS_STATIC_OD; gf_term_lock_net(odm->term, 1); @@ -1160,7 +1187,7 @@ GF_Err gf_odm_post_es_setup(GF_Channel *ch, GF_Codec *dec, GF_Err had_err) } /*insert channel*/ - if (dec) gf_list_insert(ch->odm->channels, ch, 0); + gf_list_insert(ch->odm->channels, ch, 0); if (ch->service) { ch->es_state = GF_ESM_ES_WAIT_FOR_ACK; @@ -1198,7 +1225,7 @@ GF_Err gf_odm_post_es_setup(GF_Channel *ch, GF_Codec *dec, GF_Err had_err) gf_term_message(ch->odm->term, ch->service->url, "Audio Setup failed", e); break; } - gf_list_rem(ch->odm->channels, 0); + gf_list_del_item(ch->odm->channels, ch); /*disconnect*/ ch->service->ifce->DisconnectChannel(ch->service->ifce, ch); if (ch->esd->URLString) { @@ -1360,6 +1387,12 @@ void gf_odm_start(GF_ObjectManager *odm, u32 media_queue_state) GF_LOG(GF_LOG_INFO, GF_LOG_MEDIA, ("[ODM%d] CH%d: At OTB %u starting channel\n", odm->OD->objectDescriptorID, ch->esd->ESID, gf_clock_time(ch->clock))); } skip_register = 0; + + } + //wait for end of setup + else if (odm->state==GF_ODM_STATE_IN_SETUP) { + media_queue_state=0; + skip_register = 0; } /*object is already started - only reinsert in media queue if this function was called on an object already in the queue*/ else { @@ -1456,10 +1489,24 @@ void gf_odm_play(GF_ObjectManager *odm) ck_time = (Double) (s64) odm->media_start_time; ck_time /= 1000; } + else if (odm->parentscene && odm->parentscene->root_od->media_start_time && !ch->clock->clock_init) { + ck_time = (Double) (s64) odm->parentscene->root_od->media_start_time; + ck_time /= 1000; + } /*play from current time*/ else { ck_time = gf_clock_time(ch->clock); + if (odm->parentscene && odm->parentscene->root_od->addon) { + ck_time = gf_scene_adjust_time_for_addon(odm->parentscene, (u32) ck_time, odm->parentscene->root_od->addon); + + if (odm->scalable_addon) { + //this is a scalable extension to an object in the parent scene + gf_scene_select_scalable_addon(odm->parentscene->root_od->parentscene, odm); + } + + } ck_time /= 1000; + /*handle initial start - MPEG-4 is a bit annoying here, streams are not started through OD but through scene nodes. 
If the stream runs on the BIFS/OD clock, the clock is already started at this point and we're sure to get at least a one-frame delay in PLAY, so just remove it - note we're generous but this shouldn't hurt*/ @@ -1615,7 +1662,10 @@ void gf_odm_stop(GF_ObjectManager *odm, Bool force_close) /*little opt for image codecs: don't actually stop the OD*/ if (!force_close && odm->codec && odm->codec->CB && !odm->codec->CB->no_allocation) { - if (odm->codec->CB->Capacity==1) return; + if (odm->codec->CB->Capacity==1) { + gf_cm_abort_buffering(odm->codec->CB); + return; + } } /*if raw media, stop all channels before sending stop command to network, to avoid new media frames to be set @@ -2025,12 +2075,25 @@ void gf_odm_init_segments(GF_ObjectManager *odm, GF_List *list, MFURL *url) void gf_odm_signal_eos(GF_ObjectManager *odm) { - //FIXME make this work with gui ? - if (odm->parentscene && (odm->parentscene != odm->term->root_scene) ) return; - if (gf_term_check_end_of_scene(odm->term, 0)) { - GF_Event evt; - evt.type = GF_EVENT_EOS; - gf_term_send_event(odm->term, &evt); + if (odm->parentscene && (odm->parentscene != odm->term->root_scene) ) { + GF_ObjectManager *root = odm->parentscene->root_od; + Bool is_over = 0; + + if (!gf_scene_check_clocks(root->net_service, root->subscene)) return; + if (root->subscene->is_dynamic_scene) + is_over = 1; + else + is_over = gf_sc_is_over(odm->term->compositor, root->subscene->graph); + + if (is_over) { + gf_term_service_media_event(root, GF_EVENT_MEDIA_ENDED); + } + } else { + if (gf_term_check_end_of_scene(odm->term, 0)) { + GF_Event evt; + evt.type = GF_EVENT_EOS; + gf_term_send_event(odm->term, &evt); + } } } diff --git a/src/terminal/scene.c b/src/terminal/scene.c index 82b6047..8557116 100644 --- a/src/terminal/scene.c +++ b/src/terminal/scene.c @@ -43,6 +43,8 @@ #include "input_sensor.h" #include "media_memory.h" +void gf_scene_reset_addons(GF_Scene *scene); + GF_EXPORT Double gf_scene_get_time(void *_is) { @@ -80,6 +82,7 @@ GF_Scene *gf_scene_new(GF_Scene *parentScene) tmp->resources = gf_list_new(); tmp->scene_objects = gf_list_new(); tmp->extra_scenes = gf_list_new(); + tmp->declared_addons = gf_list_new(); /*init inline scene*/ if (parentScene) { tmp->graph = gf_sg_new_subscene(parentScene->graph); @@ -156,6 +159,8 @@ void gf_scene_del(GF_Scene *scene) gf_list_del(scene->keynavigators); #endif + gf_list_del(scene->declared_addons); + if (scene->audio_url.url) gf_free(scene->audio_url.url); if (scene->visual_url.url) gf_free(scene->visual_url.url); if (scene->text_url.url) gf_free(scene->text_url.url); @@ -255,8 +260,11 @@ void gf_scene_disconnect(GF_Scene *scene, Bool for_shutdown) gf_sc_set_scene(scene->root_od->term->compositor, NULL); } + gf_scene_reset_addons(scene); + /*release the scene - at this stage, we no longer have any node stack refering to our media objects */ if (dec && dec->ReleaseScene) dec->ReleaseScene(dec); + gf_sc_node_destroy(scene->root_od->term->compositor, NULL, scene->graph); gf_sg_reset(scene->graph); scene->graph_attached = 0; @@ -530,27 +538,28 @@ void gf_scene_buffering_info(GF_Scene *scene) -void gf_scene_notify_event(GF_Scene *scene, u32 event_type, GF_Node *n, void *_event, GF_Err code) +void gf_scene_notify_event(GF_Scene *scene, u32 event_type, GF_Node *n, void *_event, GF_Err code, Bool no_queueing) { /*fire resize event*/ #ifndef GPAC_DISABLE_SVG GF_Node *root; u32 i, count; u32 w, h; - GF_DOM_Event evt, *event; - event = (GF_DOM_Event *)_event; + GF_DOM_Event evt, *dom_event; + dom_event = (GF_DOM_Event *)_event; 
if (!scene) return; root = gf_sg_get_root_node(scene->graph); - if (!event) { + if (!dom_event) { memset(&evt, 0, sizeof(GF_DOM_Event)); - event = &evt; + dom_event = &evt; w = h = 0; gf_sg_get_scene_size_info(scene->graph, &w, &h); evt.type = event_type; evt.screen_rect.width = INT2FIX(w); evt.screen_rect.height = INT2FIX(h); + evt.key_flags = scene->is_dynamic_scene; if (root) { #ifndef GPAC_DISABLE_VRML switch (gf_node_get_tag(root)) { @@ -570,13 +579,28 @@ void gf_scene_notify_event(GF_Scene *scene, u32 event_type, GF_Node *n, void *_e evt.error_state = code; } if (n) { - gf_dom_event_fire(n, event); + if (no_queueing) { + gf_dom_event_fire(n, dom_event); + } else { + gf_sc_queue_dom_event(scene->root_od->term->compositor, n, dom_event); + } } else { - if (root) gf_dom_event_fire(root, event); + if (root) { + if (no_queueing) { + gf_dom_event_fire(root, dom_event); + } else { + gf_sc_queue_dom_event(scene->root_od->term->compositor, root, dom_event); + } + } count=scene->root_od->mo ? gf_mo_event_target_count(scene->root_od->mo) : 0; for (i=0;i<count;i++) { - gf_dom_event_fire(gf_event_target_get_node(gf_mo_event_target_get(scene->root_od->mo, i)), event); + GF_Node *an = gf_event_target_get_node(gf_mo_event_target_get(scene->root_od->mo, i)); + if (no_queueing) { + gf_dom_event_fire(an, dom_event); + } else { + gf_sc_queue_dom_event(scene->root_od->term->compositor, an, dom_event); + } } } #endif @@ -623,7 +647,7 @@ void gf_scene_attach_to_compositor(GF_Scene *scene) gf_sc_set_size(scene->root_od->term->compositor, w, h); } /*trigger a scene attach event*/ - gf_scene_notify_event(scene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, GF_OK); + gf_scene_notify_event(scene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, GF_OK, GF_FALSE); } } @@ -695,7 +719,12 @@ restart: if (obj->odm) { Bool can_reuse = GF_TRUE; Bool timeline_locked = (obj->odm->flags & GF_ODM_INHERIT_TIMELINE) ?
GF_TRUE : GF_FALSE; - if (timeline_locked != lock_timelines) + + //addon object always share the timeline + if (obj->odm->addon || obj->odm->parentscene->root_od->addon) + timeline_locked = lock_timelines = 1; + + if (timeline_locked != lock_timelines) continue; gf_term_lock_media_queue(scene->root_od->term, GF_TRUE); @@ -956,6 +985,7 @@ static void IS_UpdateVideoPos(GF_Scene *scene) { MFURL url; M_Transform2D *tr; + M_Layer2D *layer; GF_MediaObject *mo; u32 w, h, v_w, v_h; if (!scene->visual_url.OD_ID && !scene->visual_url.url) return; @@ -975,6 +1005,20 @@ static void IS_UpdateVideoPos(GF_Scene *scene) tr->translation.y = INT2FIX((s32) (h - v_h)) / 2; gf_node_dirty_set((GF_Node *)tr, 0, 0); + + tr = (M_Transform2D *) gf_sg_find_node_by_name(scene->graph, "ADDON_TRANS"); + if (!tr) return; + tr->translation.x = INT2FIX(v_w) / 4; + tr->translation.y = INT2FIX(v_h) / 4; + gf_node_dirty_set((GF_Node *)tr, 0, 0); + + layer = (M_Layer2D *) gf_sg_find_node_by_name(scene->graph, "ADDON_LAYER"); + if (!layer) return; + layer->size.x = INT2FIX(v_w) / 2; + layer->size.y = INT2FIX(v_h) / 2; + gf_node_dirty_set((GF_Node *)layer, 0, 0); + + if (scene->root_od->term->root_scene == scene) { //if (scene->graph_attached) gf_sc_set_scene(scene->root_od->term->compositor, NULL); //gf_sc_set_scene(scene->root_od->term->compositor, scene->graph); @@ -1009,11 +1053,17 @@ static void set_media_url(GF_Scene *scene, SFURL *media_url, GF_Node *node, MFU u32 i=0; GF_ObjectManager *odm = NULL; while ((odm = (GF_ObjectManager*)gf_list_enum(scene->resources, &i))) { + if (odm->scalable_addon) + continue; + if (type==GF_STREAM_TEXT) { if (!odm->codec || ((odm->codec->type!=type) && (odm->codec->type!=GF_STREAM_ND_SUBPIC))) continue; } else if (type==GF_STREAM_SCENE) { if (!odm->subscene || (!odm->subscene->scene_codec && !odm->subscene->is_dynamic_scene) ) continue; + + if (odm->subscene->root_od->addon) + continue; } else { if (!odm->codec || (odm->codec->type!=type)) continue; @@ -1079,6 +1129,9 @@ void gf_scene_regenerate(GF_Scene *scene) M_MovieTexture *mt; M_AnimationStream *as; M_Inline *dims; + M_Transform2D *addon_tr; + M_Layer2D *addon_layer; + M_Inline *addon_scene; if (scene->is_dynamic_scene != 1) return; @@ -1147,12 +1200,27 @@ void gf_scene_regenerate(GF_Scene *scene) /*3GPP DIMS streams controlled */ n1 = gf_sg_get_root_node(scene->graph); - dims = (M_Inline *) is_create_node(scene->graph, TAG_MPEG4_Inline, "DYN_SCENE"); + dims = (M_Inline *) is_create_node(scene->graph, TAG_MPEG4_Inline, "DIMS_SCENE"); gf_node_list_add_child( &((GF_ParentNode *)n1)->children, (GF_Node*)dims); gf_node_register((GF_Node *)dims, n1); + + /*Media addon scene*/ + n1 = gf_sg_get_root_node(scene->graph); + addon_tr = (M_Transform2D *) is_create_node(scene->graph, TAG_MPEG4_Transform2D, "ADDON_TRANS"); + gf_node_list_add_child( &((GF_ParentNode *)n1)->children, (GF_Node*)addon_tr); + gf_node_register((GF_Node *)addon_tr, n1); + + addon_layer = (M_Layer2D *) is_create_node(scene->graph, TAG_MPEG4_Layer2D, "ADDON_LAYER"); + gf_node_list_add_child( &((GF_ParentNode *)addon_tr)->children, (GF_Node*)addon_layer); + gf_node_register((GF_Node *)addon_layer, (GF_Node *)addon_tr); + + addon_scene = (M_Inline *) is_create_node(scene->graph, TAG_MPEG4_Inline, "ADDON_SCENE"); + gf_node_list_add_child( &((GF_ParentNode *)addon_layer)->children, (GF_Node*)addon_scene); + gf_node_register((GF_Node *)addon_scene, (GF_Node *)addon_layer); } + ac = (M_AudioClip *) gf_sg_find_node_by_name(scene->graph, "DYN_AUDIO"); set_media_url(scene, 
&scene->audio_url, (GF_Node*)ac, &ac->url, GF_STREAM_AUDIO); @@ -1162,7 +1230,7 @@ void gf_scene_regenerate(GF_Scene *scene) as = (M_AnimationStream *) gf_sg_find_node_by_name(scene->graph, "DYN_TEXT"); set_media_url(scene, &scene->text_url, (GF_Node*)as, &as->url, GF_STREAM_TEXT); - dims = (M_Inline *) gf_sg_find_node_by_name(scene->graph, "DYN_SCENE"); + dims = (M_Inline *) gf_sg_find_node_by_name(scene->graph, "DIMS_SCENE"); set_media_url(scene, &scene->dims_url, (GF_Node*)dims, &dims->url, GF_STREAM_SCENE); gf_sc_lock(scene->root_od->term->compositor, 0); @@ -1176,7 +1244,7 @@ void gf_scene_regenerate(GF_Scene *scene) IS_UpdateVideoPos(scene); } else { scene->graph_attached = 1; - gf_scene_notify_event(scene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, GF_OK); + gf_scene_notify_event(scene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, GF_OK, GF_FALSE); gf_term_invalidate_compositor(scene->root_od->term); } } @@ -1211,7 +1279,9 @@ void gf_scene_select_object(GF_Scene *scene, GF_ObjectManager *odm) char *url; if (!scene->is_dynamic_scene || !scene->graph_attached || !odm) return; - if (!odm->codec) return; + if (!odm->codec) { + if (!odm->addon) return; + } if (odm->state) { if (check_odm_deactivate(&scene->audio_url, odm, gf_sg_find_node_by_name(scene->graph, "DYN_AUDIO")) ) return; @@ -1219,6 +1289,16 @@ void gf_scene_select_object(GF_Scene *scene, GF_ObjectManager *odm) if (check_odm_deactivate(&scene->text_url, odm, gf_sg_find_node_by_name(scene->graph, "DYN_TEXT") )) return; } + + if (!odm->codec && odm->subscene) { + M_Inline *dscene = (M_Inline *) gf_sg_find_node_by_name(scene->graph, "ADDON_SCENE"); + + gf_sg_vrml_field_copy(&dscene->url, &odm->mo->URLs, GF_SG_VRML_MFURL); + gf_node_changed((GF_Node *)dscene, NULL); + IS_UpdateVideoPos(scene); + return; + } + if (odm->codec->type == GF_STREAM_AUDIO) { M_AudioClip *ac = (M_AudioClip *) gf_sg_find_node_by_name(scene->graph, "DYN_AUDIO"); if (!ac) return; @@ -1352,8 +1432,6 @@ void gf_scene_force_size(GF_Scene *scene, u32 width, u32 height) /*for now only allowed when no scene info*/ if (!scene->is_dynamic_scene) return; - gf_sc_lock(scene->root_od->term->compositor, 1); - GF_LOG(GF_LOG_INFO, GF_LOG_COMPOSE, ("[Compositor] Changing scene size to %d x %d\n", width, height)); if (scene->root_od->term->root_scene == scene) { @@ -1393,9 +1471,7 @@ void gf_scene_force_size(GF_Scene *scene, u32 width, u32 height) IS_UpdateVideoPos(scene); #endif - gf_sc_lock(scene->root_od->term->compositor, 0); - - gf_scene_notify_event(scene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, GF_OK); + gf_scene_notify_event(scene, GF_EVENT_SCENE_ATTACHED, NULL, NULL, GF_OK, GF_FALSE); } @@ -1509,7 +1585,7 @@ Bool gf_scene_check_clocks(GF_ClientService *ns, GF_Scene *scene) } i=0; while ( (odm = (GF_ObjectManager*)gf_list_enum(scene->resources, &i)) ) { - if (odm->net_service != ns) { + if (odm->net_service && (odm->net_service != ns)) { if (!gf_scene_check_clocks(odm->net_service, NULL)) return 0; } else if (odm->codec && odm->codec->CB && !gf_cm_is_eos(odm->codec->CB) ) { return 0; @@ -1579,6 +1655,7 @@ void gf_scene_generate_views(GF_Scene *scene, char *url, char *parent_path) M_Inline *inl; #endif GF_Event evt; + gf_sc_node_destroy(scene->root_od->term->compositor, NULL, scene->graph); gf_sg_reset(scene->graph); scene->force_single_timeline = 1; @@ -1663,3 +1740,209 @@ void gf_scene_generate_views(GF_Scene *scene, char *url, char *parent_path) evt.connect.is_connected = 1; gf_term_send_event(scene->root_od->term, &evt); } + +void scene_reset_addon(GF_AddonMedia *addon, Bool 
disconnect) +{ + if (disconnect && addon->root_od) gf_odm_disconnect(addon->root_od, 1); + if (addon->url) gf_free(addon->url); + gf_free(addon); +} + +void gf_scene_reset_addons(GF_Scene *scene) +{ + while (gf_list_count(scene->declared_addons)) { + GF_AddonMedia *addon = gf_list_last(scene->declared_addons); + gf_list_rem_last(scene->declared_addons); + if (addon==scene->active_addon) continue; + + scene_reset_addon(addon, 0); + } + if (scene->active_addon) scene_reset_addon(scene->active_addon, 0); +} + +static void load_associated_media(GF_Scene *scene, GF_AddonMedia *addon) +{ + GF_MediaObject *mo; + MFURL url; + SFURL sfurl; + + if (!addon->enabled) return; + + url.count=1; + url.vals = &sfurl; + url.vals[0].OD_ID = GF_MEDIA_EXTERNAL_ID; + url.vals[0].url = (char *)addon->url; + + //we may need to change the object type once we have more ideas what the external resource is about. + //By default we start with scene + //we force the timeline of the addon to be locked with the main scene + mo = gf_scene_get_media_object(scene, &url, GF_MEDIA_OBJECT_SCENE, GF_TRUE); + + if (!mo) return; + gf_free(addon->url); + addon->url = NULL; + addon->root_od = mo->odm; + mo->odm->addon = addon; +} + +void gf_scene_register_associated_media(GF_Scene *scene, GF_AssociatedContentLocation *addon_info) +{ + GF_AddonMedia *addon; + GF_Event evt; + u32 i, count; + + if (!scene->is_dynamic_scene) return; + + count = gf_list_count(scene->declared_addons); + for (i=0; ideclared_addons, i); + if (addon->timeline_id==addon_info->timeline_id) { + if (addon_info->reload_external) { + //send message to service handler + } + return; + } + } + + if (!addon_info->external_URL) { + //NULL (nothing) will be active soon + if (addon_info->activation_countdown) return; + //otherwise reset addon + if (scene->active_addon) scene_reset_addon(scene->active_addon, 1); + scene->active_addon = NULL; + return; + } + + GF_SAFEALLOC(addon, GF_AddonMedia); + addon->timeline_id = addon_info->timeline_id; + addon->is_splicing = addon_info->is_splicing; + addon->activation_time = gf_scene_get_time(scene)+addon_info->activation_countdown; + addon->url = gf_strdup(addon_info->external_URL); + addon->media_timescale = 1; + addon->timeline_ready = (addon_info->timeline_id<0) ? 
1 : 0; + if (addon->timeline_ready && !scene->active_addon) scene->active_addon = addon; + gf_list_add(scene->declared_addons, addon); + + + evt.type = GF_EVENT_ADDON_DETECTED; + evt.addon_connect.addon_url = addon->url; + addon->enabled = gf_term_send_event(scene->root_od->term,&evt); + + if (addon->timeline_ready) + load_associated_media(scene, addon); +} + +void gf_scene_notify_associated_media_timeline(GF_Scene *scene, GF_AssociatedContentTiming *addon_time) +{ + GF_AddonMedia *addon = scene->active_addon; + //locate the active timeline + if (!scene->active_addon || (scene->active_addon->timeline_id!=addon_time->timeline_id)) { + u32 i, count = gf_list_count(scene->declared_addons); + for (i=0; ideclared_addons, i); + if (addon->timeline_id==addon_time->timeline_id) + break; + addon = NULL; + } + if (!addon) return; + + count = i; + for (i=0; ideclared_addons, i); + //we are adding a non splicing point: discard all previously declared addons + if (!addon->is_splicing + //this is a splicing point, discard all previsously declared splicing addons + || prev_addon->is_splicing + ) { + Bool discard = GF_FALSE; + scene_reset_addon(prev_addon, GF_TRUE); + gf_list_rem(scene->declared_addons, i); + i--; + count--; + } + } + + scene->active_addon = addon; + if (!scene->active_addon->timeline_ready) { + scene->active_addon->timeline_ready = GF_TRUE; + load_associated_media(scene, addon); + } + } + + assert(scene->active_addon->timeline_id == addon_time->timeline_id); + scene->active_addon->media_pts = addon_time->media_pts; + scene->active_addon->media_timestamp = addon_time->media_timestamp; + scene->active_addon->media_timescale = addon_time->media_timescale; +} + +u32 gf_scene_adjust_time_for_addon(GF_Scene *scene, u32 clock_time, GF_AddonMedia *addon) +{ + s64 media_ts_ms; + if (!addon->timeline_ready) + return clock_time; + assert(scene->root_od->addon); + assert(scene->root_od->addon==addon); + + media_ts_ms = clock_time; + + media_ts_ms -= (addon->media_pts/90); + media_ts_ms += (addon->media_timestamp*1000) / addon->media_timescale; + return (u32) media_ts_ms; +} + +u64 gf_scene_adjust_timestamp_for_addon(GF_Scene *scene, u64 orig_ts, GF_AddonMedia *addon) +{ + s64 media_ts_ms; + assert(addon->timeline_ready); + assert(scene->root_od->addon); + assert(scene->root_od->addon==addon); + + media_ts_ms = orig_ts; + media_ts_ms -= (addon->media_timestamp*1000) / addon->media_timescale; + media_ts_ms += (addon->media_pts/90); + + return (u64) media_ts_ms; +} + +void gf_scene_select_scalable_addon(GF_Scene *scene, GF_ObjectManager *odm) +{ + GF_NetworkCommand com; + GF_CodecCapability caps; + Bool nalu_annex_b; + GF_Channel *ch; + GF_ObjectManager *odm_base = NULL; + u32 i, count, mtype; + ch = gf_list_get(odm->channels, 0); + if (!ch->esd) return; + mtype = ch->esd->decoderConfig->streamType; + count = gf_list_count(scene->resources); + for (i=0; iresources, i); + if ((mtype==odm_base->codec->type) && odm_base->codec) + break; + odm_base=NULL; + //todo check if we use compatible formats, for now we only do demos with hevc/shvc + } + if (!odm_base) return; + + odm_base->scalable_odm = odm; + + nalu_annex_b = 1; + ch = gf_list_get(odm_base->channels, 0); + if (ch->esd->decoderConfig->decoderSpecificInfo && ch->esd->decoderConfig->decoderSpecificInfo->dataLength) + nalu_annex_b = 0; + + memset(&com, 0, sizeof(GF_NetworkCommand)); + com.command_type = GF_NET_CHAN_NALU_MODE; + com.nalu_mode.extract_mode = nalu_annex_b ? 
1 : 0; + count = gf_list_count(odm->channels); + for (i=0; ichannels, i); + gf_term_service_command(ch->service, &com); + } + + //signal to the base decoder that we will want full quality + caps.CapCode = GF_CODEC_MEDIA_SWITCH_QUALITY; + caps.cap.valueInt = 2; +// odm_base->codec->decio->SetCapabilities(odm_base->codec->decio, caps); +} diff --git a/src/terminal/term_node_init.c b/src/terminal/term_node_init.c index a6bfd80..f4b4f76 100644 --- a/src/terminal/term_node_init.c +++ b/src/terminal/term_node_init.c @@ -300,6 +300,13 @@ void gf_term_on_node_modified(void *_is, GF_Node *node) } } +static void gf_term_on_node_destroyed(void *_is, GF_Node *node) +{ + GF_Scene *scene = (GF_Scene *)_is; + if (!scene) return; + gf_sc_node_destroy(scene->root_od->term->compositor, node, NULL); +} + GF_EXPORT void gf_term_node_callback(void *_is, u32 type, GF_Node *n, void *param) { @@ -307,6 +314,9 @@ void gf_term_node_callback(void *_is, u32 type, GF_Node *n, void *param) case GF_SG_CALLBACK_MODIFIED: gf_term_on_node_modified(_is, n); break; + case GF_SG_CALLBACK_NODE_DESTROY: + gf_term_on_node_destroyed(_is, n); + break; case GF_SG_CALLBACK_INIT: gf_term_on_node_init(_is, n); break; diff --git a/src/terminal/terminal.c b/src/terminal/terminal.c index 3998d6a..4e6cdee 100644 --- a/src/terminal/terminal.c +++ b/src/terminal/terminal.c @@ -308,10 +308,10 @@ static void gf_term_reload_cfg(GF_Terminal *term) /*reload term part*/ sOpt = gf_cfg_get_key(term->user->config, "Systems", "DrawLateFrames"); - if (sOpt && !stricmp(sOpt, "yes")) - term->flags &= ~GF_TERM_DROP_LATE_FRAMES; - else + if (sOpt && !stricmp(sOpt, "no")) term->flags |= GF_TERM_DROP_LATE_FRAMES; + else + term->flags &= ~GF_TERM_DROP_LATE_FRAMES; sOpt = gf_cfg_get_key(term->user->config, "Systems", "ForceSingleClock"); if (sOpt && !stricmp(sOpt, "yes")) @@ -346,6 +346,8 @@ static void gf_term_reload_cfg(GF_Terminal *term) else if (!stricmp(sOpt, "Multi")) mode = GF_TERM_THREAD_MULTI; gf_term_set_threading(term, mode); } + } else { + gf_term_set_threading(term, GF_TERM_THREAD_SINGLE); } /*default data timeout is 20 sec*/ @@ -1286,8 +1288,6 @@ void media_event_collect_info(GF_ClientService *net, GF_ObjectManager *odm, GF_D u32 val; if (ch->service != net) continue; - gf_mx_p(ch->mx); - media_event->bufferValid = GF_TRUE; if (ch->BufferTime>0) { if (ch->MaxBuffer) { @@ -1302,7 +1302,6 @@ void media_event_collect_info(GF_ClientService *net, GF_ObjectManager *odm, GF_D *min_time = 0; *min_buffer = 0; } - gf_mx_v(ch->mx); } } #endif @@ -1311,7 +1310,6 @@ void gf_term_service_media_event_with_download(GF_ObjectManager *odm, GF_EventTy { #ifndef GPAC_DISABLE_SVG u32 i, count, min_buffer, min_time; - Bool locked; GF_DOMMediaEvent media_event; GF_DOM_Event evt; GF_ObjectManager *an_od; @@ -1320,7 +1318,19 @@ void gf_term_service_media_event_with_download(GF_ObjectManager *odm, GF_EventTy if (!odm || !odm->net_service) return; if (odm->mo) { count = gf_mo_event_target_count(odm->mo); + + //for dynamic scenes, check if we have listeners on the root object of the scene containing this media + if (!count + && odm->parentscene + && odm->parentscene->is_dynamic_scene + && odm->parentscene->root_od->mo + && (odm->parentscene->root_od->net_service==odm->net_service) + ) { + odm = odm->parentscene->root_od; + count = gf_mo_event_target_count(odm->mo); + } if (!count) return; + if (0 && !(gf_node_get_dom_event_filter((GF_Node *)gf_event_target_get_node(gf_mo_event_target_get(odm->mo, 0))) & GF_DOM_EVENT_MEDIA)) return; } else { @@ -1354,19 +1364,18 @@ void 
gf_term_service_media_event_with_download(GF_ObjectManager *odm, GF_EventTy evt.type = event_type; evt.bubbles = 0; /*the spec says yes but we force it to NO*/ - /*lock scene to prevent concurrent access of scene data*/ - locked = gf_mx_try_lock(odm->term->compositor->mx); - if (!locked) return; - + //these events may be triggered from any input or decoding threads. Sync processing cannot be + //achieved in most cases, because we may run into deadlocks, especially if the event + //was triggered by a service opened by JS for (i=0; imo->evt_targets, i); - sg_fire_dom_event(target, &evt, scene->graph, NULL); + if (target) + gf_sc_queue_dom_event_on_target(scene->root_od->term->compositor, &evt, target, scene->graph); } if (!count) { GF_Node *root = gf_sg_get_root_node(scene->graph); - if (root) gf_dom_event_fire(root, &evt); + if (root) gf_sc_queue_dom_event(scene->root_od->term->compositor, root, &evt); } - gf_sc_lock(odm->term->compositor, GF_FALSE); #endif } @@ -1775,6 +1784,19 @@ GF_Err gf_term_scene_update(GF_Terminal *term, char *type, char *com) return gf_scene_execute_script(term->root_scene->graph, com); } + if (!type && com && !strncmp(com, "gpac ", 5)) { + com += 5; + //new add-on + if (term->root_scene && !strncmp(com, "add ", 4)) { + GF_AssociatedContentLocation addon_info; + memset(&addon_info, 0, sizeof(GF_AssociatedContentLocation)); + addon_info.external_URL = com + 4; + addon_info.timeline_id = -1; + gf_scene_register_associated_media(term->root_scene, &addon_info); + } + return GF_OK; + } + memset(&load, 0, sizeof(GF_SceneLoader)); load.localPath = gf_cfg_get_key(term->user->config, "General", "CacheDirectory"); load.flags = GF_SM_LOAD_FOR_PLAYBACK | GF_SM_LOAD_CONTEXT_READY; diff --git a/src/utils/color.c b/src/utils/color.c index 2c543ff..3ed6cb2 100644 --- a/src/utils/color.c +++ b/src/utils/color.c @@ -1116,18 +1116,18 @@ GF_Err gf_stretch_bits(GF_VideoSurface *dst, GF_VideoSurface *src, GF_Window *ds the_row --; if (flip) the_row = src->height-2 - the_row; if (yuv_planar_type==1) { - load_line_yv12(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, src->u_ptr, src->v_ptr); + load_line_yv12(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, (u8 *) src->u_ptr, (u8 *) src->v_ptr); } else if (yuv_planar_type==3) { - load_line_yv12_10(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, src->u_ptr, src->v_ptr); + load_line_yv12_10((char *) src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp,(u8 *) src->u_ptr, (u8 *) src->v_ptr); } else { - load_line_yuva(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, src->u_ptr, src->v_ptr, src->a_ptr); + load_line_yuva(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, (u8 *) src->u_ptr, (u8 *) src->v_ptr, (u8 *) src->a_ptr); } the_row = src_row - 1; if (cmat) { for (i=0; i<2*src_w; i++) { u32 idx = 4*i; - gf_cmx_apply_argb(cmat, &tmp[idx+3], &tmp[idx], &tmp[idx+1], &tmp[idx+2]); + gf_cmx_apply_argb(cmat, (u8 *) &tmp[idx+3], (u8 *) &tmp[idx], (u8 *) &tmp[idx+1], (u8 *) &tmp[idx+2]); } } if (key) { @@ -1152,11 +1152,11 @@ GF_Err gf_stretch_bits(GF_VideoSurface *dst, GF_VideoSurface *src, GF_Window *ds } else { if (flip) the_row = src->height-2 - the_row; if (yuv_planar_type==1) { - load_line_yv12(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, src->u_ptr, src->v_ptr); + load_line_yv12(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, (u8 *) src->u_ptr, (u8 
*) src->v_ptr); } else if (yuv_planar_type==3) { - load_line_yv12_10(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, src->u_ptr, src->v_ptr); + load_line_yv12_10(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, (u8 *) src->u_ptr, (u8 *) src->v_ptr); } else { - load_line_yuva(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, src->u_ptr, src->v_ptr, src->a_ptr); + load_line_yuva(src->video_buffer, x_off, the_row, src->pitch_y, src_w, src->height, tmp, (u8 *) src->u_ptr,(u8 *) src->v_ptr, (u8 *) src->a_ptr); } yuv_init = 1; rows = flip ? tmp + src_w * 4 : tmp; @@ -1392,3 +1392,175 @@ void gf_cmx_apply_fixed(GF_ColorMatrix *_this, Fixed *a, Fixed *r, Fixed *g, Fix *g = INT2FIX(GF_COL_G(col)) / 255; *b = INT2FIX(GF_COL_B(col)) / 255; } + + + +#ifdef WIN32 +# include +# define GPAC_HAS_SSE2 +#else +# ifdef __SSE2__ +# include +# define GPAC_HAS_SSE2 +# endif +#endif + +#ifdef GPAC_HAS_SSE2 + +static GF_Err gf_color_write_yv12_10_to_yuv_intrin(GF_VideoSurface *vs_dst, unsigned char *pY, unsigned char *pU, unsigned char*pV, u32 src_stride, u32 src_width, u32 src_height, const GF_Window *_src_wnd) +{ + u32 i, j, w, h; + if (!pU) { + pU = pY + src_stride * src_height; + pV = pY + 5*src_stride * src_height/4; + } + + if (_src_wnd) { + pY = pY + src_stride * _src_wnd->y + _src_wnd->x; + /*because of U and V downsampling by 2x2, working with odd Y offset will lead to a half-line shift between Y and UV components. We + therefore force an even Y offset for U and V planes.*/ + pU = pU + (src_stride * (_src_wnd->y / 2) + _src_wnd->x) / 2; + pV = pV + (src_stride * (_src_wnd->y / 2) + _src_wnd->x) / 2; + w = _src_wnd->w; + h = _src_wnd->h; + } else { + w = src_width; + h = src_height; + } + + if (vs_dst->pixel_format == GF_PIXEL_YV12) { + __m128i val1, val2, val_dst, *src1, *src2, *dst; + for (i=0; ivideo_buffer + i*vs_dst->pitch_y); + + for (j=0; jvideo_buffer + vs_dst->pitch_y * vs_dst->height + i*vs_dst->pitch_y/2); + + for (j=0; jvideo_buffer + 5*vs_dst->pitch_y * vs_dst->height/4 + i*vs_dst->pitch_y/2); + + for (j=0; jw; + h = _src_wnd->h; + } else { + w = src_width; + h = src_height; + } + + +#ifdef GPAC_HAS_SSE2 + +#ifdef GPAC_64_BITS +#define GFINTCAST (u64) +#else +#define GFINTCAST (u32) +#endif + + if ( (w%32 == 0) + && (GFINTCAST (vs_dst->video_buffer + vs_dst->pitch_y)%8 == 0) + && (GFINTCAST (vs_dst->video_buffer + vs_dst->pitch_y * vs_dst->height + vs_dst->pitch_y/2)%8 == 0) + && (GFINTCAST (pU + src_stride/2)%8 == 0) + && (GFINTCAST (pV + src_stride/2)%8 == 0) + ) { + return gf_color_write_yv12_10_to_yuv_intrin(vs_dst, pY, pU, pV, src_stride, src_width, src_height, _src_wnd); + } +#endif + + if (!pU) { + pU = pY + src_stride * src_height; + pV = pY + 5*src_stride * src_height/4; + } + + if (_src_wnd) { + pY = pY + src_stride * _src_wnd->y + _src_wnd->x; + /*because of U and V downsampling by 2x2, working with odd Y offset will lead to a half-line shift between Y and UV components. 
We + therefore force an even Y offset for U and V planes.*/ + pU = pU + (src_stride * (_src_wnd->y / 2) + _src_wnd->x) / 2; + pV = pV + (src_stride * (_src_wnd->y / 2) + _src_wnd->x) / 2; + } + + if (vs_dst->pixel_format == GF_PIXEL_YV12) { + for (i=0; ivideo_buffer + i*vs_dst->pitch_y; + + for (j=0; j> 2; + dst++; + src++; + } + } + + for (i=0; ivideo_buffer + vs_dst->pitch_y * vs_dst->height + i*vs_dst->pitch_y/2; + + for (j=0; j> 2; + dst++; + src++; + } + } + + for (i=0; ivideo_buffer + 5*vs_dst->pitch_y * vs_dst->height/4 + i*vs_dst->pitch_y/2; + + for (j=0; j> 2; + dst++; + src++; + } + } + return GF_OK; + } + return GF_NOT_SUPPORTED; +} + diff --git a/src/utils/downloader.c b/src/utils/downloader.c index d45b3e7..19476e5 100644 --- a/src/utils/downloader.c +++ b/src/utils/downloader.c @@ -1784,7 +1784,7 @@ static void gf_icy_skip_data(GF_DownloadSession * sess, u32 icy_metaint, const c } -static u8 *gf_dm_get_chunk_data(GF_DownloadSession *sess, u8 *body_start, u32 *payload_size, u32 *header_size) +static char *gf_dm_get_chunk_data(GF_DownloadSession *sess, char *body_start, u32 *payload_size, u32 *header_size) { u32 size; char *te_header, *sep; @@ -1803,7 +1803,7 @@ static u8 *gf_dm_get_chunk_data(GF_DownloadSession *sess, u8 *body_start, u32 *p } - te_header = strstr(body_start, "\r\n"); + te_header = strstr((char *) body_start, "\r\n"); if (!te_header) return NULL; te_header[0] = 0; @@ -1833,7 +1833,7 @@ static GFINLINE void gf_dm_data_received(GF_DownloadSession *sess, u8 *payload, hdr_size = 0; remaining = 0; if (sess->chunked) { - data = gf_dm_get_chunk_data(sess, payload, &nbBytes, &hdr_size); + data = (u8 *) gf_dm_get_chunk_data(sess, (char *) payload, &nbBytes, &hdr_size); if (hdr_size + nbBytes + 2 > payload_size) { remaining = nbBytes + 2 - payload_size + hdr_size; nbBytes = payload_size - hdr_size; @@ -1870,14 +1870,14 @@ static GFINLINE void gf_dm_data_received(GF_DownloadSession *sess, u8 *payload, } if (sess->icy_metaint > 0) - gf_icy_skip_data(sess, sess->icy_metaint, data, nbBytes); + gf_icy_skip_data(sess, sess->icy_metaint, (char *) data, nbBytes); else { if (sess->use_cache_file) - gf_cache_write_to_cache( sess->cache_entry, sess, data, nbBytes); + gf_cache_write_to_cache( sess->cache_entry, sess, (char *) data, nbBytes); par.msg_type = GF_NETIO_DATA_EXCHANGE; par.error = GF_OK; - par.data = data; + par.data = (char *) data; par.size = nbBytes; par.reply = flush_chunk; gf_dm_sess_user_io(sess, &par); @@ -1967,7 +1967,7 @@ GF_Err gf_dm_sess_fetch_data(GF_DownloadSession *sess, char *buffer, u32 buffer_ if (e) return e; size = *read_size; *read_size = 0; - gf_dm_data_received(sess, buffer, size, 0, read_size); + gf_dm_data_received(sess, (u8 *) buffer, size, 0, read_size); return GF_OK; } @@ -2349,7 +2349,7 @@ static GF_Err http_parse_remaining_body(GF_DownloadSession * sess, char * sHTTP) if (e == GF_IP_CONNECTION_CLOSED){ u32 len = gf_cache_get_content_length(sess->cache_entry); if (size > 0) - gf_dm_data_received(sess, sHTTP, size, 0, NULL); + gf_dm_data_received(sess, (u8 *) sHTTP, size, 0, NULL); if ( ( (len == 0) && sess->use_cache_file) /*ivica patch*/ || (size==0) @@ -2369,7 +2369,7 @@ static GF_Err http_parse_remaining_body(GF_DownloadSession * sess, char * sHTTP) gf_dm_sess_notify_state(sess, sess->status, e); return e; } - gf_dm_data_received(sess, sHTTP, size, 0, NULL); + gf_dm_data_received(sess, (u8 *) sHTTP, size, 0, NULL); /*socket empty*/ if (size < GF_DOWNLOAD_BUFFER_SIZE) { @@ -2877,10 +2877,13 @@ static GF_Err 
wait_for_header_and_parse(GF_DownloadSession *sess, char * sHTTP) sess->use_cache_file = 0; } - GF_LOG(GF_LOG_ERROR, GF_LOG_NETWORK, - (e ? ("[HTTP] Error connecting to %s: %s\n", sess->server_name, gf_error_to_string(e) ) - : ("[HTTP] Connected to %s\n", sess->server_name ) - )); +#ifndef GPAC_DISABLE_LOGS + if (e) { + GF_LOG(GF_LOG_ERROR, GF_LOG_NETWORK, ("[HTTP] Error connecting to %s: %s\n", sess->server_name, gf_error_to_string(e) ) ); + } else { + GF_LOG(GF_LOG_INFO, GF_LOG_NETWORK, ("[HTTP] Connected to %s\n", sess->server_name ) ); + } +#endif /*some servers may reply without content length, but we MUST have it*/ if (e) goto exit; @@ -2924,7 +2927,7 @@ static GF_Err wait_for_header_and_parse(GF_DownloadSession *sess, char * sHTTP) sess->init_data_size = 0; sess->init_data = NULL; - gf_dm_data_received(sess, sHTTP + BodyStart, bytesRead - BodyStart, 1, NULL); + gf_dm_data_received(sess, (u8 *) sHTTP + BodyStart, bytesRead - BodyStart, 1, NULL); } exit: if (e) { diff --git a/src/utils/module.c b/src/utils/module.c index b927706..2cd3dbd 100644 --- a/src/utils/module.c +++ b/src/utils/module.c @@ -44,8 +44,12 @@ static void load_all_modules(GF_ModuleManager *mgr) #ifdef GPAC_STATIC_MODULES GF_InterfaceRegister *pr; +#ifdef GPAC_HAS_FAAD LOAD_PLUGIN(aac_in); +#endif +#ifdef GPAC_HAS_AC3 LOAD_PLUGIN(ac3); +#endif #ifdef GPAC_HAS_ALSA LOAD_PLUGIN(alsa); #endif @@ -82,7 +86,9 @@ static void load_all_modules(GF_ModuleManager *mgr) #ifndef GPAC_DISABLE_SVG LOAD_PLUGIN(laser); #endif +#ifdef GPAC_HAS_MAD LOAD_PLUGIN(mp3_in); +#endif LOAD_PLUGIN(mpd_in); #ifndef GPAC_DISABLE_MEDIA_IMPORT LOAD_PLUGIN(mpegts_in); @@ -127,6 +133,9 @@ static void load_all_modules(GF_ModuleManager *mgr) #ifdef GPAC_HAS_WAVEOUT LOAD_PLUGIN(wave_out); #endif +#ifndef GPAC_DISABLE_TTXT + LOAD_PLUGIN(vtt_in); +#endif #ifndef GPAC_DISABLE_SVG LOAD_PLUGIN(widgetman); #endif @@ -136,11 +145,7 @@ static void load_all_modules(GF_ModuleManager *mgr) #ifdef GPAC_HAS_XVID LOAD_PLUGIN(xvid); #endif - - LOAD_PLUGIN(ffmpeg); - - - + //todo fix project for iOS #ifdef GPAC_IPHONE // LOAD_PLUGIN(ios_cam); diff --git a/src/utils/os_divers.c b/src/utils/os_divers.c index 496b8be..042d41c 100644 --- a/src/utils/os_divers.c +++ b/src/utils/os_divers.c @@ -68,6 +68,7 @@ #define SLEEP_ABS_SELECT 1 static u32 sys_start_time = 0; +static u64 sys_start_time_hr = 0; #endif @@ -85,6 +86,15 @@ u32 gf_sys_clock() gettimeofday(&now, NULL); return ( (now.tv_sec)*1000 + (now.tv_usec) / 1000) - sys_start_time; } + +GF_EXPORT +u64 gf_sys_clock_high_res() +{ + struct timeval now; + gettimeofday(&now, NULL); + return (now.tv_sec)*1000000 + (now.tv_usec) - sys_start_time_hr; +} + #endif @@ -1069,6 +1079,13 @@ u32 gf_sys_clock() { return OS_GetSysClock(); } + + +static u64 (*OS_GetSysClockHR)(); +u64 gf_sys_clock_high_res() +{ + return OS_GetSysClockHR(); +} #endif @@ -1082,6 +1099,14 @@ static u32 OS_GetSysClockHIGHRES() return (u32) ((now.QuadPart * 1000) / frequency.QuadPart); } +static u64 OS_GetSysClockHIGHRES_FULL() +{ + LARGE_INTEGER now; + QueryPerformanceCounter(&now); + now.QuadPart -= init_counter.QuadPart; + return (u64) ((now.QuadPart * 1000000) / frequency.QuadPart); +} + static u32 OS_GetSysClockNORMAL() { #ifdef _WIN32_WCE @@ -1091,6 +1116,12 @@ static u32 OS_GetSysClockNORMAL() #endif } +static u64 OS_GetSysClockNORMAL_FULL() +{ + u64 res = OS_GetSysClockNORMAL(); + return res*1000; +} + #endif /* WIN32 */ #if defined(__sh__) @@ -1176,9 +1207,11 @@ void gf_sys_init(Bool enable_memory_tracker) if (QueryPerformanceFrequency(&frequency)) { 
QueryPerformanceCounter(&init_counter); OS_GetSysClock = OS_GetSysClockHIGHRES; + OS_GetSysClockHR = OS_GetSysClockHIGHRES_FULL; GF_LOG(GF_LOG_INFO, GF_LOG_CORE, ("[core] using WIN32 performance timer\n")); } else { OS_GetSysClock = OS_GetSysClockNORMAL; + OS_GetSysClockHR = OS_GetSysClockNORMAL_FULL; GF_LOG(GF_LOG_INFO, GF_LOG_CORE, ("[core] using WIN32 regular timer\n")); } @@ -1247,6 +1280,7 @@ void gf_sys_init(Bool enable_memory_tracker) #endif sys_start_time = gf_sys_clock(); + sys_start_time_hr = gf_sys_clock_high_res(); #endif GF_LOG(GF_LOG_INFO, GF_LOG_CORE, ("[core] process id %d\n", the_rti.pid)); -- 2.30.2
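
Note on the addon timeline mapping introduced in src/terminal/scene.c: gf_scene_adjust_time_for_addon() and gf_scene_adjust_timestamp_for_addon() convert between the main presentation clock (in milliseconds) and the addon media time using the (media_pts, media_timestamp, media_timescale) triplet delivered through gf_scene_notify_associated_media_timeline(), where media_pts is a 90 kHz MPEG-2 PTS. The standalone sketch below reproduces that arithmetic with made-up values so both directions can be checked in isolation; the struct and helper names are illustrative only, not part of the GPAC API.

#include <stdio.h>
#include <stdint.h>

/* Hypothetical stand-in for the fields used by the patch:
 * media_pts       - PTS of the main content at the association point, in 90 kHz units
 * media_timestamp - corresponding position in the addon media, in media_timescale units
 * media_timescale - timescale of the addon media
 */
typedef struct {
    uint64_t media_pts;
    uint64_t media_timestamp;
    uint32_t media_timescale;
} addon_timeline;

/* Main-content clock time (ms) -> addon media time (ms) */
static uint32_t addon_time_from_clock(const addon_timeline *a, uint32_t clock_time_ms)
{
    int64_t t = clock_time_ms;
    t -= (int64_t)(a->media_pts / 90);                              /* 90 kHz PTS -> ms */
    t += (int64_t)(a->media_timestamp * 1000) / a->media_timescale; /* media units -> ms */
    return (uint32_t)t;
}

/* Addon media time (ms) -> main-content clock time (ms), the inverse mapping */
static uint64_t clock_from_addon_time(const addon_timeline *a, uint64_t addon_ts_ms)
{
    int64_t t = addon_ts_ms;
    t -= (int64_t)(a->media_timestamp * 1000) / a->media_timescale;
    t += (int64_t)(a->media_pts / 90);
    return (uint64_t)t;
}

int main(void)
{
    /* example values: main content at PTS 900000 (10 s), addon associated at 2 s of a 1000 Hz timeline */
    addon_timeline a = { 900000, 2000, 1000 };
    uint32_t media_ms = addon_time_from_clock(&a, 12000); /* 12 s on the main clock */
    printf("addon media time: %u ms\n", media_ms);
    printf("back to clock:    %llu ms\n", (unsigned long long)clock_from_addon_time(&a, media_ms));
    return 0;
}

With the values above, 12 s on the main clock lands 2 s past the association point, i.e. at 4 s of the addon timeline, and the inverse mapping returns 12 s.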
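
Note on the 10-bit output path added to src/utils/color.c: gf_color_write_yv12_10_to_yuv() converts 10-bit planar YUV (one sample per 16-bit word) to 8-bit YV12, with an SSE2 fast path taken when the width is a multiple of 32 and the plane pointers are 8-byte aligned, and a scalar fallback otherwise. The sketch below shows the per-plane scalar conversion, which is a plain right shift by 2 per sample; the helper name and signature are mine, not GPAC's, and the 16-bit-per-sample layout is an assumption carried over from the patch.

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Repack one plane of 10-bit samples (stored on 16 bits, host byte order) into
 * 8-bit samples by dropping the two least significant bits. Strides are in bytes.
 * Illustrative helper, not a GPAC API.
 */
void plane_10_to_8(uint8_t *dst, uint32_t dst_stride,
                   const uint8_t *src, uint32_t src_stride,
                   uint32_t width, uint32_t height)
{
    uint32_t i, j;
    for (i = 0; i < height; i++) {
        const uint16_t *s = (const uint16_t *)(src + (size_t)i * src_stride);
        uint8_t *d = dst + (size_t)i * dst_stride;
        for (j = 0; j < width; j++)
            d[j] = (uint8_t)(s[j] >> 2);  /* 10-bit -> 8-bit: 1023 -> 255, 512 -> 128, ... */
    }
}

int main(void)
{
    /* one 4x2 plane of 10-bit samples */
    uint16_t src[8] = { 0, 4, 512, 1023, 64, 128, 256, 1000 };
    uint8_t dst[8];
    int k;
    plane_10_to_8(dst, 4, (const uint8_t *)src, 8, 4, 2);
    for (k = 0; k < 8; k++) printf("%u ", (unsigned)dst[k]); /* 0 1 128 255 16 32 64 250 */
    printf("\n");
    return 0;
}

For a YV12 destination the same routine is applied three times: to the full-resolution Y plane and to each half-width, half-height U and V plane. Because chroma is subsampled 2x2, the patch also forces an even Y offset when cropping so the U and V windows stay aligned with the luma window.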
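
Note on the event delivery change in src/terminal/scene.c and src/terminal/terminal.c: DOM events that used to be fired synchronously with gf_dom_event_fire() under the compositor lock are now queued with gf_sc_queue_dom_event()/gf_sc_queue_dom_event_on_target() and dispatched later by the compositor, because the events can originate in network or decoding threads and synchronous dispatch could deadlock. The sketch below illustrates the general queue-then-flush pattern with a plain pthread mutex; the types and functions are illustrative only, not the compositor's actual implementation.

#include <pthread.h>
#include <stdlib.h>

/* Producer threads push events under a short-lived lock; the consumer (here,
 * playing the role of the compositor thread) drains the queue and fires the
 * callbacks with no lock held. The queue's mutex must be set up once with
 * pthread_mutex_init() before use.
 */
typedef struct queued_evt {
    int type;
    void *target;
    struct queued_evt *next;
} queued_evt;

typedef struct {
    pthread_mutex_t mx;
    queued_evt *head, *tail;
} evt_queue;

void evt_queue_push(evt_queue *q, int type, void *target)
{
    queued_evt *e = calloc(1, sizeof(*e));
    if (!e) return;
    e->type = type;
    e->target = target;
    pthread_mutex_lock(&q->mx);
    if (q->tail) q->tail->next = e; else q->head = e;
    q->tail = e;
    pthread_mutex_unlock(&q->mx);
}

/* called from the consumer thread only */
void evt_queue_flush(evt_queue *q, void (*fire)(int type, void *target))
{
    queued_evt *e;
    pthread_mutex_lock(&q->mx);
    e = q->head;
    q->head = q->tail = NULL;
    pthread_mutex_unlock(&q->mx);
    while (e) {
        queued_evt *next = e->next;
        fire(e->type, e->target);  /* no queue lock held while user code runs */
        free(e);
        e = next;
    }
}

The property that matters here is that producers only touch the queue under the lock, while the event handler runs on the consumer thread with no lock held, so script triggered by the event cannot re-enter and deadlock the thread that raised it.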