}
-static void dump_nalu(FILE *dump, char *ptr, Bool is_svc, Bool is_hevc)
+static void dump_nalu(FILE *dump, char *ptr, u32 ptr_size, Bool is_svc, Bool is_hevc)
{
u8 type;
u8 dependency_id, quality_id, temporal_id;
case GF_AVC_NALU_SEI: fputs("SEI Message", dump); break;
case GF_AVC_NALU_SEQ_PARAM:
fputs("SequenceParameterSet", dump);
- gf_avc_get_sps_info(ptr, strlen(ptr), &sps_id, NULL, NULL, NULL, NULL);
+ gf_avc_get_sps_info(ptr, ptr_size, &sps_id, NULL, NULL, NULL, NULL);
fprintf(dump, "\" sps_id=\"%d", sps_id);
break;
case GF_AVC_NALU_PIC_PARAM:
fputs("PictureParameterSet", dump);
- gf_avc_get_pps_info(ptr+1, strlen(ptr)-1, &pps_id, &sps_id);
+ gf_avc_get_pps_info(ptr, ptr_size, &pps_id, &sps_id);
fprintf(dump, "\" pps_id=\"%d\" sps_id=\"%d", pps_id, sps_id);
break;
case GF_AVC_NALU_ACCESS_UNIT: fputs("AccessUnit delimiter", dump); break;
case GF_AVC_NALU_SVC_PREFIX_NALU: fputs("SVCPrefix", dump); break;
case GF_AVC_NALU_SVC_SUBSEQ_PARAM:
fputs("SVCSubsequenceParameterSet", dump);
- gf_avc_get_sps_info(ptr, strlen(ptr), &sps_id, NULL, NULL, NULL, NULL);
+ gf_avc_get_sps_info(ptr, ptr_size, &sps_id, NULL, NULL, NULL, NULL);
fprintf(dump, "\" sps_id=\"%d", sps_id);
break;
case GF_AVC_NALU_SLICE_AUX: fputs("Auxiliary Slice", dump); break;
for (i=0; i<gf_list_count(arr); i++) {\
slc = gf_list_get(arr, i);\
fprintf(dump, " <%s number=\"%d\" size=\"%d\"", name, i+1, slc->size);\
- dump_nalu(dump, slc->data , svccfg ? 1 : 0, is_hevc);\
+ dump_nalu(dump, slc->data, slc->size, svccfg ? 1 : 0, is_hevc);\
fprintf(dump, "/>\n");\
}\
}\
break;
} else {
fprintf(dump, " <NALU number=\"%d\" size=\"%d\" ", idx, nal_size);
- dump_nalu(dump, ptr, svccfg ? 1 : 0, is_hevc);
+ dump_nalu(dump, ptr, nal_size, svccfg ? 1 : 0, is_hevc);
fprintf(dump, "/>\n");
}
idx++;
" -time-shift TIME specifies MPD time shift buffer depth in seconds (default 0). Specify -1 to keep all files\n"
" -subdur DUR specifies maximum duration in ms of the input file to be dashed in LIVE or context mode.\n"
" NOTE: This does not change the segment duration: dashing stops once segments produced exceeded the duration.\n"
+ " -min-buffer TIME specifies MPD min buffer time in milliseconds\n"
+ " -ast-offset TIME specifies MPD AvailabilityStartTime offset in seconds. Default is 1 sec delay\n"
"\n"
"Advanced Options, should not be needed when using -dash-profile:\n"
" -subsegs-per-sidx N sets the number of subsegments to be written in each SIDX box\n"
" -daisy-chain uses daisy-chain SIDX instead of hierarchical. Ignored if frags/sidx is 0.\n"
" -single-segment uses a single segment for the whole file (OnDemand profile). \n"
" -single-file uses a single file for the whole file (default). \n"
- " -bs-switching MODE sets bitstream switching to \"yes\" (default), \"merge\", \"no\" or \"single\" to test with single input.\n"
+ " -bs-switching MODE sets bitstream switching to \"inband\" (default), \"merge\", \"no\" or \"single\" to test with single input.\n"
" -dash-ts-prog N program_number to be considered in case of an MPTS input file.\n"
"\n");
}
bw = gf_hinter_track_get_bandwidth(hinter);
tot_bw += bw;
flags = gf_hinter_track_get_flags(hinter);
+
+ //set extraction mode for AVC/SVC
+ gf_isom_set_nalu_extract_mode(file, i+1, GF_ISOM_NALU_EXTRACT_LAYER_ONLY);
+
gf_hinter_track_get_payload_name(hinter, szPayload);
fprintf(stderr, "Hinting track ID %d - Type \"%s:%s\" (%s) - BW %d kbps\n", gf_isom_get_track_id(file, i+1), gf_4cc_to_str(mtype), gf_4cc_to_str(mtype), szPayload, bw);
if (flags & GP_RTP_PCK_SYSTEMS_CAROUSEL) fprintf(stderr, "\tMPEG-4 Systems stream carousel enabled\n");
Bool HintIt, needSave, FullInter, Frag, HintInter, dump_std, dump_rtp, dump_mode, regular_iod, trackID, remove_sys_tracks, remove_hint, force_new, remove_root_od, import_subtitle, dump_chap;
Bool print_sdp, print_info, open_edit, track_dump_type, dump_isom, dump_cr, force_ocr, encode, do_log, do_flat, dump_srt, dump_ttxt, dump_timestamps, do_saf, dump_m2ts, dump_cart, do_hash, verbose, force_cat, align_cat, pack_wgt, single_group, dash_live;
char *inName, *outName, *arg, *mediaSource, *tmpdir, *input_ctx, *output_ctx, *drm_file, *avi2raw, *cprt, *chap_file, *pes_dump, *itunes_tags, *pack_file, *raw_cat, *seg_name, *dash_ctx_file;
-
+ Double min_buffer = 1.5;
+ u32 ast_shift_sec = 1;
char **mpd_base_urls = NULL;
u32 nb_mpd_base_urls=0;
if (!stricmp(argv[i+1], "no") || !stricmp(argv[i+1], "off")) bitstream_switching_mode = GF_DASH_BSMODE_NONE;
else if (!stricmp(argv[i+1], "merge")) bitstream_switching_mode = GF_DASH_BSMODE_MERGED;
else if (!stricmp(argv[i+1], "single")) bitstream_switching_mode = GF_DASH_BSMODE_SINGLE;
- else if (!stricmp(argv[i+1], "single_merge")) bitstream_switching_mode = GF_DASH_BSMODE_SINGLE_MERGED;
+ else if (!stricmp(argv[i+1], "inband")) bitstream_switching_mode = GF_DASH_BSMODE_INBAND;
else bitstream_switching_mode = GF_DASH_BSMODE_INBAND;
i++;
}
time_shift_depth = (u32) atoi(argv[i+1]);
i++;
}
+ else if (!stricmp(arg, "-min-buffer")) {
+ CHECK_NEXT_ARG
+ min_buffer = atoi(argv[i+1]);
+ min_buffer /= 1000;
+ i++;
+ }
+ else if (!stricmp(arg, "-ast-offset")) {
+ CHECK_NEXT_ARG
+ ast_shift_sec = (u32) atoi(argv[i+1]);
+ i++;
+ }
else if (!stricmp(arg, "-mpd-title")) { CHECK_NEXT_ARG dash_title = argv[i+1]; i++; }
else if (!stricmp(arg, "-mpd-source")) { CHECK_NEXT_ARG dash_source = argv[i+1]; i++; }
else if (!stricmp(arg, "-mpd-info-url")) { CHECK_NEXT_ARG dash_more_info = argv[i+1]; i++; }
if (dash_duration) {
char szMPD[GF_MAX_PATH], *sep;
GF_Config *dash_ctx = NULL;
+ u32 do_abort = 0;
gf_log_set_tool_level(GF_LOG_DASH, GF_LOG_INFO);
strcpy(outfile, outName ? outName : gf_url_get_resource_name(inName) );
sep = strrchr(outfile, '.');
strcpy(szMPD, outfile);
strcat(szMPD, ".mpd");
+ fprintf(stderr, "Live DASH-ing - press 'q' to quit, 's' to save context and quit\n");
+
if (!dash_ctx_file && dash_live) {
dash_ctx = gf_cfg_new(NULL, NULL);
} else if (dash_ctx_file) {
fprintf(stderr, "Using default MPD refresh of %d seconds\n", mpd_update_time);
}
- while (1) {
+ while (!do_abort) {
e = gf_dasher_segment_files(szMPD, dash_inputs, nb_dash_inputs, dash_profile, dash_title, dash_source, cprt, dash_more_info,
(const char **) mpd_base_urls, nb_mpd_base_urls,
use_url_template, single_segment, single_file, bitstream_switching_mode,
seg_at_rap, dash_duration, seg_name, seg_ext,
interleaving_time, subsegs_per_sidx, daisy_chain_sidx, frag_at_rap, tmpdir,
- dash_ctx, dash_dynamic, mpd_update_time, time_shift_depth, dash_subduration);
+ dash_ctx, dash_dynamic, mpd_update_time, time_shift_depth, dash_subduration, min_buffer, ast_shift_sec);
if (e) break;
if (dash_live) {
u32 sleep_for = gf_dasher_next_update_time(dash_ctx, mpd_update_time);
- if (gf_prompt_has_input()) {
- char c = (char) gf_prompt_get_char();
- if (c=='q') break;
- }
- if (sleep_for) {
+ fprintf(stderr, "sleep for %d ms\n", sleep_for);
+ while (1) {
+ if (gf_prompt_has_input()) {
+ char c = (char) gf_prompt_get_char();
+ if (c=='q') { do_abort = 1; break; }
+ if (c=='s') { do_abort = 2; break; }
+ }
+ if (sleep_for<100) break;
+
if (dash_dynamic != 2) {
- fprintf(stderr, "sleep for %d ms\n", sleep_for);
- gf_sleep(sleep_for);
+ gf_sleep(100);
}
+ sleep_for = gf_dasher_next_update_time(dash_ctx, mpd_update_time);
}
} else {
break;
}
}
-
- if (dash_ctx) gf_cfg_del(dash_ctx);
+
+ if (dash_ctx) {
+ if (do_abort==2) {
+ char szName[1024];
+ fprintf(stderr, "Enter file name to save dash context:\n");
+			if (scanf("%1023s", szName) == 1) {
+ gf_cfg_set_filename(dash_ctx, szName);
+ gf_cfg_save(dash_ctx);
+ }
+ }
+ gf_cfg_del(dash_ctx);
+ }
if (e) fprintf(stderr, "Error DASHing file: %s\n", gf_error_to_string(e));
gf_sys_close();
EOF
if $cc -o $TMPO $TMPC $js_flags $LDFLAGS $js_lib 2> /dev/null ; then
js_flags="-DUSE_FFDEV_12 $js_flags"
+ elif grep JSMutableHandleValue $js_inc/jsapi.h | grep JSHasInstanceOp > /dev/null 2>&1 ; then
+ js_flags="-DUSE_FFDEV_18 $js_flags"
elif grep JSMutableHandleValue $js_inc/jsapi.h > /dev/null 2>&1 ; then
    js_flags="-DUSE_FFDEV_17 $js_flags"
elif ! grep JS_ConstructObject $js_inc/jsapi.h > /dev/null 2>&1 ; then
js_flags="-DUSE_FFDEV_16 $js_flags"
elif grep JSHandleObject $js_inc/jsapi.h > /dev/null 2>&1 ; then
*/
char * gf_cfg_get_filename(GF_Config *iniFile);
+/*!
+ * Set the full filename associated with this config file
+ * \param iniFile The Configuration
+ * \param fileName new filename for the config
+ * \return error code
+ */
+GF_Err gf_cfg_set_filename(GF_Config *iniFile, const char * fileName);
+
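+/* Usage sketch (illustrative names): redirect an in-memory configuration to a file before saving it
+
+	GF_Err e = gf_cfg_set_filename(dash_ctx, "dash_context.cfg");
+	if (e == GF_OK) gf_cfg_save(dash_ctx);
+*/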
#ifdef __cplusplus
}
#endif
GPAC_OTI_VIDEO_AVC_PS = 0x22,
/*!OTI for HEVC video */
GPAC_OTI_VIDEO_HEVC = 0x23,
+ /*!OTI for H264-SVC streams*/
+ GPAC_OTI_VIDEO_SVC = 0x24,
/*!OTI for MPEG-4 AAC streams*/
GPAC_OTI_AUDIO_AAC_MPEG4 = 0x40,
GF_RTP_PAYT_3GPP_DIMS,
/*use AC3 audio format*/
GF_RTP_PAYT_AC3,
+ /*use H264-SVC transport*/
+ GF_RTP_PAYT_H264_SVC,
};
{
GF_ISOM_BOX
GF_EditListBox *editList;
-
- Bool last_is_empty;
} GF_EditBox;
/*new APIs*/
#if (JS_VERSION>=185)
+#ifdef USE_FFDEV_18
+#define USE_FFDEV_17
+#endif
+
#ifdef USE_FFDEV_17
#define USE_FFDEV_16
#endif
#endif
#if (JS_VERSION>=185)
-#ifdef USE_FFDEV_15
+#if defined(USE_FFDEV_18)
+JSBool gf_sg_js_has_instance(JSContext *cx, JSHandleObject obj, JSMutableHandleValue vp, JSBool *bp);
+#elif defined(USE_FFDEV_15)
JSBool gf_sg_js_has_instance(JSContext *c, JSHandleObject obj,const jsval *val, JSBool *vp);
#else
JSBool gf_sg_js_has_instance(JSContext *c, JSObject *obj,const jsval *val, JSBool *vp);
*/
GF_Err gf_isom_get_sample_for_movie_time(GF_ISOFile *the_file, u32 trackNumber, u64 movieTime, u32 *StreamDescriptionIndex, u8 SearchMode, GF_ISOSample **sample, u32 *sampleNumber);
+/*returns 1 if the track has a real edit list, 0 if it has no edit list or only a time-shifting one; in that case mediaOffset is set to the DTS offset value (i.e., the app should add mediaOffset to every sample DTS)*/
+Bool gf_isom_get_edit_list_type(GF_ISOFile *the_file, u32 trackNumber, s64 *mediaOffset);
+
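+/* Caller-side sketch (illustrative names): when 0 is returned with a non-zero mediaOffset, the edit
+   list can be ignored and each sample DTS shifted instead:
+
+	s64 offset;
+	if (!gf_isom_get_edit_list_type(file, track, &offset) && offset) {
+		if ((offset<0) && (sample->DTS < (u64) -offset)) sample->DTS = 0;
+		else sample->DTS += offset;
+	}
+*/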
/*get the number of edited segment*/
u32 gf_isom_get_edit_segment_count(GF_ISOFile *the_file, u32 trackNumber);
GF_DASH_BSMODE_NONE = 0,
GF_DASH_BSMODE_INBAND,
GF_DASH_BSMODE_MERGED,
- GF_DASH_BSMODE_SINGLE,
- GF_DASH_BSMODE_SINGLE_MERGED,
-
+ GF_DASH_BSMODE_SINGLE
} GF_DashSwitchingMode;
GF_Err gf_dasher_segment_files(const char *mpd_name, GF_DashSegmenterInput *inputs, u32 nb_inputs, GF_DashProfile profile,
Bool use_url_template, Bool single_segment, Bool single_file, GF_DashSwitchingMode bitstream_switching_mode,
Bool segments_start_with_rap, Double dash_duration_sec, char *seg_rad_name, char *seg_ext,
Double frag_duration_sec, s32 subsegs_per_sidx, Bool daisy_chain_sidx, Bool fragments_start_with_rap, const char *tmp_dir,
- GF_Config *dash_ctx, u32 dash_dynamic, u32 mpd_update_time, u32 time_shift_depth, Double subduration);
+ GF_Config *dash_ctx, u32 dash_dynamic, u32 mpd_update_time, u32 time_shift_depth, Double subduration, Double min_buffer, u32 ast_shift_sec);
/*returns time to wait until end of currently generated segments*/
u32 gf_dasher_next_update_time(GF_Config *dash_ctx, u32 mpd_update_time);
GF_Err gf_rtp_streamer_append_sdp_extended(GF_RTPStreamer *rtp, u16 ESID, char *dsi, u32 dsi_len, GF_ISOFile *isofile, u32 isotrack, char *KMS_URI, u32 width, u32 height, char **out_sdp_buffer) ;
+GF_Err gf_rtp_streamer_append_sdp_decoding_dependency(GF_ISOFile *isofile, u32 isotrack, u8 *payload_type, char **out_sdp_buffer);
+
GF_Err gf_rtp_streamer_send_au(GF_RTPStreamer *rtp, char *data, u32 size, u64 cts, u64 dts, Bool is_rap);
GF_Err gf_rtp_streamer_send_au_with_sn(GF_RTPStreamer *rtp, char *data, u32 size, u64 cts, u64 dts, Bool is_rap, u32 inc_au_sn);
GF_Err gf_rtp_streamer_send_rtcp(GF_RTPStreamer *streamer, Bool force_ts, u32 rtp_ts);
+u8 gf_rtp_streamer_get_payload_type(GF_RTPStreamer *streamer);
+
/*! @} */
#ifdef __cplusplus
#undef USE_AVCODEC2
#endif
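+/*newer libavcodec deprecates avcodec_alloc_context()/avcodec_open() in favor of avcodec_alloc_context3()/avcodec_open2() - detect this here*/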
+#if (LIBAVCODEC_VERSION_MAJOR >= 54) && (LIBAVCODEC_VERSION_MINOR >= 35)
+#define USE_AVCTX3
+#endif
+
/**
frame = &ffd->base_frame;
}
if (!(*ctx)){
+
+#ifdef USE_AVCTX3
+ *ctx = avcodec_alloc_context3(NULL);
+#else
*ctx = avcodec_alloc_context();
+#endif
}
/*private FFMPEG DSI*/
if ((ffd->oti==GPAC_OTI_VIDEO_MPEG4_PART2) || (ffd->oti == GPAC_OTI_VIDEO_AVC)) {
/*if not set this may be a remap of non-mpeg4 transport (eg, transport on MPEG-TS) where
the DSI is carried in-band*/
- if (esd->decoderConfig->decoderSpecificInfo->data) {
+ if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
/*for regular MPEG-4, try to decode and if this fails try H263 decoder at first frame*/
if (ffd->oti==GPAC_OTI_VIDEO_MPEG4_PART2) {
(*ctx)->pix_fmt = ffd->raw_pix_fmt;
if ((*ctx)->extradata && strstr((*ctx)->extradata, "BottomUp")) ffd->flipped = 1;
} else {
+#ifdef USE_AVCTX3
+ if (avcodec_open2((*ctx), (*codec), NULL )<0) return GF_NON_COMPLIANT_BITSTREAM;
+#else
if (avcodec_open((*ctx), (*codec) )<0) return GF_NON_COMPLIANT_BITSTREAM;
+#endif
}
/*setup audio streams*/
here this means the DSI was broken, so no big deal*/
avcodec_close(ctx);
*codec = avcodec_find_decoder(CODEC_ID_H263);
+
+#ifdef USE_AVCTX3
+ if (! (*codec) || (avcodec_open2(ctx, *codec, NULL)<0)) return GF_NON_COMPLIANT_BITSTREAM;
+#else
if (! (*codec) || (avcodec_open(ctx, *codec)<0)) return GF_NON_COMPLIANT_BITSTREAM;
+#endif
+
#if USE_AVCODEC2
if (avcodec_decode_video2(ctx, frame, &gotpic, &pkt) < 0) {
#else
avcodec_close(ctx);
*codec = avcodec_find_decoder(old_codec);
assert(*codec);
+#ifdef USE_AVCTX3
+ avcodec_open2(ctx, *codec, NULL);
+#else
avcodec_open(ctx, *codec);
+#endif
return GF_NON_COMPLIANT_BITSTREAM;
}
}
}
}
- if (mmlevel == GF_CODEC_LEVEL_SEEK) return GF_OK;
+// if (mmlevel == GF_CODEC_LEVEL_SEEK) return GF_OK;
if (!gotpic) return GF_OK;
#define AVERROR_NOFMT AVERROR(EINVAL)
#endif /* AVERROR_NOFMT */
+
+#if (LIBAVFORMAT_VERSION_MAJOR >= 54) && (LIBAVFORMAT_VERSION_MINOR >= 20)
+
+#define av_find_stream_info(__c) avformat_find_stream_info(__c, NULL)
+#ifndef FF_API_FORMAT_PARAMETERS
+#define FF_API_FORMAT_PARAMETERS 1
+#endif
+
+#endif
+
static u32 FFDemux_Run(void *par)
{
AVPacket pkt;
Bool has_edit_list;
u32 sample_num;
+ s64 dts_offset;
/*for edit lists*/
u32 edit_sync_frame;
u64 sample_time, start, end;
break;
}
- ch->has_edit_list = gf_isom_get_edit_segment_count(ch->owner->mov, ch->track) ? 1 : 0;
+ ch->has_edit_list = gf_isom_get_edit_list_type(ch->owner->mov, ch->track, &ch->dts_offset) ? 1 : 0;
ch->has_rap = (gf_isom_has_sync_points(ch->owner->mov, ch->track)==1) ? 1 : 0;
ch->time_scale = gf_isom_get_media_timescale(ch->owner->mov, ch->track);
ch->last_state = GF_ISOM_INCOMPLETE_FILE;
} else if (ch->sample_num) {
ch->last_state = (ch->owner->frag_type==1) ? GF_OK : GF_EOS;
+ ch->to_init = 0;
}
} else {
ch->sample_time = ch->sample->DTS;
+ ch->to_init = 0;
}
- ch->to_init = 0;
ch->current_slh.decodingTimeStamp = ch->start;
ch->current_slh.compositionTimeStamp = ch->start;
ch->current_slh.randomAccessPointFlag = ch->sample ? ch->sample->IsRAP : 0;
} else {
/*if we get the same sample, figure out next interesting time (current sample + DTS gap to next sample should be a good bet)*/
if (prev_sample == ch->sample_num) {
- u32 time_diff = 2;
- u32 sample_num = ch->sample_num ? ch->sample_num : 1;
- GF_ISOSample *s1 = gf_isom_get_sample(ch->owner->mov, ch->track, sample_num, NULL);
- GF_ISOSample *s2 = gf_isom_get_sample(ch->owner->mov, ch->track, sample_num+1, NULL);
+ if (ch->owner->frag_type && (ch->sample_num==gf_isom_get_sample_count(ch->owner->mov, ch->track))) {
+ if (ch->sample)
+ gf_isom_sample_del(&ch->sample);
+ } else {
+ u32 time_diff = 2;
+ u32 sample_num = ch->sample_num ? ch->sample_num : 1;
+ GF_ISOSample *s1 = gf_isom_get_sample(ch->owner->mov, ch->track, sample_num, NULL);
+ GF_ISOSample *s2 = gf_isom_get_sample(ch->owner->mov, ch->track, sample_num+1, NULL);
- gf_isom_sample_del(&ch->sample);
+ gf_isom_sample_del(&ch->sample);
- if (s2 && s1) {
- assert(s2->DTS >= s1->DTS);
- time_diff = (u32) (s2->DTS - s1->DTS);
- e = gf_isom_get_sample_for_movie_time(ch->owner->mov, ch->track, ch->sample_time + time_diff, &ivar, GF_ISOM_SEARCH_FORWARD, &ch->sample, &ch->sample_num);
- } else if (s1 && !s2) {
- e = GF_EOS;
- }
- gf_isom_sample_del(&s1);
- gf_isom_sample_del(&s2);
+ if (s2 && s1) {
+ assert(s2->DTS >= s1->DTS);
+ time_diff = (u32) (s2->DTS - s1->DTS);
+ e = gf_isom_get_sample_for_movie_time(ch->owner->mov, ch->track, ch->sample_time + time_diff, &ivar, GF_ISOM_SEARCH_FORWARD, &ch->sample, &ch->sample_num);
+ } else if (s1 && !s2) {
+ e = GF_EOS;
+ }
+ gf_isom_sample_del(&s1);
+ gf_isom_sample_del(&s2);
+ }
}
/*we jumped to another segment - if RAP is needed look for closest rap in decoding order and
gf_isom_sample_del(&ch->sample);
goto fetch_next;
}
+ if (ch->sample && ch->dts_offset) {
+ if ( (ch->dts_offset<0) && (ch->sample->DTS < (u64) -ch->dts_offset)) {
+ ch->sample->DTS = 0;
+ } else {
+ ch->sample->DTS += ch->dts_offset;
+ }
+ }
}
if (!ch->sample) {
/*incomplete file - check if we're still downloading or not*/
e = compositor->video_out->ProcessEvent(compositor->video_out, &evt);
if (e) return e;
- if (compositor->has_size_info) {
- compositor->traverse_state->vp_size.x = INT2FIX(compositor->scene_width);
- compositor->traverse_state->vp_size.y = INT2FIX(compositor->scene_height);
- } else {
- compositor->traverse_state->vp_size.x = INT2FIX(compositor->output_width);
- compositor->traverse_state->vp_size.y = INT2FIX(compositor->output_height);
- }
compositor->was_opengl = evt.setup.opengl_mode;
- compositor->was_system_memory = evt.setup.system_memory;
-
+ compositor->was_system_memory = evt.setup.system_memory;
+ }
+ if (compositor->has_size_info) {
+ compositor->traverse_state->vp_size.x = INT2FIX(compositor->scene_width);
+ compositor->traverse_state->vp_size.y = INT2FIX(compositor->scene_height);
+ } else {
+ compositor->traverse_state->vp_size.x = INT2FIX(compositor->output_width);
+ compositor->traverse_state->vp_size.y = INT2FIX(compositor->output_height);
}
/*set scale factor*/
#pragma comment (linker, EXPORT_SYMBOL(gf_cfg_insert_key) )
#pragma comment (linker, EXPORT_SYMBOL(gf_cfg_del_section) )
#pragma comment (linker, EXPORT_SYMBOL(gf_cfg_get_filename) )
-
+#pragma comment (linker, EXPORT_SYMBOL(gf_cfg_set_filename) )
#ifndef GPAC_DISABLE_PLAYER
#pragma comment (linker, EXPORT_SYMBOL(gf_modules_new) )
#pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_sample_duration) )
#pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_sample_size) )
#pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_sample_sync) )
+#pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_edit_list_type) )
#pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_edit_segment_count) )
#pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_edit_segment) )
#pragma comment (linker, EXPORT_SYMBOL(gf_isom_get_copyright_count) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_del) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_append_sdp) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_append_sdp_extended) )
+#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_append_sdp_decoding_dependency) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_send_au) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_send_au_with_sn) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_send_data) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_format_sdp_header) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_disable_auto_rtcp) )
#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_send_rtcp) )
+#pragma comment (linker, EXPORT_SYMBOL(gf_rtp_streamer_get_payload_type) )
#pragma comment (linker, EXPORT_SYMBOL(gf_isom_streamer_new) )
case GF_RTP_PAYT_AC3:
strcpy(szMediaName, "audio");
strcpy(szPayloadName, "ac3");
- return 1;
+ return 1;
+ case GF_RTP_PAYT_H264_SVC:
+ strcpy(szMediaName, "video");
+ strcpy(szPayloadName, "H264-SVC");
+ return 1;
default:
strcpy(szMediaName, "");
strcpy(szPayloadName, "");
rtp_type = GF_RTP_PAYT_H264_AVC;
PL_ID = 0x0F;
break;
+ /*H264-SVC*/
+ case GPAC_OTI_VIDEO_SVC:
+ required_rate = 90000; /* "90 kHz clock rate MUST be used"*/
+ rtp_type = GF_RTP_PAYT_H264_SVC;
+ PL_ID = 0x0F;
+ break;
}
break;
sprintf(sdpLine, "a=fmtp:%d maxptime=%d\n", rtp->packetizer->PayloadType, rtp->packetizer->auh_size*20);
}
/*H264/AVC*/
- else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_AVC) {
+ else if (rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_AVC || rtp->packetizer->rtp_payt == GF_RTP_PAYT_H264_SVC) {
GF_AVCConfig *avcc = dsi ? gf_odf_avc_cfg_read(dsi, dsi_len) : NULL;
if (avcc) {
return GF_OK;
}
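+/*builds SDP decoding-dependency signalling for a layered track: an "a=mid" stream identifier plus,
+when the track has 'scal' (base layer) references, an "a=depend ... lay" line listing the payload
+types of the referenced tracks (cf. RFC 5583)*/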
+GF_EXPORT
+GF_Err gf_rtp_streamer_append_sdp_decoding_dependency(GF_ISOFile *isofile, u32 isotrack, u8 *payload_type, char **out_sdp_buffer)
+{
+ u32 size, i, ref_track;
+ s32 count;
+ char sdp[20000], sdpLine[10000];
+
+ sprintf(sdp, "a=mid:L%d\n", isotrack);
+
+ count = gf_isom_get_reference_count(isofile, isotrack, GF_ISOM_REF_SCAL);
+ if (count > 0)
+ {
+ sprintf(sdpLine, "a=depend:%d lay", payload_type[isotrack-1]);
+ strcat(sdp, sdpLine);
+ for (i = 0; i < (u32) count; i++)
+ {
+ gf_isom_get_reference(isofile, isotrack, GF_ISOM_REF_SCAL, i+1, &ref_track);
+ sprintf(sdpLine, " L%d:%d", ref_track, payload_type[ref_track-1]);
+ strcat(sdp, sdpLine);
+ }
+ strcat(sdp, "\n");
+ }
+
+ size = strlen(sdp) + (*out_sdp_buffer ? strlen(*out_sdp_buffer) : 0) + 1;
+ if ( !*out_sdp_buffer) {
+ *out_sdp_buffer = gf_malloc(sizeof(char)*size);
+ if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
+ strcpy(*out_sdp_buffer, sdp);
+ } else {
+ *out_sdp_buffer = gf_realloc(*out_sdp_buffer, sizeof(char)*size);
+ if (! *out_sdp_buffer) return GF_OUT_OF_MEM;
+ strcat(*out_sdp_buffer, sdp);
+ }
+ return GF_OK;
+}
+
GF_EXPORT
char *gf_rtp_streamer_format_sdp_header(char *app_name, char *ip_dest, char *session_name, char *iod64)
{
return gf_rtp_send_rtcp_report(streamer->channel, NULL, NULL);
}
+GF_EXPORT
+u8 gf_rtp_streamer_get_payload_type(GF_RTPStreamer *streamer)
+{
+ return streamer->packetizer->PayloadType;
+}
+
#endif /*GPAC_DISABLE_STREAMING && GPAC_DISABLE_ISOM*/
if (is_hevc) {
/*we already wrote this stuff*/
- if (nal_type==GF_HEVC_NALU_ACCESS_UNIT)
+ if (nal_type==GF_HEVC_NALU_ACCESS_UNIT) {
+ gf_bs_skip_bytes(src_bs, nal_size-2);
continue;
+ }
/*rewrite nal*/
gf_bs_read_data(src_bs, buffer, nal_size-2);
}
/*we already wrote this stuff*/
- if (nal_type==GF_AVC_NALU_ACCESS_UNIT)
+ if (nal_type==GF_AVC_NALU_ACCESS_UNIT) {
+ gf_bs_skip_bytes(src_bs, nal_size-1);
continue;
+ }
//extractor
if (nal_type == 31) {
u64 firstDTS;
#endif
u32 i;
+ Bool last_is_empty = 0;
u64 time, lastSampleTime;
s64 mtime;
GF_EdtsEntry *ent;
goto ent_found;
}
time += ent->segmentDuration;
+ last_is_empty = ent->segmentDuration ? 0 : 1;
}
+
+ if (last_is_empty) {
+ ent = (GF_EdtsEntry *)gf_list_last(trak->editBox->editList->entryList);
+ if (ent->mediaRate==1) {
+ *MediaTime = movieTime + ent->mediaTime;
+ } else {
+ ent = (GF_EdtsEntry *)gf_list_get(trak->editBox->editList->entryList, 0);
+ if (ent->mediaRate==-1) {
+ u64 dur = (u64) (ent->segmentDuration * scale_ts);
+ *MediaTime = (movieTime > dur) ? (movieTime-dur) : 0;
+ }
+ }
+ *useEdit = 0;
+ return GF_OK;
+ }
+
+
//we had nothing in the list (strange file but compliant...)
//return the 1 to 1 mapped vale of the last media sample
if (!ent) {
}
+//returns the edit list type and, for time-shift-only edit lists, sets the media time offset
+GF_EXPORT
+Bool gf_isom_get_edit_list_type(GF_ISOFile *the_file, u32 trackNumber, s64 *mediaOffset)
+{
+ GF_EdtsEntry *ent;
+ GF_TrackBox *trak;
+ u32 count;
+ trak = gf_isom_get_track_from_file(the_file, trackNumber);
+ if (!trak) return 0;
+ *mediaOffset = 0;
+ if (!trak->editBox || !trak->editBox->editList) return 0;
+
+ ent = gf_list_last(trak->editBox->editList->entryList);
+ if (!ent || ent->segmentDuration) return 1;
+
+ count = gf_list_count(trak->editBox->editList->entryList);
+ /*mediaRate>0, the track playback shall start at media time>0 -> mediaOffset is < 0 */
+ if ((count==1) && (ent->mediaRate==1)) {
+ *mediaOffset = - ent->mediaTime;
+ return 0;
+ } else if (count==2) {
+ ent = gf_list_get(trak->editBox->editList->entryList, 0);
+ /*mediaRate==-1, the track playback shall be empty for segmentDuration -> mediaOffset is > 0 */
+ if (ent->mediaRate==-1) {
+ Double time = (Double) ent->segmentDuration;
+ time /= trak->moov->mvhd->timeScale;
+ time *= trak->Media->mediaHeader->timeScale;
+
+ *mediaOffset = (s64) time;
+ return 0;
+ }
+ }
+ return 1;
+}
+
+
//get the number of edited segment
GF_EXPORT
u32 gf_isom_get_edit_segment_count(GF_ISOFile *the_file, u32 trackNumber)
//if we have an edit list, the duration is the sum of all the editList
//entries' duration (always expressed in MovieTimeScale)
- if (trak->editBox && !trak->editBox->last_is_empty && trak->editBox->editList) {
+ if (trak->editBox && trak->editBox->editList) {
trackDuration = 0;
elst = trak->editBox->editList;
i=0;
while ((ent = (GF_EdtsEntry*)gf_list_enum(elst->entryList, &i))) {
trackDuration += ent->segmentDuration;
- if (ent->mediaRate && !ent->segmentDuration) {
- trak->editBox->last_is_empty = 1;
- }
- }
-
- if (trak->editBox->last_is_empty) {
- ent = (GF_EdtsEntry*) gf_list_last(elst->entryList);
- ent->segmentDuration = trackDuration;
}
}
+ if (!trackDuration) {
+ trackDuration = (trak->Media->mediaHeader->duration * trak->moov->mvhd->timeScale) / trak->Media->mediaHeader->timeScale;
+ }
trak->Header->duration = trackDuration;
trak->Header->modificationTime = gf_isom_get_mp4time();
return GF_OK;
e = GF_NON_COMPLIANT_BITSTREAM;
goto exit;
}
+ /*nal hdr*/ gf_bs_read_int(bs, 8);
+
*pps_id = bs_get_ue(bs);
*sps_id = bs_get_ue(bs);
u32 nb_segments_done;\r
\r
Bool segment_must_be_streamed;\r
+ Bool broken_timing;\r
\r
u32 force_representation_idx_plus_one;\r
\r
return 0;\r
}\r
\r
-static void gf_dash_group_timeline_setup(GF_MPD *mpd, GF_DASH_Group *group)\r
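+/*returns the current UTC clock in seconds (Unix epoch), derived from NTP and adjusted for the local timezone*/\r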
+static u64 gf_dash_get_utc_clock()\r
{\r
u32 sec, frac;\r
+\r
#ifndef _WIN32_WCE\r
time_t gtime;\r
struct tm *_t;\r
#endif\r
u64 current_time;\r
- \r
- if (mpd->type==GF_MPD_TYPE_STATIC) \r
- return;\r
- \r
- /*M3U8 does not use NTP sync */\r
- if (group->dash->is_m3u8)\r
- return;\r
\r
gf_net_get_ntp(&sec, &frac);\r
\r
#ifndef _WIN32_WCE\r
gtime = sec - GF_NTP_SEC_1900_TO_1970;\r
_t = gmtime(>ime);\r
- current_time = mktime(_t);\r
+\r
+#ifdef GPAC_ANDROID\r
+ {\r
+		/*FIXME - find a safe way to estimate the timezone, this does not work!*/\r
+ s32 t_timezone;\r
+ struct tm t_gmt, t_local;\r
+ time_t t_time;\r
+ t_time = time(NULL);\r
+ t_gmt = *gmtime(&t_time);\r
+ t_local = *localtime(&t_time);\r
+ \r
+ t_timezone = (t_gmt.tm_hour - t_local.tm_hour) * 3600;\r
+ current_time = mktime(_t) - t_timezone;\r
+ }\r
+#else\r
+ current_time = mktime(_t) - timezone;\r
+#endif\r
+\r
#else\r
current_time = sec - GF_NTP_SEC_1900_TO_1970;\r
#endif\r
+ return current_time;\r
+}\r
+\r
+static void gf_dash_group_timeline_setup(GF_MPD *mpd, GF_DASH_Group *group)\r
+{\r
+ GF_MPD_SegmentTimeline *timeline = NULL;\r
+ GF_MPD_Representation *rep = NULL;\r
+ u32 shift, timescale;\r
+ u64 current_time;\r
+ \r
+ if (mpd->type==GF_MPD_TYPE_STATIC) \r
+ return;\r
+ \r
+ /*M3U8 does not use NTP sync */\r
+ if (group->dash->is_m3u8)\r
+ return;\r
+\r
+ current_time = gf_dash_get_utc_clock();\r
+\r
if (current_time < mpd->availabilityStartTime) current_time = 0;\r
else current_time -= mpd->availabilityStartTime;\r
\r
- if (current_time < group->period->start) current_time = 0;\r
- else current_time -= group->period->start;\r
+ if (current_time < group->period->start/1000) current_time = 0;\r
+ else current_time -= group->period->start/1000;\r
\r
#if 0\r
{\r
}\r
#endif\r
\r
- frac = mpd->time_shift_buffer_depth/1000;\r
- if (current_time < frac) current_time = 0;\r
- else current_time -= frac;\r
+ if (0 && ((s32) mpd->time_shift_buffer_depth>=0)) {\r
+ shift = mpd->time_shift_buffer_depth/1000;\r
+ if (current_time < shift) current_time = 0;\r
+ else current_time -= shift;\r
+ }\r
+\r
+ timeline = NULL;\r
+ timescale=1;\r
+ rep = gf_list_get(group->adaptation_set->representations, group->active_rep_index);\r
+\r
+ if (group->period->segment_list) {\r
+ if (group->period->segment_list->segment_timeline) timeline = group->period->segment_list->segment_timeline;\r
+ if (group->period->segment_list->timescale) timescale = group->period->segment_list->timescale;\r
+ }\r
+ if (group->adaptation_set->segment_list) {\r
+ if (group->adaptation_set->segment_list->segment_timeline) timeline = group->adaptation_set->segment_list->segment_timeline;\r
+ if (group->adaptation_set->segment_list->timescale) timescale = group->adaptation_set->segment_list->timescale;\r
+ }\r
+ if (rep->segment_list) {\r
+ if (rep->segment_list->segment_timeline) timeline = rep->segment_list->segment_timeline;\r
+ if (rep->segment_list->timescale) timescale = rep->segment_list->timescale;\r
+ }\r
+\r
+ if (group->period->segment_template) {\r
+ if (group->period->segment_template->segment_timeline) timeline = group->period->segment_template->segment_timeline;\r
+ if (group->period->segment_template->timescale) timescale = group->period->segment_template->timescale;\r
+ }\r
+ if (group->adaptation_set->segment_template) {\r
+ if (group->adaptation_set->segment_template->segment_timeline) timeline = group->adaptation_set->segment_template->segment_timeline;\r
+ if (group->adaptation_set->segment_template->timescale) timescale = group->adaptation_set->segment_template->timescale;\r
+ }\r
+ if (rep->segment_template) {\r
+ if (rep->segment_template->segment_timeline) timeline = rep->segment_template->segment_timeline;\r
+ if (rep->segment_template->timescale) timescale = rep->segment_template->timescale;\r
+ }\r
\r
+ if (timeline) {\r
+ u64 start_segtime = 0;\r
+ u64 segtime = 0;\r
+ u32 i, seg_idx = 0;\r
+ current_time *= timescale;\r
+ for (i=0; i<gf_list_count(timeline->entries); i++) {\r
+ u32 repeat;\r
+ GF_MPD_SegmentTimelineEntry *ent = gf_list_get(timeline->entries, i);\r
+ if (!segtime) start_segtime = segtime = ent->start_time;\r
+\r
+ repeat = 1+ent->repeat_count;\r
+ while (repeat) {\r
+ if ((current_time >= segtime) && (current_time < segtime + ent->duration)) {\r
+ group->download_segment_index = seg_idx;\r
+ group->nb_segments_in_rep = seg_idx + 10;\r
+					GF_LOG(GF_LOG_INFO, GF_LOG_DASH, ("[DASH] Found segment %d for current time "LLU" in SegmentTimeline ["LLU"-"LLU"]\n", seg_idx, current_time, segtime, segtime + ent->duration));\r
+ return;\r
+ }\r
+ segtime += ent->duration;\r
+ repeat--;\r
+ seg_idx++;\r
+ }\r
+ }\r
+ //NOT FOUND !!\r
+ GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[DASH] current time "LLU" is NOT in SegmentTimeline ["LLU"-"LLU"] - cannot estimate current startNumber, default to 0 ...\n", current_time, start_segtime, segtime));\r
+ group->download_segment_index = 0;\r
+ group->nb_segments_in_rep = 10;\r
+ group->broken_timing = 1;\r
+ return;\r
+ }\r
\r
if (group->segment_duration) {\r
Double nb_seg = (Double) current_time;\r
nb_seg /= group->segment_duration;\r
- frac = (u32) nb_seg;\r
- group->download_segment_index = frac;\r
- group->nb_segments_in_rep = frac + 10;\r
+ shift = (u32) nb_seg;\r
+ group->download_segment_index = shift;\r
+ group->nb_segments_in_rep = shift + 10;\r
} else {\r
GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[DASH] Segment duration unknown - cannot estimate current startNumber\n"));\r
}\r
}\r
}\r
\r
-static u64 gf_dash_segment_timeline_start(GF_MPD_SegmentTimeline *timeline, u32 segment_index)\r
+static u64 gf_dash_segment_timeline_start(GF_MPD_SegmentTimeline *timeline, u32 segment_index, u64 *segment_duration)\r
{\r
u64 start_time = 0;\r
u32 i, idx, k;\r
GF_MPD_SegmentTimelineEntry *ent = gf_list_get(timeline->entries, i);\r
if (ent->start_time) start_time = ent->start_time; \r
for (k=0; k<ent->repeat_count+1; k++) {\r
- if (idx==segment_index) \r
+ if (idx==segment_index) {\r
+ if (segment_duration) \r
+ *segment_duration = ent->duration;\r
return start_time;\r
+ }\r
idx ++;\r
start_time += ent->duration;\r
}\r
return start_time;\r
}\r
\r
-static Double gf_dash_get_segment_start_time(GF_DASH_Group *group)\r
+static Double gf_dash_get_segment_start_time(GF_DASH_Group *group, Double *segment_duration)\r
{\r
GF_MPD_Representation *rep;\r
GF_MPD_AdaptationSet *set;\r
rep = gf_list_get(group->adaptation_set->representations, group->active_rep_index);\r
set = group->adaptation_set;\r
period = group->period;\r
- segment_index = group->download_segment_index - group->nb_cached_segments;\r
+ segment_index = 0;\r
+ if (group->download_segment_index >= group->nb_cached_segments)\r
+ segment_index = group->download_segment_index - group->nb_cached_segments;\r
\r
/*single segment: return nothing*/\r
if (rep->segment_base || set->segment_base || period->segment_base) {\r
if (! timescale) timescale=1;\r
\r
if (timeline) {\r
- start_time = (Double) gf_dash_segment_timeline_start(timeline, segment_index);\r
+ start_time = (Double) gf_dash_segment_timeline_start(timeline, segment_index, &duration);\r
} else {\r
start_time = segment_index * (Double) duration;\r
}\r
start_time /= timescale;\r
+ if (segment_duration) {\r
+ *segment_duration = (Double) duration; \r
+ *segment_duration /= timescale; \r
+ }\r
return start_time;\r
}\r
\r
if (!timescale) timescale=1;\r
\r
if (timeline) {\r
- start_time = (Double) gf_dash_segment_timeline_start(timeline, segment_index);\r
+ start_time = (Double) gf_dash_segment_timeline_start(timeline, segment_index, &duration);\r
} else {\r
start_time = segment_index * (Double) duration;\r
}\r
start_time /= timescale;\r
+ if (segment_duration) {\r
+ *segment_duration = (Double) duration; \r
+ *segment_duration /= timescale; \r
+ }\r
+\r
return start_time;\r
}\r
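+/*estimated UTC availability start time (in seconds) of the current segment: MPD availabilityStartTime\r
+  + period start + segment start time + segment duration (a segment is only available once fully produced)*/\r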
\r
+u64 gf_dash_get_segment_availability_start_time(GF_MPD *mpd, GF_DASH_Group *group, u32 segment_index)\r
+{\r
+ Double seg_ast, seg_dur;\r
+ seg_ast = gf_dash_get_segment_start_time(group, &seg_dur);\r
+\r
+ seg_ast += seg_dur;\r
+ seg_ast += group->period->start/1000;\r
+ seg_ast += mpd->availabilityStartTime;\r
+ return (u64) seg_ast;\r
+}\r
+\r
static void gf_dash_resolve_duration(GF_MPD_Representation *rep, GF_MPD_AdaptationSet *set, GF_MPD_Period *period, u64 *out_duration, u32 *out_timescale, u64 *out_pts_offset, GF_MPD_SegmentTimeline **out_segment_timeline)\r
{\r
u32 timescale = 0;\r
\r
for (group_idx=0; group_idx<gf_list_count(dash->groups); group_idx++) {\r
GF_DASH_Group *group = gf_list_get(dash->groups, group_idx);\r
- Double group_start = gf_dash_get_segment_start_time(group);\r
+ Double group_start = gf_dash_get_segment_start_time(group, NULL);\r
if (!group_idx || (timeline_start_time > group_start) ) timeline_start_time = group_start;\r
}\r
/*we can rewind our segments from timeshift*/\r
/* Now that the playlist is up to date, we can check again */\r
if (group->download_segment_index >= group->nb_segments_in_rep) {\r
/* if there is a specified update period, we redo the whole process */\r
- if (dash->mpd->minimum_update_period ) {\r
+ if (dash->mpd->minimum_update_period || dash->mpd->type==GF_MPD_TYPE_DYNAMIC) {\r
continue;\r
} else {\r
/* if not, we are really at the end of the playlist, we can quit */\r
continue;\r
}\r
\r
+			/*check the availability start time of the segment in live mode*/\r
+ if (!group->broken_timing && (dash->mpd->type==GF_MPD_TYPE_DYNAMIC) && !dash->is_m3u8) {\r
+ u64 segment_ast = gf_dash_get_segment_availability_start_time(dash->mpd, group, group->download_segment_index);\r
+ u64 now = gf_dash_get_utc_clock();\r
+ /*if segment AST is greater than now, it is not yet available ...*/\r
+ if (segment_ast > now ) {\r
+				GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DASH] Next segment %d is not yet available on server - requesting later (in %d s)\n", group->download_segment_index, (u32) (segment_ast - now)));\r
+ gf_mx_v(dash->dl_mutex);\r
+ continue;\r
+ }\r
+ }\r
+\r
/* At this stage, there are some segments left to be downloaded */\r
e = gf_dash_resolve_url(dash->mpd, rep, group, dash->base_url, GF_DASH_RESOLVE_URL_MEDIA, group->download_segment_index, &new_base_seg_url, &start_range, &end_range, &group->current_downloaded_segment_duration, NULL);\r
gf_mx_v(dash->dl_mutex);\r
Double gf_dash_group_current_segment_start_time(GF_DashClient *dash, u32 idx)\r
{\r
GF_DASH_Group *group = gf_list_get(dash->groups, idx);\r
- return gf_dash_get_segment_start_time(group);\r
+ return gf_dash_get_segment_start_time(group, NULL);\r
}\r
\r
GF_EXPORT\r
nb_video = nb_audio = nb_text = nb_scene = 0;\r
//duplicates all tracks\r
for (i=0; i<gf_isom_get_track_count(input); i++) {\r
- u32 _w, _h, _sr, _nb_ch;\r
+ u32 _w, _h, _sr, _nb_ch, avctype;\r
\r
u32 mtype = gf_isom_get_media_type(input, i+1);\r
if (mtype == GF_ISOM_MEDIA_HINT) continue;\r
\r
if (! dash_moov_setup) {\r
- u32 avctype;\r
e = gf_isom_clone_track(input, i+1, output, 0, &TrackNum);\r
if (e) goto err_exit;\r
\r
\r
continue;\r
}\r
- avctype = gf_isom_get_avc_svc_type(input, i+1, 1);\r
- if (avctype==GF_ISOM_AVCTYPE_AVC_ONLY) {\r
- /*for AVC we concatenate SPS/PPS*/\r
- if (dash_cfg->inband_param_set) \r
- gf_isom_set_nalu_extract_mode(input, i+1, GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG);\r
- }\r
- else if (avctype > GF_ISOM_AVCTYPE_AVC_ONLY) {\r
- /*for SVC we don't want any rewrite of extractors, and we don't concatenate SPS/PPS*/\r
- gf_isom_set_nalu_extract_mode(input, i+1, GF_ISOM_NALU_EXTRACT_INSPECT);\r
- }\r
} else {\r
TrackNum = gf_isom_get_track_by_id(output, gf_isom_get_track_id(input, i+1));\r
count = gf_isom_get_sample_count(input, i+1);\r
}\r
\r
+ /*set extraction mode whether setup or not*/\r
+ avctype = gf_isom_get_avc_svc_type(input, i+1, 1);\r
+ if (avctype==GF_ISOM_AVCTYPE_AVC_ONLY) {\r
+ /*for AVC we concatenate SPS/PPS*/\r
+ if (dash_cfg->inband_param_set) \r
+ gf_isom_set_nalu_extract_mode(input, i+1, GF_ISOM_NALU_EXTRACT_INBAND_PS_FLAG);\r
+ }\r
+ else if (avctype > GF_ISOM_AVCTYPE_AVC_ONLY) {\r
+ /*for SVC we don't want any rewrite of extractors, and we don't concatenate SPS/PPS*/\r
+ gf_isom_set_nalu_extract_mode(input, i+1, GF_ISOM_NALU_EXTRACT_INSPECT);\r
+ }\r
+\r
if (mtype == GF_ISOM_MEDIA_VISUAL) nb_video++;\r
else if (mtype == GF_ISOM_MEDIA_AUDIO) nb_audio++;\r
else if (mtype == GF_ISOM_MEDIA_TEXT) nb_text++;\r
&defaultDuration, &defaultSize, &defaultDescriptionIndex, &defaultRandomAccess, &defaultPadding, &defaultDegradationPriority);\r
}\r
\r
- gf_media_get_rfc_6381_codec_name(output, TrackNum, szCodec);\r
+ gf_media_get_rfc_6381_codec_name(input, i+1, szCodec);\r
if (strlen(szCodecs)) strcat(szCodecs, ",");\r
strcat(szCodecs, szCodec);\r
\r
switch_segment = 0;\r
first_sample_in_segment = 1;\r
\r
- if (dash_cfg && dash_cfg->subduration && (segment_start_time>=1000*dash_cfg->subduration)) {\r
- /*done with file (exceeded the requested duration) : store all fragmenters state and abord*/\r
+ if (dash_cfg && dash_cfg->subduration && (segment_start_time + MaxSegmentDuration/2 >= 1000*dash_cfg->subduration)) {\r
+			/*done with the file (the next segment would exceed the requested subduration by more than half a segment): store all fragmenters' state and abort*/\r
break;\r
}\r
\r
}\r
}\r
}\r
+\r
gf_isom_get_sample_padding_bits(input, tf->OriginalTrack, tf->SampleNum+1, &NbBits);\r
\r
next = gf_isom_get_sample(input, tf->OriginalTrack, tf->SampleNum + 2, &j);\r
split_at_rap = 1;\r
/*force new segment*/\r
force_switch_segment = 1;\r
+ stop_frag = 1;\r
}\r
\r
if (tf->all_sample_raps) {\r
if (split_seg_at_rap && SegmentDuration && (SegmentDuration + MaxFragmentDuration >= MaxSegmentDuration)) {\r
if (!split_at_rap) {\r
split_at_rap = 1;\r
- MaxFragmentDuration = MaxSegmentDuration - SegmentDuration;\r
}\r
}\r
}\r
\r
dur = (Double) gf_isom_get_track_duration(in, i+1);\r
dur /= gf_isom_get_timescale(in);\r
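+	/*track duration may be 0 (e.g. fragmented or freshly created files); fall back to the media duration in that case*/\r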
+ if (!dur) {\r
+ dur = (Double) gf_isom_get_media_duration(in, i+1);\r
+ dur /= gf_isom_get_media_timescale(in, i+1);\r
+ }\r
if (dur > input->duration) input->duration = dur;\r
\r
input->components[input->nb_components].ID = gf_isom_get_track_id(in, i+1);\r
return GF_NOT_SUPPORTED;\r
}\r
\r
-static GF_Err write_mpd_header(FILE *mpd, const char *mpd_name, GF_Config *dash_ctx, GF_DashProfile profile, Bool is_mpeg2, const char *title, const char *source, const char *copyright, const char *moreInfoURL, const char **mpd_base_urls, u32 nb_mpd_base_urls, Bool dash_dynamic, u32 time_shift_depth, Double mpd_duration, Double mpd_update_period)\r
+static GF_Err write_mpd_header(FILE *mpd, const char *mpd_name, GF_Config *dash_ctx, GF_DashProfile profile, Bool is_mpeg2, const char *title, const char *source, const char *copyright, const char *moreInfoURL, const char **mpd_base_urls, u32 nb_mpd_base_urls, Bool dash_dynamic, u32 time_shift_depth, Double mpd_duration, Double mpd_update_period, Double min_buffer, u32 ast_shift_sec)\r
{\r
u32 h, m, i;\r
Double s;\r
fprintf(mpd, "<!-- MPD file Generated with GPAC version "GPAC_FULL_VERSION" -->\n");\r
\r
/*TODO what should we put for minBufferTime */\r
- fprintf(mpd, "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\" minBufferTime=\"PT1.5S\" type=\"%s\"", dash_dynamic ? "dynamic" : "static"); \r
+ fprintf(mpd, "<MPD xmlns=\"urn:mpeg:dash:schema:mpd:2011\" minBufferTime=\"PT%fS\" type=\"%s\"", min_buffer, dash_dynamic ? "dynamic" : "static"); \r
if (dash_dynamic) {\r
#ifndef _WIN32_WCE\r
u32 sec, frac;\r
sscanf(opt, "%u", &sec);\r
}\r
gtime = sec - GF_NTP_SEC_1900_TO_1970;\r
+ gtime += ast_shift_sec;\r
t = gmtime(>ime);\r
fprintf(mpd, " availabilityStartTime=\"%d-%02d-%02dT%02d:%02d:%02dZ\"", 1900+t->tm_year, t->tm_mon+1, t->tm_mday, t->tm_hour, t->tm_min, t->tm_sec);\r
#endif\r
Bool use_url_template, Bool single_segment, Bool single_file, GF_DashSwitchingMode bitstream_switching, \r
Bool seg_at_rap, Double dash_duration, char *seg_name, char *seg_ext,\r
Double frag_duration, s32 subsegs_per_sidx, Bool daisy_chain_sidx, Bool frag_at_rap, const char *tmpdir,\r
- GF_Config *dash_ctx, u32 dash_dynamic, u32 mpd_update_time, u32 time_shift_depth, Double subduration)\r
+ GF_Config *dash_ctx, u32 dash_dynamic, u32 mpd_update_time, u32 time_shift_depth, Double subduration, Double min_buffer, u32 ast_shift_sec)\r
{\r
u32 i, j, segment_mode;\r
char *sep, szSegName[GF_MAX_PATH], szSolvedSegName[GF_MAX_PATH], szTempMPD[GF_MAX_PATH];\r
const char *opt = gf_cfg_get_key(dash_ctx, "DASH", "MaxSegmentDuration");\r
if (opt) {\r
Double seg_dur = atof(opt);\r
+/*\r
if (seg_dur != dash_duration) {\r
return GF_NOT_SUPPORTED;\r
}\r
+*/\r
+ dash_duration = seg_dur;\r
} else {\r
char sOpt[100];\r
sprintf(sOpt, "%f", dash_duration);\r
strcpy(szTempMPD, mpdfile);\r
if (dash_dynamic) strcat(szTempMPD, ".tmp");\r
\r
- mpd = gf_f64_open(mpdfile, "wt");\r
+ mpd = gf_f64_open(szTempMPD, "wt");\r
if (!mpd) {\r
GF_LOG(GF_LOG_INFO, GF_LOG_AUTHOR, ("[MPD] Cannot open MPD file %s for writing\n", szTempMPD));\r
return GF_IO_ERR;\r
\r
dash_opts.mpd = mpd;\r
\r
- e = write_mpd_header(mpd, mpdfile, dash_ctx, dash_profile, has_mpeg2, mpd_title, mpd_source, mpd_copyright, mpd_moreInfoURL, (const char **) mpd_base_urls, nb_mpd_base_urls, dash_dynamic, time_shift_depth, presentation_duration, mpd_update_time);\r
+ e = write_mpd_header(mpd, mpdfile, dash_ctx, dash_profile, has_mpeg2, mpd_title, mpd_source, mpd_copyright, mpd_moreInfoURL, (const char **) mpd_base_urls, nb_mpd_base_urls, dash_dynamic, time_shift_depth, presentation_duration, mpd_update_time, min_buffer, ast_shift_sec);\r
if (e) goto exit;\r
\r
for (cur_period=0; cur_period<max_period; cur_period++) {\r
/*to sync looping sessions with tracks of # length*/
u32 duration_ms;
+
+	/*base track if this stream carries a media decoding dependency, 0 otherwise*/
+ u32 base_track;
};
FILE *sdp_out;
char filename[GF_MAX_PATH];
char sdpLine[20000];
+ u32 t, count;
+ u8 *payload_type;
strcpy(filename, sdpfilename ? sdpfilename : "videosession.sdp");
sdp_out = gf_f64_open(filename, "wt");
fprintf(sdp_out, "%s\n", sdpLine);
sprintf(sdpLine, "t=0 0");
fprintf(sdp_out, "%s\n", sdpLine);
- sprintf(sdpLine, "a=x-copyright: Streamed with GPAC (C)2000-200X - http://gpac.sourceforge.net\n");
+ sprintf(sdpLine, "a=x-copyright: Streamed with GPAC (C)2000-200X - http://gpac.sourceforge.net");
fprintf(sdp_out, "%s\n", sdpLine);
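+	/*if a base track was detected, advertise the decoding-dependency group: "a=group:DDP" lists
+	the base stream and every track that references it as base layer (cf. RFC 5583)*/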
+ if (streamer->base_track)
+ {
+ sprintf(sdpLine, "a=group:DDP L%d", streamer->base_track);
+ fprintf(sdp_out, "%s", sdpLine);
+ count = gf_isom_get_track_count(streamer->isom);
+ for (t = 0; t < count; t++)
+ {
+ if (gf_isom_has_track_reference(streamer->isom, t+1, GF_ISOM_REF_BASE, gf_isom_get_track_id(streamer->isom, streamer->base_track)))
+ {
+ sprintf(sdpLine, " L%d", t+1);
+ fprintf(sdp_out, "%s", sdpLine);
+ }
+ }
+ fprintf(sdp_out, "\n");
+ }
}
+ /*prepare array of payload type*/
+ count = gf_isom_get_track_count(streamer->isom);
+ payload_type = (u8 *)gf_malloc(count * sizeof(u8));
+ track = streamer->stream;
+ while (track) {
+ payload_type[track->track_num-1] = gf_rtp_streamer_get_payload_type(track->rtp);
+ track = track->next;
+ }
+
+
track = streamer->stream;
while (track) {
char *sdp_media=NULL;
/*TODO retrieve DIMS content encoding from track to set the flags */
gf_rtp_streamer_append_sdp_extended(track->rtp, gf_isom_get_track_id(streamer->isom, track->track_num), dsi, dsi_len, streamer->isom, track->track_num, (char *)KMS, w, h, &sdp_media);
+ if (streamer->base_track)
+ gf_rtp_streamer_append_sdp_decoding_dependency(streamer->isom, track->track_num, payload_type, &sdp_media);
if (sdp_media) {
fprintf(sdp_out, "%s", sdp_media);
gf_free(sdp_media);
(*out_sdp_buffer)[size]=0;
}
+ gf_free(payload_type);
return GF_OK;
}
u16 first_port;
u32 nb_tracks;
u32 sess_data_size;
+ u32 base_track;
if (!ip_dest) ip_dest = "127.0.0.1";
if (!port) port = 7000;
case GF_ISOM_SUBTYPE_AVC4_H264:
case GF_ISOM_SUBTYPE_SVC_H264:
{
- GF_AVCConfig *avcc = gf_isom_avc_config_get(streamer->isom, track->track_num, 1);
- track->avc_nalu_size = avcc->nal_unit_size;
- gf_odf_avc_cfg_del(avcc);
- streamType = GF_STREAM_VISUAL;
- oti = GPAC_OTI_VIDEO_AVC;
+ GF_AVCConfig *avcc, *svcc;
+ avcc = gf_isom_avc_config_get(streamer->isom, track->track_num, 1);
+ if (avcc)
+ {
+ track->avc_nalu_size = avcc->nal_unit_size;
+ gf_odf_avc_cfg_del(avcc);
+ streamType = GF_STREAM_VISUAL;
+ oti = GPAC_OTI_VIDEO_AVC;
+ }
+ svcc = gf_isom_svc_config_get(streamer->isom, track->track_num, 1);
+ if (svcc)
+ {
+ track->avc_nalu_size = svcc->nal_unit_size;
+ gf_odf_avc_cfg_del(svcc);
+ streamType = GF_STREAM_VISUAL;
+ oti = GPAC_OTI_VIDEO_SVC;
+ }
+ break;
}
break;
default:
payt++;
track->microsec_ts_scale = 1000000;
track->microsec_ts_scale /= gf_isom_get_media_timescale(streamer->isom, track->track_num);
+
+ /*does this stream have the decoding dependency ?*/
+ gf_isom_get_reference(streamer->isom, track->track_num, GF_ISOM_REF_BASE, 1, &base_track);
+ if (base_track)
+ streamer->base_track = base_track;
}
return streamer;
if (ok) {
u64 res;
struct tm _t;
+ memset(&_t, 0, sizeof(struct tm));
_t.tm_year = (year > 1900) ? year - 1900 : 0;
_t.tm_mon = month ? month - 1 : 0;
_t.tm_mday = day;
_t.tm_hour = h;
_t.tm_min = m;
_t.tm_sec = (u32) s;
- res = mktime(&_t);
+
+#ifdef GPAC_ANDROID
+ {
+	/*FIXME - find a safe way to estimate the timezone, this does not work!*/
+ s32 t_timezone;
+ struct tm t_gmt, t_local;
+ time_t t_time;
+ t_time = time(NULL);
+ t_gmt = *gmtime(&t_time);
+ t_local = *localtime(&t_time);
+
+ t_timezone = (t_gmt.tm_hour - t_local.tm_hour) * 3600;
+ res = mktime(&_t) - t_timezone;
+ }
+#else
+ res = mktime(&_t) - timezone;
+#endif
if (om || oh) {
s32 diff = (60*oh + om)*60;
if (neg_time_zone) diff = -diff;
GF_EXPORT
#if (JS_VERSION>=185)
-#ifdef USE_FFDEV_15
+#if defined(USE_FFDEV_18)
+JSBool gf_sg_js_has_instance(JSContext *c, JSHandleObject obj, JSMutableHandleValue __val, JSBool *vp)
+#elif defined(USE_FFDEV_15)
JSBool gf_sg_js_has_instance(JSContext *c, JSHandleObject obj,const jsval *val, JSBool *vp)
#else
JSBool gf_sg_js_has_instance(JSContext *c, JSObject *obj,const jsval *val, JSBool *vp)
JSBool gf_sg_js_has_instance(JSContext *c, JSObject *obj, jsval val, JSBool *vp)
#endif
{
+#ifdef USE_FFDEV_18
+ jsval *val = __val._;
+#endif
*vp = JS_FALSE;
#if (JS_VERSION>=185)
if (val && JSVAL_IS_OBJECT(*val)) {
GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Exit decode loop because no more input data\n", codec->decio->module_name));
return GF_OK;
}
+ now = gf_term_get_time(codec->odm->term) - entryTime;
/*escape from decoding loop only if above critical limit - this is to avoid starvation on audio*/
if (!ch->esd->dependsOnESID && (codec->CB->UnitCount > codec->CB->Min)) {
- now = gf_term_get_time(codec->odm->term);
- if (now - entryTime >= TimeAvailable) {
- GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Exit decode loop because time is up: %d vs %d available\n", codec->decio->module_name, now - entryTime, TimeAvailable));
+ if (now >= TimeAvailable) {
+ GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Exit decode loop because time is up: %d vs %d available\n", codec->decio->module_name, now, TimeAvailable));
return GF_OK;
}
+ } else if (now >= 10*TimeAvailable) {
+ GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[%s] Exit decode loop because running for too long: %d vs %d available\n", codec->decio->module_name, now, TimeAvailable));
+ return GF_OK;
}
Decoder_GetNextAU(codec, &ch, &AU);
if (!ch || !AU) return GF_OK;
gf_free(keyValue);
return NULL;
}
+
+GF_EXPORT
+GF_Err gf_cfg_set_filename(GF_Config *iniFile, const char * fileName)
+{
+ if (!fileName) return GF_OK;
+ if (iniFile->fileName) gf_free(iniFile->fileName);
+ iniFile->fileName = gf_strdup(fileName);
+ return iniFile->fileName ? GF_OK : GF_OUT_OF_MEM;
+}