in applications/mp42ts/main.c [1380:1783]
static Bool open_source(M2TSSource *source, char *src, u32 carousel_rate, u32 mpeg4_signaling, char *update, char *audio_input_ip, u16 audio_input_port, char *video_buffer, Bool force_real_time, u32 bifs_use_pes, const char *temi_url, Bool compute_max_size, Bool insert_ntp)
{
#ifndef GPAC_DISABLE_STREAMING
GF_SDPInfo *sdp;
#endif
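/*track the most negative per-track timestamp offset so all streams can later be shifted and no timestamp stays negative*/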
s64 min_offset = 0;
memset(source, 0, sizeof(M2TSSource));
source->mpeg4_signaling = mpeg4_signaling;
/*open ISO file*/
#ifndef GPAC_DISABLE_ISOM
if (gf_isom_probe_file(src)) {
u32 i;
u32 nb_tracks;
Bool has_bifs_od = 0;
u32 first_audio = 0;
u32 first_other = 0;
source->mp4 = gf_isom_open(src, GF_ISOM_OPEN_READ, 0);
source->nb_streams = 0;
source->real_time = force_real_time;
/*on MPEG-2 TS, carry 3GPP timed text as MPEG-4 Part17*/
gf_isom_text_set_streaming_mode(source->mp4, 1);
nb_tracks = gf_isom_get_track_count(source->mp4);
for (i=0; i<nb_tracks; i++) {
Bool check_deps = 0;
if (gf_isom_get_media_type(source->mp4, i+1) == GF_ISOM_MEDIA_HINT)
continue;
fill_isom_es_ifce(source, &source->streams[i], source->mp4, i+1, bifs_use_pes, compute_max_size);
if (min_offset > ((GF_ESIMP4 *)source->streams[i].input_udta)->ts_offset)
min_offset = ((GF_ESIMP4 *)source->streams[i].input_udta)->ts_offset;
switch(source->streams[i].stream_type) {
case GF_STREAM_OD:
has_bifs_od = 1;
source->streams[i].repeat_rate = carousel_rate;
break;
case GF_STREAM_SCENE:
has_bifs_od = 1;
source->streams[i].repeat_rate = carousel_rate;
break;
case GF_STREAM_VISUAL:
/*turn on image repeat*/
switch (source->streams[i].object_type_indication) {
case GPAC_OTI_IMAGE_JPEG:
case GPAC_OTI_IMAGE_PNG:
((GF_ESIMP4 *)source->streams[i].input_udta)->image_repeat_ms = carousel_rate;
break;
default:
check_deps = 1;
if (gf_isom_get_sample_count(source->mp4, i+1)>1) {
/*use the first visual stream with more than one sample as the PCR stream*/
if (!source->pcr_idx) {
source->pcr_idx = i+1;
if (temi_url) {
((GF_ESIMP4 *)source->streams[i].input_udta)->insert_temi = GF_TRUE;
if (insert_ntp)
((GF_ESIMP4 *)source->streams[i].input_udta)->insert_ntp = GF_TRUE;
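/*"NOTEMIURL" acts as a sentinel: insert TEMI timing descriptors but without carrying an external URL*/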
if (strcmp(temi_url, "NOTEMIURL"))
((GF_ESIMP4 *)source->streams[i].input_udta)->temi_url = temi_url;
}
}
}
break;
}
break;
case GF_STREAM_AUDIO:
if (!first_audio) first_audio = i+1;
check_deps = 1;
break;
default:
/*unsupported stream type: no specific configuration applied*/
break;
}
source->nb_streams++;
if (gf_isom_get_sample_count(source->mp4, i+1)>1) first_other = i+1;
if (check_deps) {
u32 k;
Bool found_dep = 0;
for (k=0; k<nb_tracks; k++) {
if (gf_isom_get_media_type(source->mp4, k+1) != GF_ISOM_MEDIA_OD)
continue;
/*check whether any OD track references this track*/
if (gf_isom_has_track_reference(source->mp4, k+1, GF_ISOM_REF_OD, gf_isom_get_track_id(source->mp4, i+1) )==1) {
found_dep = 1;
break;
}
}
if (!found_dep) {
/*this stream is not referred to by any OD, send it as a regular PES stream without MPEG-4 Systems signaling*/
source->streams[i].caps |= GF_ESI_STREAM_WITHOUT_MPEG4_SYSTEMS;
}
}
}
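/*BIFS/OD tracks found but no MPEG-4 signaling mode requested: default to full signaling*/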
if (has_bifs_od && !source->mpeg4_signaling) source->mpeg4_signaling = GF_M2TS_MPEG4_SIGNALING_FULL;
/*if no visual PCR stream was found, fall back to the first audio stream, then to any stream with more than one sample*/
if (!source->pcr_idx) source->pcr_idx = first_audio;
if (!source->pcr_idx) source->pcr_idx = first_other;
if (source->pcr_idx) {
GF_ESIMP4 *priv;
source->pcr_idx-=1;
priv = source->streams[source->pcr_idx].input_udta;
gf_isom_set_default_sync_track(source->mp4, priv->track);
}
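/*if any track has a negative offset, shift all streams forward so timestamps stay positive*/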
if (min_offset < 0) {
for (i=0; i<source->nb_streams; i++) {
((GF_ESIMP4 *)source->streams[i].input_udta)->ts_offset += -min_offset;
}
}
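/*retrieve the file's IOD; it is dropped below if it carries no ES descriptors*/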
source->iod = gf_isom_get_root_od(source->mp4);
if (source->iod) {
GF_ObjectDescriptor *iod = (GF_ObjectDescriptor*)source->iod;
if (gf_list_count(iod->ESDescriptors) == 0) {
gf_odf_desc_del(source->iod);
source->iod = NULL;
} else {
fprintf(stderr, "IOD found for program %s\n", src);
/*if using scene-only signaling (4over2), convert single-sample OD tracks into OD update descriptors and remove them from the IOD*/
if (source->mpeg4_signaling==GF_M2TS_MPEG4_SIGNALING_SCENE) {
for (i=0; i<gf_list_count(iod->ESDescriptors); i++) {
u32 track_num, k;
GF_M2TSDescriptor *oddesc;
GF_ISOSample *sample;
GF_ESD *esd = gf_list_get(iod->ESDescriptors, i);
if (esd->decoderConfig->streamType!=GF_STREAM_OD) continue;
track_num = gf_isom_get_track_by_id(source->mp4, esd->ESID);
if (gf_isom_get_sample_count(source->mp4, track_num)>1) continue;
sample = gf_isom_get_sample(source->mp4, track_num, 1, NULL);
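/*skip OD samples too large to fit in a single MPEG-2 descriptor (1-byte length field)*/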
if (sample->dataLength >= 255-2) {
gf_isom_sample_del(&sample);
continue;
}
/*rewrite ESD dependencies*/
for (k=0; k<gf_list_count(iod->ESDescriptors); k++) {
GF_ESD *dep_esd = gf_list_get(iod->ESDescriptors, k);
if (dep_esd->dependsOnESID==esd->ESID) dep_esd->dependsOnESID = esd->dependsOnESID;
}
for (k=0; k<source->nb_streams; k++) {
if (source->streams[k].stream_id==esd->ESID) {
source->streams[k].stream_type = 0;
break;
}
}
if (!source->od_updates) source->od_updates = gf_list_new();
GF_SAFEALLOC(oddesc, GF_M2TSDescriptor);
oddesc->data_len = sample->dataLength;
oddesc->data = sample->data;
oddesc->tag = GF_M2TS_MPEG4_ODUPDATE_DESCRIPTOR;
sample->data = NULL;
gf_isom_sample_del(&sample);
gf_list_add(source->od_updates, oddesc);
gf_list_rem(iod->ESDescriptors, i);
i--;
gf_odf_desc_del((GF_Descriptor *) esd);
source->samples_count--;
}
}
}
}
return 1;
}
#endif
#ifndef GPAC_DISABLE_STREAMING
/*open SDP file*/
if (strstr(src, ".sdp")) {
GF_X_Attribute *att;
char *sdp_buf;
u32 sdp_size, i;
GF_Err e;
FILE *_sdp = gf_fopen(src, "rt");
if (!_sdp) {
fprintf(stderr, "Error opening %s - no such file\n", src);
return 0;
}
gf_fseek(_sdp, 0, SEEK_END);
sdp_size = (u32)gf_ftell(_sdp);
gf_fseek(_sdp, 0, SEEK_SET);
sdp_buf = (char*)gf_malloc(sizeof(char)*sdp_size);
memset(sdp_buf, 0, sizeof(char)*sdp_size);
sdp_size = (u32) fread(sdp_buf, 1, sdp_size, _sdp);
gf_fclose(_sdp);
sdp = gf_sdp_info_new();
e = gf_sdp_info_parse(sdp, sdp_buf, sdp_size);
gf_free(sdp_buf);
if (e) {
fprintf(stderr, "Error opening %s : %s\n", src, gf_error_to_string(e));
gf_sdp_info_del(sdp);
return 0;
}
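/*look for a base64-encoded IOD in the SDP "mpeg4-iod" attribute and decode it*/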
i=0;
while ((att = (GF_X_Attribute*)gf_list_enum(sdp->Attributes, &i))) {
char buf[2000];
u32 size;
char *buf64;
u32 size64;
char *iod_str;
if (strcmp(att->Name, "mpeg4-iod") ) continue;
iod_str = att->Value + 1;
if (strnicmp(iod_str, "data:application/mpeg4-iod;base64", strlen("data:application/mpeg4-iod;base64"))) continue;
buf64 = strstr(iod_str, ",");
if (!buf64) break;
buf64 += 1;
size64 = (u32) strlen(buf64) - 1;
size = gf_base64_decode(buf64, size64, buf, 2000);
gf_odf_desc_read(buf, size, &source->iod);
break;
}
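/*declare one input stream per SDP media description; the first visual stream is used as PCR*/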
source->nb_streams = gf_list_count(sdp->media_desc);
for (i=0; i<source->nb_streams; i++) {
GF_SDPMedia *media = gf_list_get(sdp->media_desc, i);
fill_rtp_es_ifce(&source->streams[i], media, sdp, source);
switch(source->streams[i].stream_type) {
case GF_STREAM_OD:
case GF_STREAM_SCENE:
source->mpeg4_signaling = GF_M2TS_MPEG4_SIGNALING_FULL;
source->streams[i].repeat_rate = carousel_rate;
break;
}
if (!source->pcr_idx && (source->streams[i].stream_type == GF_STREAM_VISUAL)) {
source->pcr_idx = i+1;
}
}
if (source->pcr_idx) source->pcr_idx-=1;
gf_sdp_info_del(sdp);
return 2;
} else
#endif /*GPAC_DISABLE_STREAMING*/
#ifndef GPAC_DISABLE_SENG
if (strstr(src, ".bt")) //open .bt file
{
u32 i;
u32 load_type=0;
source->seng = gf_seng_init(source, src, load_type, NULL, (load_type == GF_SM_LOAD_DIMS) ? 1 : 0);
if (!source->seng) {
fprintf(stderr, "Cannot create scene engine\n");
exit(1);
}
else {
fprintf(stderr, "Scene engine created.\n");
}
assert( source );
assert( source->seng);
source->iod = gf_seng_get_iod(source->seng);
if (! source->iod) {
fprintf(stderr, __FILE__": No IOD\n");
}
source->nb_streams = gf_seng_get_stream_count(source->seng);
source->rate = carousel_rate;
source->mpeg4_signaling = GF_M2TS_MPEG4_SIGNALING_FULL;
for (i=0; i<source->nb_streams; i++) {
fill_seng_es_ifce(&source->streams[i], i, source->seng, source->rate);
//fprintf(stderr, "Fill interface\n");
if (!source->pcr_idx && (source->streams[i].stream_type == GF_STREAM_AUDIO)) {
source->pcr_idx = i+1;
}
}
/*when an audio input is present, declare it and store OD + ESD_U*/
if (audio_input_ip) {
/*add the audio program*/
source->pcr_idx = source->nb_streams;
source->streams[source->nb_streams].stream_type = GF_STREAM_AUDIO;
/*hack: http urls are not decomposed therefore audio_input_port remains null*/
if (audio_input_port) { /*UDP/RTP*/
source->streams[source->nb_streams].object_type_indication = GPAC_OTI_AUDIO_MPEG1;
} else { /*HTTP*/
aac_reader->oti = source->streams[source->nb_streams].object_type_indication = GPAC_OTI_AUDIO_AAC_MPEG4;
}
source->streams[source->nb_streams].input_ctrl = void_input_ctrl;
source->streams[source->nb_streams].stream_id = AUDIO_DATA_ESID;
source->streams[source->nb_streams].timescale = 1000;
GF_SAFEALLOC(source->streams[source->nb_streams].input_udta, GF_ESIStream);
((GF_ESIStream*)source->streams[source->nb_streams].input_udta)->vers_inc = 1; /*increment version number at every audio update*/
assert( source );
//assert( source->iod);
if (source->iod && ((source->iod->tag!=GF_ODF_IOD_TAG) || (mpeg4_signaling != GF_M2TS_MPEG4_SIGNALING_SCENE))) {
/*create the descriptor*/
GF_ESD *esd;
GF_SimpleDataDescriptor *audio_desc;
GF_SAFEALLOC(audio_desc, GF_SimpleDataDescriptor);
if (audio_input_port) { /*UDP/RTP*/
esd = gf_odf_desc_esd_new(0);
esd->decoderConfig->streamType = source->streams[source->nb_streams].stream_type;
esd->decoderConfig->objectTypeIndication = source->streams[source->nb_streams].object_type_indication;
} else { /*HTTP*/
esd = AAC_GetESD(aac_reader); /*for AAC, we have to wait for the first ADTS chunk*/
}
assert( esd );
esd->ESID = source->streams[source->nb_streams].stream_id;
if (esd->slConfig->timestampResolution) /*for AAC, the ESD is only complete after the first ADTS chunk has been read*/
encode_audio_desc(esd, audio_desc);
else
gf_odf_desc_del((GF_Descriptor *)esd);
/*find the audio OD stream and attach its descriptor*/
for (i=0; i<source->nb_streams; i++) {
if (source->streams[i].stream_id == AUDIO_OD_ESID) {
if (source->streams[i].input_udta)
gf_free(source->streams[i].input_udta);
source->streams[i].input_udta = (void*)audio_desc; /*Hack: the real input_udta type (for our SampleCallBack function) is GF_ESIStream*/
audio_OD_stream_id = i;
break;
}
}
if (audio_OD_stream_id == (u32)-1) {
fprintf(stderr, "Error: could not find an audio OD stream with ESID=100 in '%s'\n", src);
return 0;
}
} else {
source->mpeg4_signaling = GF_M2TS_MPEG4_SIGNALING_SCENE;
}
source->nb_streams++;
}
/*when a video input is present, declare it and store OD + ESD_U*/
if (video_buffer) {
/*add the video program*/
source->streams[source->nb_streams].stream_type = GF_STREAM_VISUAL;
source->streams[source->nb_streams].object_type_indication = GPAC_OTI_VIDEO_AVC;
source->streams[source->nb_streams].input_ctrl = void_input_ctrl;
source->streams[source->nb_streams].stream_id = VIDEO_DATA_ESID;
source->streams[source->nb_streams].timescale = 1000;
GF_SAFEALLOC(source->streams[source->nb_streams].input_udta, GF_ESIStream);
((GF_ESIStream*)source->streams[source->nb_streams].input_udta)->vers_inc = 1; /*increment version number at every video update*/
assert(source);
if (source->iod && ((source->iod->tag!=GF_ODF_IOD_TAG) || (mpeg4_signaling != GF_M2TS_MPEG4_SIGNALING_SCENE))) {
assert(0); /*TODO*/
#if 0
/*create the descriptor*/
GF_ESD *esd;
GF_SimpleDataDescriptor *video_desc;
GF_SAFEALLOC(video_desc, GF_SimpleDataDescriptor);
esd = gf_odf_desc_esd_new(0);
esd->decoderConfig->streamType = source->streams[source->nb_streams].stream_type;
esd->decoderConfig->objectTypeIndication = source->streams[source->nb_streams].object_type_indication;
esd->ESID = source->streams[source->nb_streams].stream_id;
/*find the audio OD stream and attach its descriptor*/
for (i=0; i<source->nb_streams; i++) {
if (source->streams[i].stream_id == 103/*TODO: VIDEO_OD_ESID*/) {
if (source->streams[i].input_udta)
gf_free(source->streams[i].input_udta);
source->streams[i].input_udta = (void*)video_desc;
audio_OD_stream_id = i;
break;
}
}
if (audio_OD_stream_id == (u32)-1) {
fprintf(stderr, "Error: could not find an audio OD stream with ESID=100 in '%s'\n", src);
return 0;
}
#endif
} else {
assert (source->mpeg4_signaling == GF_M2TS_MPEG4_SIGNALING_SCENE);
}
source->nb_streams++;
}
if (!source->pcr_idx) source->pcr_idx=1;
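/*start the carousel thread (seng_output), which feeds scene/OD data and updates*/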
source->th = gf_th_new("Carousel");
source->bifs_src_name = update;
gf_th_run(source->th, seng_output, source);
return 1;
} else
#endif
{
FILE *f = gf_fopen(src, "rt");
if (f) {
gf_fclose(f);
fprintf(stderr, "Error opening %s - not a supported input media, skipping.\n", src);
} else {
fprintf(stderr, "Error opening %s - no such file.\n", src);
}
return 0;
}
}