in applications/mp42ts/main.c [2187:2754]
int main(int argc, char **argv)
{
/********************/
/* declarations */
/********************/
const char *ts_pck;
char *ts_pack_buffer = NULL;
GF_Err e;
u32 run_time;
Bool real_time, single_au_pes, is_stdout;
s64 pcr_init_val = -1;
u32 usec_till_next, ttl, split_rap, sdt_refresh_rate;
u32 i, j, mux_rate, nb_sources, cur_pid, carrousel_rate, last_print_time, last_video_time, bifs_use_pes, psi_refresh_rate, nb_pck_pack, nb_pck_in_pack, pcr_ms;
char *ts_out = NULL, *udp_out = NULL, *rtp_out = NULL, *audio_input_ip = NULL;
FILE *ts_output_file = NULL;
GF_Socket *ts_output_udp_sk = NULL, *audio_input_udp_sk = NULL;
#ifndef GPAC_DISABLE_STREAMING
GF_RTPChannel *ts_output_rtp = NULL;
GF_RTSPTransport tr;
GF_RTPHeader hdr;
#endif
char *video_buffer;
u32 video_buffer_size;
u16 output_port = 0, audio_input_port = 0;
u32 output_type, audio_input_type, pcr_offset;
char *audio_input_buffer = NULL;
u32 audio_input_buffer_length=65536;
char *bifs_src_name;
const char *insert_temi = 0;
M2TSSource sources[MAX_MUX_SRC_PROG];
u32 segment_duration, segment_index, segment_number;
char segment_manifest_default[GF_MAX_PATH];
char *segment_manifest, *segment_http_prefix, *segment_dir;
char segment_prefix[GF_MAX_PATH];
char segment_name[GF_MAX_PATH];
const char *ip_ifce = NULL;
GF_M2TS_Time prev_seg_time;
GF_M2TS_Mux *muxer;
/*****************/
/* gpac init */
/*****************/
gf_sys_init(GF_FALSE);
gf_log_set_tool_level(GF_LOG_ALL, GF_LOG_WARNING);
/***********************/
/* initialisations */
/***********************/
real_time = 0;
is_stdout = 0;
ts_output_file = NULL;
video_buffer = NULL;
last_video_time = 0;
audio_input_type = 0;
sdt_refresh_rate = 0;
ts_output_udp_sk = NULL;
udp_out = NULL;
#ifndef GPAC_DISABLE_STREAMING
ts_output_rtp = NULL;
rtp_out = NULL;
#endif
ts_out = NULL;
bifs_src_name = NULL;
nb_sources = 0;
mux_rate = 0;
run_time = 0;
carrousel_rate = 500;
output_port = 1234;
segment_duration = 0;
segment_number = 10; /* by default, we keep the 10 previous segments */
segment_index = 0;
segment_manifest = NULL;
segment_http_prefix = NULL;
segment_dir = NULL;
prev_seg_time.sec = 0;
prev_seg_time.nanosec = 0;
video_buffer_size = 0;
nb_pck_pack = 1;
pcr_ms = 100;
#ifndef GPAC_DISABLE_PLAYER
aac_reader = AAC_Reader_new();
#endif
muxer = NULL;
single_au_pes = 0;
bifs_use_pes = 0;
split_rap = 0;
ttl = 1;
psi_refresh_rate = GF_M2TS_PSI_DEFAULT_REFRESH_RATE;
pcr_offset = (u32) -1;
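/*(u32)-1 means "auto": when programs are declared below, the PCR offset is derived from the largest access unit size and the mux rate (when both are known)*/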
/***********************/
/* parse arguments */
/***********************/
if (GF_OK != parse_args(argc, argv, &mux_rate, &carrousel_rate, &pcr_init_val, &pcr_offset, &psi_refresh_rate, &single_au_pes, &bifs_use_pes, sources, &nb_sources, &bifs_src_name,
&real_time, &run_time, &video_buffer, &video_buffer_size,
&audio_input_type, &audio_input_ip, &audio_input_port,
&output_type, &ts_out, &udp_out, &rtp_out, &output_port,
&segment_dir, &segment_duration, &segment_manifest, &segment_number, &segment_http_prefix, &split_rap, &nb_pck_pack, &pcr_ms, &ttl, &ip_ifce, &insert_temi, &sdt_refresh_rate)) {
goto exit;
}
if (run_time && !mux_rate) {
fprintf(stderr, "Cannot specify TS run time for VBR multiplex - disabling run time\n");
run_time = 0;
}
/***************************/
/* create mp42ts muxer */
/***************************/
muxer = gf_m2ts_mux_new(mux_rate, psi_refresh_rate, real_time);
if (!muxer) {
fprintf(stderr, "Could not create the muxer. Aborting.\n");
goto exit;
}
gf_m2ts_mux_use_single_au_pes_mode(muxer, single_au_pes);
if (pcr_init_val>=0) gf_m2ts_mux_set_initial_pcr(muxer, (u64) pcr_init_val);
gf_m2ts_mux_set_pcr_max_interval(muxer, pcr_ms);
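/*output setup: plain file or stdout, UDP socket and/or RTP session; when segmentation is requested the file output is split into numbered .ts segments*/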
if (ts_out != NULL) {
if (segment_duration) {
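/*the destination name is used as a prefix: segments are written as <prefix>_<index>.ts and the manifest defaults to <prefix>.m3u8*/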
strcpy(segment_prefix, ts_out);
if (segment_dir) {
if (strchr("\\/", segment_name[strlen(segment_name)-1])) {
sprintf(segment_name, "%s%s_%d.ts", segment_dir, segment_prefix, segment_index);
} else {
sprintf(segment_name, "%s/%s_%d.ts", segment_dir, segment_prefix, segment_index);
}
} else {
sprintf(segment_name, "%s_%d.ts", segment_prefix, segment_index);
}
ts_out = gf_strdup(segment_name);
if (!segment_manifest) {
sprintf(segment_manifest_default, "%s.m3u8", segment_prefix);
segment_manifest = segment_manifest_default;
}
//write_manifest(segment_manifest, segment_dir, segment_duration, segment_prefix, segment_http_prefix, segment_index, 0, 0);
}
if (!strcmp(ts_out, "stdout") || !strcmp(ts_out, "-") ) {
ts_output_file = stdout;
is_stdout = GF_TRUE;
} else {
ts_output_file = gf_fopen(ts_out, "wb");
is_stdout = GF_FALSE;
}
if (!ts_output_file) {
fprintf(stderr, "Error opening %s\n", ts_out);
goto exit;
}
}
if (udp_out != NULL) {
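/*TS over UDP: set up multicast if the destination is a multicast address, otherwise bind a plain UDP socket*/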
ts_output_udp_sk = gf_sk_new(GF_SOCK_TYPE_UDP);
if (gf_sk_is_multicast_address((char *)udp_out)) {
e = gf_sk_setup_multicast(ts_output_udp_sk, (char *)udp_out, output_port, ttl, 0, (char *) ip_ifce);
} else {
e = gf_sk_bind(ts_output_udp_sk, ip_ifce, output_port, (char *)udp_out, output_port, GF_SOCK_REUSE_PORT);
}
if (e) {
fprintf(stderr, "Error initializing UDP socket: %s\n", gf_error_to_string(e));
goto exit;
}
}
#ifndef GPAC_DISABLE_STREAMING
if (rtp_out != NULL) {
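/*TS over RTP: one session on output_port/output_port+1, static payload type 33 (MP2T)*/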
ts_output_rtp = gf_rtp_new();
gf_rtp_set_ports(ts_output_rtp, output_port);
memset(&tr, 0, sizeof(GF_RTSPTransport));
tr.IsUnicast = gf_sk_is_multicast_address((char *)rtp_out) ? 0 : 1;
tr.Profile="RTP/AVP";
tr.destination = (char *)rtp_out;
tr.source = "0.0.0.0";
tr.IsRecord = 0;
tr.Append = 0;
tr.SSRC = rand();
tr.port_first = output_port;
tr.port_last = output_port+1;
if (tr.IsUnicast) {
tr.client_port_first = output_port;
tr.client_port_last = output_port+1;
} else {
tr.source = (char *)rtp_out;
tr.TTL = ttl;
}
e = gf_rtp_setup_transport(ts_output_rtp, &tr, (char *)ts_out);
if (e != GF_OK) {
fprintf(stderr, "Cannot setup RTP transport info : %s\n", gf_error_to_string(e));
goto exit;
}
e = gf_rtp_initialize(ts_output_rtp, 0, 1, 1500, 0, 0, (char *) ip_ifce);
if (e != GF_OK) {
fprintf(stderr, "Cannot initialize RTP sockets : %s\n", gf_error_to_string(e));
goto exit;
}
memset(&hdr, 0, sizeof(GF_RTPHeader));
hdr.Version = 2;
hdr.PayloadType = 33; /*MP2T*/
hdr.SSRC = tr.SSRC;
hdr.Marker = 0;
}
#endif /*GPAC_DISABLE_STREAMING*/
/************************************/
/* create streaming audio input */
/************************************/
if (audio_input_ip)
switch(audio_input_type) {
case GF_MP42TS_UDP:
audio_input_udp_sk = gf_sk_new(GF_SOCK_TYPE_UDP);
if (gf_sk_is_multicast_address((char *)audio_input_ip)) {
e = gf_sk_setup_multicast(audio_input_udp_sk, (char *)audio_input_ip, audio_input_port, 32, 0, NULL);
} else {
e = gf_sk_bind(audio_input_udp_sk, NULL, audio_input_port, (char *)audio_input_ip, audio_input_port, GF_SOCK_REUSE_PORT);
}
if (e) {
fprintf(stderr, "Error initializing UDP socket for %s:%d : %s\n", audio_input_ip, audio_input_port, gf_error_to_string(e));
goto exit;
}
gf_sk_set_buffer_size(audio_input_udp_sk, 0, GF_M2TS_UDP_BUFFER_SIZE);
gf_sk_set_block_mode(audio_input_udp_sk, 0);
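/*non-blocking socket: the main loop polls it each iteration and pushes whatever was received through SampleCallBack*/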
/*allocate data buffer*/
audio_input_buffer = (char*)gf_malloc(audio_input_buffer_length);
assert(audio_input_buffer);
break;
case GF_MP42TS_RTP:
/*TODO: not implemented*/
assert(0);
break;
#ifndef GPAC_DISABLE_PLAYER
case GF_MP42TS_HTTP:
audio_prog = (void*)&sources[nb_sources-1];
aac_download_file(aac_reader, audio_input_ip);
break;
#endif
case GF_MP42TS_FILE:
assert(0); /*audio live input is restricted to realtime/streaming*/
break;
default:
assert(0);
}
if (!nb_sources) {
fprintf(stderr, "No program to mux, quitting.\n");
goto exit;
}
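/*give any source declared without an explicit ID a program number that does not clash with the IDs of the remaining sources*/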
for (i=0; i<nb_sources; i++) {
if (!sources[i].ID) {
for (j=i+1; j<nb_sources; j++) {
if (sources[i].ID < sources[j].ID) sources[i].ID = sources[j].ID+1;
}
if (!sources[i].ID) sources[i].ID = 1;
}
}
/****************************************/
/* declare all streams to the muxer */
/****************************************/
cur_pid = 100; /*PIDs start from 100*/
for (i=0; i<nb_sources; i++) {
GF_M2TS_Mux_Program *program;
if (! sources[i].is_not_program_declaration) {
u32 prog_pcr_offset = 0;
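/*auto mode: offset the PCR by the time needed to send the largest access unit at the mux rate, expressed in 90 kHz units*/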
if (pcr_offset==(u32)-1) {
if (sources[i].max_sample_size && mux_rate) {
Double r = sources[i].max_sample_size * 8;
r *= 90000;
r/= mux_rate;
//add 10% of safety to cover TS signaling and other potential table update while sending the largest PES
r *= 1.1;
prog_pcr_offset = (u32) r;
}
} else {
prog_pcr_offset = pcr_offset;
}
fprintf(stderr, "Setting up program ID %d - send rates: PSI %d ms PCR %d ms - PCR offset %d\n", sources[i].ID, psi_refresh_rate, pcr_ms, prog_pcr_offset);
program = gf_m2ts_mux_program_add(muxer, sources[i].ID, cur_pid, psi_refresh_rate, prog_pcr_offset, sources[i].mpeg4_signaling);
if (sources[i].mpeg4_signaling) program->iod = sources[i].iod;
if (sources[i].od_updates) {
program->loop_descriptors = sources[i].od_updates;
sources[i].od_updates = NULL;
}
} else {
program = gf_m2ts_mux_program_find(muxer, sources[i].ID);
}
if (!program) continue;
for (j=0; j<sources[i].nb_streams; j++) {
GF_M2TS_Mux_Stream *stream;
Bool force_pes_mode = 0;
/*likely an OD stream disabled*/
if (!sources[i].streams[j].stream_type) continue;
if (sources[i].streams[j].stream_type==GF_STREAM_SCENE) force_pes_mode = bifs_use_pes ? 1 : 0;
stream = gf_m2ts_program_stream_add(program, &sources[i].streams[j], cur_pid+j+1, (sources[i].pcr_idx==j) ? 1 : 0, force_pes_mode);
if (split_rap && (sources[i].streams[j].stream_type==GF_STREAM_VISUAL)) stream->start_pes_at_rap = 1;
}
cur_pid += sources[i].nb_streams;
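/*move the base PID for the next program up to the next multiple of 10*/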
while (cur_pid % 10)
cur_pid ++;
if (sources[i].program_name[0] || sources[i].provider_name[0] ) gf_m2ts_mux_program_set_name(program, sources[i].program_name, sources[i].provider_name);
}
muxer->flush_pes_at_rap = (split_rap == 2) ? GF_TRUE : GF_FALSE;
if (sdt_refresh_rate) {
gf_m2ts_mux_enable_sdt(muxer, sdt_refresh_rate);
}
gf_m2ts_mux_update_config(muxer, 1);
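/*when packet packing is requested, TS packets are accumulated and written/sent in bursts of nb_pck_pack * 188 bytes*/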
if (nb_pck_pack>1) {
ts_pack_buffer = gf_malloc(sizeof(char) * 188 * nb_pck_pack);
}
/*****************/
/* main loop */
/*****************/
last_print_time = gf_sys_clock();
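/*each iteration: poll the live audio input, flush every TS packet the muxer has ready, push the video buffer if any, then handle real-time pacing, console reporting and run-time/EOS checks*/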
while (run) {
u32 status;
/*check for some audio input from the network*/
if (audio_input_ip) {
u32 read;
switch (audio_input_type) {
case GF_MP42TS_UDP:
case GF_MP42TS_RTP:
/*e =*/
gf_sk_receive(audio_input_udp_sk, audio_input_buffer, audio_input_buffer_length, 0, &read);
if (read) {
SampleCallBack((void*)&sources[nb_sources-1], AUDIO_DATA_ESID, audio_input_buffer, read, gf_m2ts_get_sys_clock(muxer));
}
break;
#ifndef GPAC_DISABLE_PLAYER
case GF_MP42TS_HTTP:
/*nothing to do: AAC_OnLiveData is called automatically*/
/*check we're still alive*/
if (gf_dm_is_thread_dead(aac_reader->dnload)) {
GF_ESD *esd;
aac_download_file(aac_reader, audio_input_ip);
esd = AAC_GetESD(aac_reader);
if (!esd)
break;
assert(esd->slConfig->timestampResolution); /*if we don't have this value we won't be able to adjust the timestamps within the MPEG2-TS*/
if (esd->slConfig->timestampResolution)
audio_discontinuity_offset = gf_m2ts_get_sys_clock(muxer) * (u64)esd->slConfig->timestampResolution / 1000;
gf_odf_desc_del((GF_Descriptor *)esd);
}
break;
#endif
default:
assert(0);
}
}
/*flush all packets*/
nb_pck_in_pack=0;
while ((ts_pck = gf_m2ts_mux_process(muxer, &status, &usec_till_next)) != NULL) {
if (ts_pack_buffer ) {
memcpy(ts_pack_buffer + 188 * nb_pck_in_pack, ts_pck, 188);
nb_pck_in_pack++;
if (nb_pck_in_pack < nb_pck_pack)
continue;
ts_pck = (const char *) ts_pack_buffer;
} else {
nb_pck_in_pack = 1;
}
call_flush:
if (ts_output_file != NULL) {
gf_fwrite(ts_pck, 1, 188 * nb_pck_in_pack, ts_output_file);
if (segment_duration && (muxer->time.sec > prev_seg_time.sec + segment_duration)) {
prev_seg_time = muxer->time;
gf_fclose(ts_output_file);
segment_index++;
if (segment_dir) {
if (strchr("\\/", segment_name[strlen(segment_name)-1])) {
sprintf(segment_name, "%s%s_%d.ts", segment_dir, segment_prefix, segment_index);
} else {
sprintf(segment_name, "%s/%s_%d.ts", segment_dir, segment_prefix, segment_index);
}
} else {
sprintf(segment_name, "%s_%d.ts", segment_prefix, segment_index);
}
ts_output_file = gf_fopen(segment_name, "wb");
if (!ts_output_file) {
fprintf(stderr, "Error opening %s\n", segment_name);
goto exit;
}
/* delete the oldest segment */
if (segment_number && ((s32) (segment_index - segment_number - 1) >= 0)) {
char old_segment_name[GF_MAX_PATH];
if (segment_dir) {
if (strchr("\\/", segment_name[strlen(segment_name)-1])) {
sprintf(old_segment_name, "%s%s_%d.ts", segment_dir, segment_prefix, segment_index - segment_number - 1);
} else {
sprintf(old_segment_name, "%s/%s_%d.ts", segment_dir, segment_prefix, segment_index - segment_number - 1);
}
} else {
sprintf(old_segment_name, "%s_%d.ts", segment_prefix, segment_index - segment_number - 1);
}
gf_delete_file(old_segment_name);
}
write_manifest(segment_manifest, segment_dir, segment_duration, segment_prefix, segment_http_prefix,
// (segment_index >= segment_number/2 ? segment_index - segment_number/2 : 0), segment_index >1 ? segment_index-1 : 0, 0);
( (segment_index > segment_number ) ? segment_index - segment_number : 0), segment_index >1 ? segment_index-1 : 0, 0);
}
}
if (ts_output_udp_sk != NULL) {
e = gf_sk_send(ts_output_udp_sk, (char*)ts_pck, 188 * nb_pck_in_pack);
if (e) {
fprintf(stderr, "Error %s sending UDP packet\n", gf_error_to_string(e));
}
}
#ifndef GPAC_DISABLE_STREAMING
if (ts_output_rtp != NULL) {
u32 ts;
hdr.SequenceNumber++;
/*muxer clock at 90k*/
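/*i.e. sec*90000 + nanosec*90000/1e9; the product is widened to 64 bits to avoid u32 overflow*/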
ts = muxer->time.sec*90000 + (u32) ((u64) muxer->time.nanosec * 9 / 100000);
/*FIXME - better discontinuity check*/
hdr.Marker = (ts < hdr.TimeStamp) ? 1 : 0;
hdr.TimeStamp = ts;
e = gf_rtp_send_packet(ts_output_rtp, &hdr, (char*)ts_pck, 188 * nb_pck_in_pack, 0);
if (e) {
fprintf(stderr, "Error %s sending RTP packet\n", gf_error_to_string(e));
}
}
#endif
nb_pck_in_pack = 0;
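/*muxer has no more data packets ready (padding or EOS): leave the flush loop*/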
if (status>=GF_M2TS_STATE_PADDING) {
break;
}
}
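/*flush any packets still sitting in the packing buffer (an incomplete burst)*/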
if (nb_pck_in_pack) {
ts_pck = (const char *) ts_pack_buffer;
goto call_flush;
}
/*push video*/
{
u32 now=gf_sys_clock();
if (now/MP42TS_VIDEO_FREQ != last_video_time/MP42TS_VIDEO_FREQ) {
/*should use carrousel behaviour instead of being pushed manually*/
if (video_buffer)
SampleCallBack((void*)&sources[nb_sources-1], VIDEO_DATA_ESID, video_buffer, video_buffer_size, gf_m2ts_get_sys_clock(muxer)+1000/*try buffering due to VLC msg*/);
last_video_time = now;
}
}
if (real_time) {
/*refresh every MP42TS_PRINT_TIME_MS ms*/
u32 now=gf_sys_clock();
if (now > last_print_time + MP42TS_PRINT_TIME_MS) {
last_print_time = now;
fprintf(stderr, "M2TS: time % 6d - TS time % 6d - avg bitrate % 8d\r", gf_m2ts_get_sys_clock(muxer), gf_m2ts_get_ts_clock(muxer), muxer->average_birate_kbps);
if (gf_prompt_has_input()) {
char c = gf_prompt_get_char();
if (c=='q') break;
}
}
if (status == GF_M2TS_STATE_IDLE) {
#if 0
/*wait till next packet is ready to be sent*/
if (usec_till_next>1000) {
//fprintf(stderr, "%d usec till next packet\n", usec_till_next);
gf_sleep(usec_till_next / 1000);
}
#else
//we don't have enough precision on usec counting and we end up eating one core on most machines, so let's just sleep
//for 1 ms whenever we are idle - it's maybe too much but the muxer will catch up afterwards
gf_sleep(1);
#endif
}
}
if (run_time) {
if (gf_m2ts_get_ts_clock(muxer) > run_time) {
fprintf(stderr, "Stopping multiplex at %d ms (requested runtime %d ms)\n", gf_m2ts_get_ts_clock(muxer), run_time);
break;
}
}
if (status==GF_M2TS_STATE_EOS) {
break;
}
}
{
u64 bits = muxer->tot_pck_sent*8*188;
u32 dur_sec = gf_m2ts_get_ts_clock(muxer) / 1000;
if (!dur_sec) dur_sec = 1;
fprintf(stderr, "Done muxing - %d sec - average rate %d kbps "LLD" packets written\n", dur_sec, (u32) (bits/dur_sec/1000), muxer->tot_pck_sent);
fprintf(stderr, "\tPadding: "LLD" packets - "LLD" PES padded bytes (%g kbps)\n", muxer->tot_pad_sent, muxer->tot_pes_pad_bytes, (Double) (muxer->tot_pes_pad_bytes*8.0/dur_sec/1000) );
}
exit:
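/*release output file/sockets, per-source resources and the muxer itself*/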
if (ts_pack_buffer) gf_free(ts_pack_buffer);
run = 0;
if (segment_duration) {
write_manifest(segment_manifest, segment_dir, segment_duration, segment_prefix, segment_http_prefix, segment_index - segment_number, segment_index, 1);
}
if (ts_output_file && !is_stdout) gf_fclose(ts_output_file);
if (ts_output_udp_sk) gf_sk_del(ts_output_udp_sk);
#ifndef GPAC_DISABLE_STREAMING
if (ts_output_rtp) gf_rtp_del(ts_output_rtp);
#endif
if (ts_out) gf_free(ts_out);
if (audio_input_udp_sk) gf_sk_del(audio_input_udp_sk);
if (audio_input_buffer) gf_free (audio_input_buffer);
if (video_buffer) gf_free(video_buffer);
if (udp_out) gf_free(udp_out);
#ifndef GPAC_DISABLE_STREAMING
if (rtp_out) gf_free(rtp_out);
#endif
if (muxer) gf_m2ts_mux_del(muxer);
for (i=0; i<nb_sources; i++) {
for (j=0; j<sources[i].nb_streams; j++) {
if (sources[i].streams[j].input_ctrl) sources[i].streams[j].input_ctrl(&sources[i].streams[j], GF_ESI_INPUT_DESTROY, NULL);
if (sources[i].streams[j].input_udta) {
gf_free(sources[i].streams[j].input_udta);
}
if (sources[i].streams[j].decoder_config) {
gf_free(sources[i].streams[j].decoder_config);
}
if (sources[i].streams[j].sl_config) {
gf_free(sources[i].streams[j].sl_config);
}
}
if (sources[i].iod) gf_odf_desc_del((GF_Descriptor*)sources[i].iod);
#ifndef GPAC_DISABLE_ISOM
if (sources[i].mp4) gf_isom_close(sources[i].mp4);
#endif
#ifndef GPAC_DISABLE_SENG
if (sources[i].seng) {
gf_seng_terminate(sources[i].seng);
sources[i].seng = NULL;
}
#endif
if (sources[i].th) gf_th_del(sources[i].th);
}
#ifndef GPAC_DISABLE_PLAYER
if (aac_reader) AAC_Reader_del(aac_reader);
#endif
if (logfile) gf_fclose(logfile);
gf_sys_close();
#ifdef GPAC_MEMORY_TRACKING
if (enable_mem_tracker && (gf_memory_size() || gf_file_handles_count() )) {
gf_memory_print();
return 2;
}
#endif
return 0;
}