void JNICALL Java_com_twitter_vireo_transform_jni_Trim_jniInit()

in vireo/scala/jni/vireo/transform.cpp [203:305]


void JNICALL Java_com_twitter_vireo_transform_jni_Trim_jniInit(JNIEnv* env, jobject trim_obj, jobject samples_obj, jobject edit_boxes_obj, jlong start_ms, jlong duration_ms) {
  jni::ExceptionHandler::SafeExecuteFunction(env, [&] {
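    // allocate the native trim state and stash its pointer in the Scala Trim object's "jni" field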
    auto jni = new _JNITrimStruct();
    auto jni_trim = jni::Wrap(env, trim_obj);
    jni_trim.set<jlong>("jni", (jlong)jni);

    // collect the input samples
    uint64_t index = 0;
    vector<jobject> sample_objs;
    vector<decode::Sample> samples = jni::createVectorFromMedia(env, samples_obj, function<decode::Sample(jobject)>([env, &index, &sample_objs](jobject sample_obj) -> decode::Sample {
      auto jni_sample = jni::Wrap(env, sample_obj);

      int64_t pts = (int64_t)jni_sample.get<jlong>("pts");
      int64_t dts = (int64_t)jni_sample.get<jlong>("dts");
      bool keyframe = (bool)jni_sample.get<jboolean>("keyframe");
      SampleType type = (SampleType)jni_sample.get<jbyte>("sampleType");

      sample_objs.push_back(sample_obj);  // also keep the jobject so it can be updated in place after trimming

      const uint8_t* _index = (const uint8_t*)index++;  // not a valid memory address: the original sample index is encoded as the data pointer of a dummy common::Data32 so it can be recovered after trimming
      return (decode::Sample){ pts, dts, keyframe, type, [_index](){ return common::Data32(_index, 0, NULL); } };
    }));

    // collect the input edit boxes
    vector<common::EditBox> edit_boxes = jni::createVectorFromSeq<common::EditBox>(env, edit_boxes_obj, function<common::EditBox(jobject)>([env](jobject edit_box_obj) -> common::EditBox {
      auto jni_edit_box = jni::Wrap(env, edit_box_obj);
      return common::EditBox((int64_t)jni_edit_box.get<jlong>("startPts"),
                             (uint64_t)jni_edit_box.get<jlong>("durationPts"),
                             1.0f,
                             (SampleType)jni_edit_box.get<jbyte>("sampleType"));
    }));

    // get settings object, classify type of track
    auto settings_obj = jni::Wrap(env, samples_obj).get("settings", "Ljava/lang/Object;");
    auto jni_settings = jni::Wrap(env, settings_obj);
    string settings_type = jni_settings.class_name();

    // trim the track
    if (settings_type.find("com/twitter/vireo/settings/Video") == 0) {
      auto settings = jni::createVideoSettings(env, settings_obj);
      auto track = functional::Video<decode::Sample>(samples, settings);
      auto trimmed = transform::Trim<SampleType::Video>(track, edit_boxes, (uint64_t)start_ms, (uint64_t)duration_ms);
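      // each trimmed sample still carries its original position encoded in the dummy Data32
      // pointer; recover it to locate the matching Scala sample object and write back the
      // trimmed pts/dts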
      for (auto sample: trimmed.track) {
        uint64_t index = (uint64_t)sample.nal().data();
        jobject sample_obj = sample_objs[index];
        jni::Wrap jni_sample = jni::Wrap(env, sample_obj);
        jni_sample.set<jlong>("pts", (int64_t)sample.pts);
        jni_sample.set<jlong>("dts", (int64_t)sample.dts);
        jni->jni_samples.push_back(move(jni_sample));
      }
      jni->edit_boxes.insert(jni->edit_boxes.end(), trimmed.track.edit_boxes().begin(), trimmed.track.edit_boxes().end());
      jni->duration = trimmed.track.duration();

      // final setup for the associated Scala object
      jni::Wrap jni_trimmed_track = jni::Wrap(env, jni_trim.get("track", "Lcom/twitter/vireo/transform/Trim$Track;"));
      setVideoSettings(env, jni_trimmed_track, trimmed.track.settings());
      jni_trimmed_track.set<jint>("b", (uint32_t)jni->jni_samples.size());
    } else if (settings_type.find("com/twitter/vireo/settings/Audio") == 0) {
      auto settings = jni::createAudioSettings(env, settings_obj);
      auto track = functional::Audio<decode::Sample>(samples, settings);
      auto trimmed = transform::Trim<SampleType::Audio>(track, edit_boxes, (uint64_t)start_ms, (uint64_t)duration_ms);
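      // same index recovery and pts/dts write-back as the video branch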
      for (auto sample: trimmed.track) {
        uint64_t index = (uint64_t)sample.nal().data();
        jobject sample_obj = sample_objs[index];
        jni::Wrap jni_sample = jni::Wrap(env, sample_obj);
        jni_sample.set<jlong>("pts", (int64_t)sample.pts);
        jni_sample.set<jlong>("dts", (int64_t)sample.dts);
        jni->jni_samples.push_back(move(jni_sample));
      }
      jni->edit_boxes.insert(jni->edit_boxes.end(), trimmed.track.edit_boxes().begin(), trimmed.track.edit_boxes().end());
      jni->duration = trimmed.track.duration();

      // final setup for the associated Scala object
      jni::Wrap jni_trimmed_track = jni::Wrap(env, jni_trim.get("track", "Lcom/twitter/vireo/transform/Trim$Track;"));
      setAudioSettings(env, jni_trimmed_track, trimmed.track.settings());
      jni_trimmed_track.set<jint>("b", (uint32_t)jni->jni_samples.size());
    } else if (settings_type.find("com/twitter/vireo/settings/Caption") == 0) {
      auto settings = jni::createCaptionSettings(env, settings_obj);
      auto track = functional::Caption<decode::Sample>(samples, settings);
      auto trimmed = transform::Trim<SampleType::Caption>(track, edit_boxes, (uint64_t)start_ms, (uint64_t)duration_ms);
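      // same index recovery and pts/dts write-back as the video branch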
      for (auto sample: trimmed.track) {
        uint64_t index = (uint64_t)sample.nal().data();
        jobject sample_obj = sample_objs[index];
        jni::Wrap jni_sample = jni::Wrap(env, sample_obj);
        jni_sample.set<jlong>("pts", (int64_t)sample.pts);
        jni_sample.set<jlong>("dts", (int64_t)sample.dts);
        jni->jni_samples.push_back(move(jni_sample));
      }
      jni->edit_boxes.insert(jni->edit_boxes.end(), trimmed.track.edit_boxes().begin(), trimmed.track.edit_boxes().end());
      jni->duration = trimmed.track.duration();

      // final setup for the associated Scala object
      jni::Wrap jni_trimmed_track = jni::Wrap(env, jni_trim.get("track", "Lcom/twitter/vireo/transform/Trim$Track;"));
      setCaptionSettings(env, jni_trimmed_track, trimmed.track.settings());
      jni_trimmed_track.set<jint>("b", (uint32_t)jni->jni_samples.size());
    } else {
      THROW_IF(true, Invalid);
    }
  }, [env, trim_obj] {
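    // on failure, release any native state that was already allocated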
    Java_com_twitter_vireo_transform_jni_Trim_jniClose(env, trim_obj);
  });
}
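
The index round-trip above hinges on one trick: the original position of each sample is encoded as the data pointer of a dummy, zero-length common::Data32 (the cast near the top of the function), and decoded back from sample.nal().data() inside each trimming loop. Below is a minimal standalone sketch of that pattern in plain C++, independent of vireo; the DummyData struct and the loop bounds are illustrative only, not part of the library.

#include <cassert>
#include <cstdint>
#include <vector>

struct DummyData {
  const uint8_t* ptr;  // never dereferenced; only carries the encoded index
};

int main() {
  std::vector<DummyData> handles;
  for (uint64_t i = 0; i < 4; ++i) {
    handles.push_back(DummyData{ (const uint8_t*)i });  // encode: index -> fake pointer
  }
  for (uint64_t i = 0; i < handles.size(); ++i) {
    uint64_t recovered = (uint64_t)handles[i].ptr;      // decode: fake pointer -> index
    assert(recovered == i);
  }
  return 0;
}

This works because the pointer is never dereferenced; it is only a pointer-sized slot that survives the transform and lets each trimmed sample be matched back to its original jobject.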