def create_compile_jobs()

in src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py
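
Builds the Job graph for a single compile target: an optional rsc outlining job
(for 'rsc-then-zinc' targets) followed by a zinc compile job whose dependency
keys wire it to the rsc/zinc outputs of the target's invalid dependencies.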


  def create_compile_jobs(self,
                          compile_target,
                          compile_contexts,
                          invalid_dependencies,
                          ivts,
                          counter,
                          runtime_classpath_product):
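    """Create the rsc and/or zinc Jobs for one invalidated compile target.

    Returns a list of Jobs whose dependency keys encode the scheduling implied by
    the target's workflow ('zinc-only' or 'rsc-then-zinc').
    """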

    def work_for_vts_rsc(vts, ctx):
      # Double-check the cache before beginning compilation.
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target
      tgt, = vts.targets  # Each vts here wraps exactly one target.

      if not hit_cache:
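        # Log a right-justified "[n/total]" progress counter before invoking rsc.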
        counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        # This does the following:
        # - Collect the rsc classpath elements, including zinc compiles of rsc-incompatible
        #   targets and rsc compiles of rsc-compatible targets.
        # - Run Rsc on the current target with those as dependencies.

        dependencies_for_target = list(
          DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

        rsc_deps_classpath_unprocessed = _paths_from_classpath(
          self.context.products.get_data('rsc_classpath').get_for_targets(dependencies_for_target),
          collection_type=OrderedSet)

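        # Relativize the classpath entries to the buildroot; hermetic execution
        # requires buildroot-relative paths.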
        rsc_classpath_rel = fast_relpath_collection(list(rsc_deps_classpath_unprocessed))

        ctx.ensure_output_dirs_exist()

        with Timer() as timer:
          # Outline Scala sources into SemanticDB / scalac compatible header jars.
          # ---------------------------------------------
          rsc_jar_file = fast_relpath(ctx.rsc_jar_file, get_buildroot())

          sources_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)

          distribution = self._get_jvm_distribution()

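          # Hermetic execution runs rsc in a sandbox, so sources and JDK libs must be
          # captured in a merged digest; subprocess/nailgun execution can instead use
          # absolute JDK paths on the local filesystem.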
          def hermetic_digest_classpath():
            jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(distribution)
            merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
              (jdk_libs_digest, sources_snapshot.directory_digest))
            classpath_rel_jdk = rsc_classpath_rel + jdk_libs_rel
            return (merged_sources_and_jdk_digest, classpath_rel_jdk)

          def nonhermetic_digest_classpath():
            classpath_abs_jdk = rsc_classpath_rel + self._jdk_libs_abs(distribution)
            return (EMPTY_DIRECTORY_DIGEST, classpath_abs_jdk)

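          # resolve_for_enum_variant returns the callable registered for the current
          # execution strategy; the trailing () invokes it.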
          (input_digest, classpath_entry_paths) = self.execution_strategy_enum.resolve_for_enum_variant({
            self.HERMETIC: hermetic_digest_classpath,
            self.SUBPROCESS: nonhermetic_digest_classpath,
            self.NAILGUN: nonhermetic_digest_classpath,
          })()

          target_sources = ctx.sources
          args = [
            '-cp', os.pathsep.join(classpath_entry_paths),
            '-d', rsc_jar_file,
          ] + target_sources

          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            input_files=tuple(rsc_classpath_rel),
            input_digest=input_digest,
            output_dir=os.path.dirname(rsc_jar_file))

        self._record_target_stats(
          tgt,
          len(rsc_classpath_rel),
          len(target_sources),
          timer.elapsed,
          False,  # is_incremental: rsc runs are never incremental.
          'rsc')
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)

    rsc_jobs = []
    zinc_jobs = []

    # Invalidated targets are a subset of relevant targets: get the context for this one.
    # NB: this rebinds the compile_target parameter to the single target owned by ivts.
    compile_target = ivts.target
    rsc_compile_context, zinc_compile_context = compile_contexts[compile_target]

    def all_zinc_rsc_invalid_dep_keys(invalid_deps):
      for tgt in invalid_deps:
        # The workflow can be None for e.g. JarLibrary deps, which we don't need to
        # compile as they are populated in the resolve goal.
        tgt_rsc_cc, tgt_z_cc = compile_contexts[tgt]
        if tgt_rsc_cc.workflow is not None:
          # Rely on the results of zinc compiles for zinc-compatible targets.
          yield self._key_for_target_as_dep(tgt, tgt_rsc_cc.workflow)

    def make_rsc_job(target, dep_targets):
      return Job(
        self._rsc_key_for_target(target),
        functools.partial(
          work_for_vts_rsc,
          ivts,
          rsc_compile_context),
        # The rsc jobs depend on other rsc jobs, and on zinc jobs for targets that are not
        # processed by rsc.
        list(all_zinc_rsc_invalid_dep_keys(dep_targets)),
        self._size_estimator(rsc_compile_context.sources),
      )

    def only_zinc_invalid_dep_keys(invalid_deps):
      for tgt in invalid_deps:
        rsc_cc_tgt, zinc_cc_tgt = compile_contexts[tgt]
        if rsc_cc_tgt.workflow is not None:
          yield self._zinc_key_for_target(tgt, rsc_cc_tgt.workflow)

    def make_zinc_job(target, input_product_key, output_products, dep_keys):
      return Job(
        key=self._zinc_key_for_target(target, rsc_compile_context.workflow),
        fn=functools.partial(
          self._default_work_for_vts,
          ivts,
          zinc_compile_context,
          input_product_key,
          counter,
          compile_contexts,
          CompositeProductAdder(*output_products)),
        dependencies=list(dep_keys),
        size=self._size_estimator(zinc_compile_context.sources),
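        # on_success runs only if the job succeeds, marking the vts as up to date.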
        on_success=ivts.update,
      )

    # Create the rsc job.
    # Currently, rsc only supports outlining Scala.
    workflow = rsc_compile_context.workflow
    workflow.resolve_for_enum_variant({
      'zinc-only': lambda: None,
      'rsc-then-zinc': lambda: rsc_jobs.append(make_rsc_job(compile_target, invalid_dependencies)),
    })()

    # Create the zinc compile jobs.
    # - Scala zinc compile jobs depend on the results of running rsc on the scala target.
    # - Java zinc compile jobs depend on the zinc compiles of their dependencies, because we can't
    #   generate jars that make javac happy at this point.
    workflow.resolve_for_enum_variant({
      # NB: zinc-only zinc jobs run zinc and depend on zinc compile outputs.
      'zinc-only': lambda: zinc_jobs.append(
        make_zinc_job(
          compile_target,
          input_product_key='runtime_classpath',
          output_products=[
            runtime_classpath_product,
            self.context.products.get_data('rsc_classpath')],
          dep_keys=only_zinc_invalid_dep_keys(invalid_dependencies))),
      'rsc-then-zinc': lambda: zinc_jobs.append(
        # NB: rsc-then-zinc jobs run zinc and depend on both rsc and zinc compile outputs.
        make_zinc_job(
          compile_target,
          input_product_key='rsc_classpath',
          output_products=[
            runtime_classpath_product,
          ],
          # TODO: remove this dep and fix tests!!!
          dep_keys=[
            # TODO we could remove the dependency on the rsc target in favor of bumping
            # the cache separately. We would need to bring that dependency back for
            # sub-target parallelism though.
            self._rsc_key_for_target(compile_target)
          ] + list(all_zinc_rsc_invalid_dep_keys(invalid_dependencies))
        )),
    })()

    return rsc_jobs + zinc_jobs
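
For intuition, the scheduling this method builds can be sketched standalone. The
Job class and jobs_for helper below are illustrative stand-ins, not Pants APIs:
each job carries a key, a thunk, and the keys of the jobs it depends on, and an
'rsc-then-zinc' target's zinc job gains an extra dependency edge on that same
target's rsc job.


import functools


class Job(object):
  """Illustrative stand-in for the scheduler's Job: a keyed unit of work."""

  def __init__(self, key, fn, dependencies, size=0, on_success=None):
    self.key = key
    self.fn = fn
    self.dependencies = dependencies  # Keys of jobs that must complete first.
    self.size = size
    self.on_success = on_success


def jobs_for(target, workflow, invalid_dep_keys):
  """Return the job(s) for one target under the given workflow."""
  jobs = []
  zinc_deps = list(invalid_dep_keys)
  if workflow == 'rsc-then-zinc':
    rsc_key = 'rsc({})'.format(target)
    jobs.append(Job(rsc_key, functools.partial(print, 'rsc', target),
                    list(invalid_dep_keys)))
    # The zinc job consumes the rsc outline jar, so it also depends on the rsc job.
    zinc_deps.append(rsc_key)
  jobs.append(Job('zinc({})'.format(target), functools.partial(print, 'zinc', target),
                  zinc_deps))
  return jobs


# Example: a zinc-only Java target and an rsc-then-zinc Scala target.
for job in (jobs_for('src/java:lib', 'zinc-only', []) +
            jobs_for('src/scala:lib', 'rsc-then-zinc', ['zinc(src/java:lib)'])):
  print(job.key, '<-', job.dependencies)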