def execute()

in src/python/pants/backend/jvm/tasks/jar_publish.py


  def execute(self):
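    """Stage and publish artifacts for all exported targets, recording new versions in the pushdb."""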
    self.check_clean_master(commit=(not self.dryrun and self.commit))

    exported_targets = self.exported_targets()
    self.check_targets(exported_targets)

    pushdbs = {}

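    # Returns (and caches, keyed by pushdb file) a (PushDb, dbfile path, repo config) triple for
    # the target's provided artifact.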
    def get_db(tgt):
      # TODO(tdesai) Handle resource type in get_db.
      if tgt.provides is None:
        raise TaskError('trying to publish target {!r} which does not provide an artifact'.format(tgt))
      dbfile = tgt.provides.repo.push_db(tgt)
      result = pushdbs.get(dbfile)
      if not result:
        # Create an empty pushdb if no dbfile exists.
        if os.path.exists(dbfile):
          db = PushDb.load(dbfile)
        else:
          safe_mkdir(os.path.dirname(dbfile))
          db = PushDb()
        try:
          repo = self.repos[tgt.provides.repo.name]
        except KeyError:
          raise TaskError('Repository {0} has no entry in the --repos option.'.format(
            tgt.provides.repo.name))
        result = (db, dbfile, repo)
        pushdbs[dbfile] = result
      return result

    def get_pushdb(tgt):
      return get_db(tgt)[0]

    def fingerprint_internal(tgt):
      pushdb = get_pushdb(tgt)
      entry = pushdb.get_entry(tgt)
      return entry.fingerprint or '0.0.0'

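    # Stages the primary jar plus sources, javadoc, changelog and any publish_extras artifacts,
    # writes the POM, and returns the set of publications staged.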
    def stage_artifacts(tgt, jar, version, tag, changelog):
      publications = OrderedSet()

      # TODO Remove this once we fix https://github.com/pantsbuild/pants/issues/1229
      if (not self.context.products.get('jars').has(tgt) and
          not self.get_options().individual_plugins):
        raise TaskError('Expected to find a primary artifact for {} but there was no jar for it.'
                        .format(tgt.address.reference()))

      # TODO Remove this guard once we fix https://github.com/pantsbuild/pants/issues/1229, there
      # should always be a primary artifact.
      if self.context.products.get('jars').has(tgt):
        self._copy_artifact(tgt, jar, version, typename='jars')
        publications.add(self.Publication(name=jar.name, classifier=None, ext='jar'))

        self.create_source_jar(tgt, jar, version)
        publications.add(self.Publication(name=jar.name, classifier='sources', ext='jar'))

        # don't request docs unless they are available for all transitive targets
        # TODO: doc products should be checked by an independent jar'ing task, and
        # conditionally enabled; see https://github.com/pantsbuild/pants/issues/568
        doc_jar = self.create_doc_jar(tgt, jar, version)
        if doc_jar:
          publications.add(self.Publication(name=jar.name, classifier='javadoc', ext='jar'))

        if self.publish_changelog:
          changelog_path = self.artifact_path(jar, version, suffix='-CHANGELOG', extension='txt')
          with safe_open(changelog_path, 'w') as changelog_file:
            changelog_file.write(changelog)
          publications.add(self.Publication(name=jar.name, classifier='CHANGELOG', ext='txt'))

      # Process any extra jars that might have been previously generated for this target, or a
      # target that it was derived from.
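      # Each publish_extras entry maps a product type to a config dict with optional
      # 'override_name', 'classifier' and 'extension' keys ('extension' defaults to 'jar').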
      for extra_product, extra_config in (self.get_options().publish_extras or {}).items():
        override_name = jar.name
        if 'override_name' in extra_config:
          # If the supplied string has a '{target_provides_name}' in it, replace it with the
          # current jar name. If not, the string will be taken verbatim.
          override_name = extra_config['override_name'].format(target_provides_name=jar.name)

        classifier = None
        suffix = ''
        if 'classifier' in extra_config:
          classifier = extra_config['classifier']
          suffix = '-{0}'.format(classifier)

        extension = extra_config.get('extension', 'jar')

        extra_pub = self.Publication(name=override_name, classifier=classifier, ext=extension)

        # A lot of flexibility is allowed in parameterizing the extra artifact; ensure those
        # parameters lead to a unique publication.
        # TODO(John Sirois): Check this much earlier.
        if extra_pub in publications:
          raise TaskError("publish_extra for '{0}' must override one of name, classifier or "
                          "extension with a non-default value.".format(extra_product))

        # Build a list of targets to check. This list will consist of the current target, plus the
        # entire derived_from chain.
        target_list = [tgt]
        target = tgt
        while target.derived_from != target:
          target_list.append(target.derived_from)
          target = target.derived_from
        for cur_tgt in target_list:
          if self.context.products.get(extra_product).has(cur_tgt):
            self._copy_artifact(cur_tgt, jar, version, typename=extra_product, suffix=suffix,
                                extension=extension, override_name=override_name)
            publications.add(extra_pub)

      pom_path = self.artifact_path(jar, version, extension='pom')
      PomWriter(get_pushdb, tag).write(tgt, path=pom_path)
      return publications

    if self.overrides:
      print('\nPublishing with revision overrides:')
      for (org, name), rev in self.overrides.items():
        print('{0}={1}'.format(coordinate(org, name), rev))

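    # Record the current SCM commit, if any, so new pushdb entries can reference the sha they
    # were published from.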
    head_sha = self.scm.commit_id if self.scm else None

    safe_rmtree(self.workdir)
    published = []
    skip = (self.restart_at is not None)
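    # Walk the exported targets in order, computing the next version for each and staging (and,
    # unless this is a dry run, publishing) its artifacts.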
    for target in exported_targets:
      pushdb, dbfile, repo = get_db(target)
      oldentry = pushdb.get_entry(target)

      # the jar version is ignored here, since it is overridden below with the new entry
      jar, _ = target.get_artifact_info()
      published.append(jar)

      if skip and (jar.org, jar.name) == self.restart_at:
        skip = False

      # select the next version: either a named version, or semver via the pushdb/overrides
      if self.named_snapshot:
        newentry = oldentry.with_named_ver(self.named_snapshot)
      else:
        override = self.overrides.get((jar.org, jar.name))
        sem_ver = override if override else oldentry.sem_ver.bump()
        if self.local_snapshot:
          sem_ver = sem_ver.make_snapshot()

        if sem_ver <= oldentry.sem_ver:
          raise TaskError('Requested version {} must be greater than the current version {}'.format(
            sem_ver, oldentry.sem_ver
          ))
        newentry = oldentry.with_sem_ver(sem_ver)

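      # Identical fingerprints indicate no changes since the last published entry.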
      newfingerprint = self.entry_fingerprint(target, fingerprint_internal)
      newentry = newentry.with_sha_and_fingerprint(head_sha, newfingerprint)
      no_changes = newentry.fingerprint == oldentry.fingerprint

      changelog = ''
      if self.publish_changelog:
        if no_changes:
          changelog = 'No changes for {0} - forced push.\n'.format(pushdb_coordinate(jar, oldentry))
        else:
          changelog = self.changelog(target, oldentry.sha) or 'Direct dependencies changed.\n'

      org = jar.org
      name = jar.name
      rev = newentry.version().version()
      tag_name = '{org}-{name}-{rev}'.format(org=org, name=name, rev=rev) if self.commit else None

      if no_changes and not self.force:
        print('No changes for {0}'.format(pushdb_coordinate(jar, oldentry)))
        stage_artifacts(target, jar, oldentry.version().version(), tag_name, changelog)
      elif skip:
        print('Skipping {} to resume at {}'.format(
          jar_coordinate(jar, (newentry.version() if self.force else oldentry.version()).version()),
          coordinate(self.restart_at[0], self.restart_at[1])
        ))
        stage_artifacts(target, jar, oldentry.version().version(), tag_name, changelog)
      else:
        if not self.dryrun:
          # Confirm push looks good
          if self.publish_changelog:
            if no_changes:
              print(changelog)
            else:
              # The changelog may contain non-ascii text, but in Py2 the print function can, under certain
              # circumstances, incorrectly detect the output encoding to be ascii and thus blow up
              # on non-ascii changelog characters.  Here we explicitly control the encoding to avoid
              # the print function's mis-interpretation.
              # TODO(John Sirois): Consider introducing a pants/util `print_safe` helper for this.
              message = '\nChanges for {} since {} @ {}:\n\n{}\n'.format(
                  coordinate(jar.org, jar.name), oldentry.version(), oldentry.sha, changelog)
              # The stdout encoding can be detected as None when running without a tty (common in
              # tests), in which case we want to force encoding with a unicode-supporting codec.
              # In Py3, sys.stdout is a unicode stream.
              if PY3:
                sys.stdout.write(message)
              else:
                sys.stdout.write(message.encode('utf-8'))
          if not self.confirm_push(coordinate(jar.org, jar.name), newentry.version()):
            raise TaskError('User aborted push')

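        # Record the new entry and stage the artifacts to publish at the bumped version.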
        pushdb.set_entry(target, newentry)
        publications = stage_artifacts(target, jar, rev, tag_name, changelog)

        if self.dryrun:
          print('Skipping publish of {0} in test mode.'.format(pushdb_coordinate(jar, newentry)))
        else:
          self.publish(publications, jar=jar, entry=newentry, repo=repo, published=published)

          if self.commit:
            coord = coordinate(org, name, rev)

            pushdb.dump(dbfile)

            self.publish_pushdb_changes_to_remote_scm(
              pushdb_file=dbfile,
              coordinate=coord,
              tag_name=tag_name,
              tag_message='Publish of {coordinate} initiated by {user} {cause}'.format(
                coordinate=coord,
                user=getpass.getuser(),
                cause='with forced revision' if (org, name) in self.overrides else '(autoinc)',
              ),
              postscript=self.push_postscript
            )