in src/python/pants/backend/jvm/tasks/jvm_dependency_check.py
def _compute_unnecessary_deps(self, target, actual_deps):
  """Computes unused deps for the given Target.

  :returns: A dict mapping directly declared but unused targets to sets of suggested replacements.
  """
  # Flatten the product deps of this target.
  product_deps = set()
  for dep_entries in actual_deps.values():
    product_deps.update(dep_entries)

  # Determine which of the DEFAULT deps in the declared set of this target were used.
  used = set()
  unused = set()
  for dep, _ in self._analyzer.resolve_aliases(target, scope=Scopes.DEFAULT):
    if dep in used or dep in unused:
      continue
    # TODO: What's a better way to accomplish this check? Filtering by `has_sources` would
    # incorrectly skip "empty" `*_library` targets, which could then be used as a loophole.
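    # Resources and unpacked-jar targets don't contribute classfiles to the compile, so they
    # would never appear in `product_deps` and would always be flagged as unused; exempt them
    # to avoid that false positive.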
    if isinstance(dep, (Resources, UnpackedJars)):
      continue
    # If any of the target's jars or classfiles were used, consider it used.
    if product_deps.isdisjoint(self._analyzer.files_for_target(dep)):
      unused.add(dep)
    else:
      used.add(dep)

  # If there were no unused deps, return early.
  if not unused:
    return {}

  # For any deps that were used, count their derived-from targets as used as well.
  # TODO: Refactor to do some of this above once tests are in place.
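  # `derived_from_chain` walks from a (possibly synthetic, e.g. codegen'd) target back to the
  # concrete target declared in a BUILD file, so usage of a synthetic target is credited to the
  # target the user actually declared.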
  for dep in list(used):
    for derived_from in dep.derived_from_chain:
      if derived_from in unused:
        unused.remove(derived_from)
        used.add(derived_from)
  # Prune derived targets whose originating target is already in the unused set, so the same
  # declaration isn't reported twice.
  for dep in list(unused):
    if set(dep.derived_from_chain) & unused:
      unused.remove(dep)

  if not unused:
    return {}

# For any deps that were not used, determine whether their transitive deps were used, and
# recommend those as replacements.
replacements = {}
for dep in unused:
replacements[dep] = set()
for t in dep.closure():
if t in used or t in unused:
continue
if not product_deps.isdisjoint(self._analyzer.files_for_target(t)):
replacements[dep].add(t.concrete_derived_from)
return replacements
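
A minimal sketch of how a caller might render the returned mapping, for illustration only. In the
real task the keys and values are Target objects; here hypothetical address strings stand in for
them, and `format_unnecessary_deps` is not a function from the Pants codebase.

def format_unnecessary_deps(replacements):
  """Render the result of _compute_unnecessary_deps as human-readable suggestions.

  `replacements` maps each unused declared dep to a (possibly empty) set of suggested
  transitive deps to declare instead. Plain strings stand in for real Target objects here.
  """
  lines = []
  for unused_dep, suggested in sorted(replacements.items()):
    lines.append('Unused declared dependency: {}'.format(unused_dep))
    if suggested:
      lines.append('  Consider depending directly on: {}'.format(', '.join(sorted(suggested))))
    else:
      lines.append('  No used transitive deps found; it can likely just be removed.')
  return '\n'.join(lines)


# Example with hypothetical addresses:
print(format_unnecessary_deps({
  'src/java/org/example/util:util': {'src/java/org/example/util/strings:strings'},
  'src/java/org/example/unused:unused': set(),
}))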