in docker/services/resize/resize_service.py [0:0]
def recommend_size(self, trend, instance_type, resize_action,
                   cloud, algorithm, instance_meta=None, allow_recursion=True,
                   max_results=5, parent_meta=None,
                   shape_compatibility_rule=None,
                   past_resize_recommendations: List[
                       RecommendationHistory] = None):
    """Recommend up to ``max_results`` replacement shapes for an instance.

    Derives required CPU / memory / network / IOPS ranges from ``trend``,
    filters the cloud's shape catalog by customer preferences, compatibility
    rule and past recommendations, then picks suitable shapes by priority.
    If too few shapes are found and ``allow_recursion`` is True, optional
    metric requirements are discarded and the search is retried once.

    :param trend: load-trend object providing metric ranges and
        ``requires_resize()`` / ``probability``.
    :param instance_type: name of the instance's current shape.
    :param resize_action: action being performed; ``ACTION_SPLIT`` causes
        the trend probability to be attached to each returned shape dto.
    :param cloud: target cloud passed through to prioritization.
    :param algorithm: carries ``recommendation_settings`` with the
        ``forbid_change_series`` / ``forbid_change_family`` flags.
    :param instance_meta: currently unprocessed (see TODO below).
    :param allow_recursion: permit one relaxed retry when results are scarce.
    :param max_results: maximum number of shapes to return.
    :param parent_meta: optional tenant meta restricting allowed
        instance types.
    :param shape_compatibility_rule: rule for the compatibility filter.
    :param past_resize_recommendations: prior recommendations used to
        adjust the candidate list.
    :return: list of shape dtos (possibly empty).
    :raises ExecutorException: if ``instance_type`` is unknown.
    """
    # TODO process instance meta
    current_shape: Shape = self.shape_service.get(name=instance_type)
    if not current_shape:
        _LOG.error(f'Unknown instance type: {instance_type}')
        raise ExecutorException(
            step_name=JOB_STEP_GENERATE_REPORTS,
            reason=f'Unknown instance type: {instance_type}'
        )
    if not trend.requires_resize():
        # No resize needed. For a SPLIT action still return the current
        # shape annotated with the trend probability; otherwise nothing.
        if resize_action == ACTION_SPLIT:
            result_shape = current_shape.get_dto()
            result_shape['probability'] = trend.probability
            return [result_shape]
        return []
    # Required resource ranges derived from the observed load trend.
    cpu_min, cpu_max = trend.get_metric_ranges(
        metric=trend.cpu_load,
        provided=current_shape.cpu)
    memory_min, memory_max = trend.get_metric_ranges(
        metric=trend.memory_load,
        provided=current_shape.memory)
    # NOTE(review): a preceding self.get_suitable_ranges(...) call whose
    # result was immediately overwritten by the call below was removed as
    # dead code.
    net_output_min, _ = trend.get_metric_ranges(
        metric=trend.net_output_load,
        provided=current_shape.network_throughput,
        only_for_non_empty=True
    )
    disk_iops_min, _ = trend.get_metric_ranges(
        metric=trend.avg_disk_iops,
        provided=current_shape.iops,
        only_for_non_empty=True
    )
    all_shapes = self.shape_service.list(cloud=current_shape.cloud)
    if parent_meta:
        # Restrict the catalog to instance types allowed by tenant meta.
        _LOG.debug(f'Applying parent meta: '
                   f'{parent_meta.as_dict()}')
        all_shapes = self.customer_preferences_service. \
            get_allowed_instance_types(
                cloud=current_shape.cloud.value,
                instances_data=all_shapes,
                parent_meta=parent_meta
            )
    all_shapes = ShapeCompatibilityFilter().apply_compatibility_filter(
        current_shape=current_shape,
        shapes=all_shapes,
        compatibility_rule=shape_compatibility_rule
    )
    if past_resize_recommendations:
        # Avoid re-recommending shapes already suggested previously.
        all_shapes = self.apply_adjustment(
            shapes=all_shapes,
            recommendations=past_resize_recommendations)
    settings = algorithm.recommendation_settings
    prioritized_shapes = self.divide_by_priority(
        sizes=all_shapes,
        current_shape=current_shape,
        cloud=cloud,
        resize_action=resize_action,
        parent_meta=parent_meta,
        forbid_change_series=settings.forbid_change_series,
        forbid_change_family=settings.forbid_change_family
    )
    suitable_shapes = self.find_suitable_shapes(
        cpu_min=cpu_min,
        cpu_max=cpu_max,
        memory_min=memory_min,
        memory_max=memory_max,
        net_output_min=net_output_min,
        disk_iops_min=disk_iops_min,
        prioritized_shapes=prioritized_shapes
    )
    suitable_shapes = self._remove_shape_duplicates(
        shapes=suitable_shapes)
    if not suitable_shapes or len(suitable_shapes) < max_results:
        if allow_recursion:
            # Relax optional metric requirements and retry exactly once
            # (allow_recursion=False below prevents further recursion).
            _LOG.warning('No suitable same-series shape found. '
                         'Going to discard requirement for metric '
                         'with scale down.')
            trend.discard_optional_requirements()
            recs = self.recommend_size(
                trend=trend,
                instance_type=instance_type,
                resize_action=resize_action,
                algorithm=algorithm,
                cloud=cloud,
                instance_meta=instance_meta,
                parent_meta=parent_meta,
                allow_recursion=False,
                shape_compatibility_rule=shape_compatibility_rule,
                past_resize_recommendations=past_resize_recommendations)
            # NOTE(review): on this path the ACTION_SPLIT probability is
            # attached only to shapes from the recursive call, not to the
            # outer suitable_shapes — confirm whether that is intended.
            return self._remove_shape_duplicates(
                shapes=suitable_shapes + recs,
                max_results=max_results
            )
        if suitable_shapes:
            # Fewer results than requested and the relaxed retry is spent.
            _LOG.warning('Not enough suitable shapes found.')
            return suitable_shapes
        _LOG.warning('No suitable shapes found')
        return []
    if resize_action == ACTION_SPLIT:
        # A split keeps the workload probability with each proposed shape.
        probability = trend.probability
        for shape in suitable_shapes:
            shape['probability'] = probability
    return self._remove_shape_duplicates(
        shapes=suitable_shapes,
        max_results=max_results)