def post()

in src/lambdas/custodian_api_handler/handlers/job_handler.py [0:0]


    def post(self, event: JobPostModel, _tap: TenantsAccessPayload):
        """
        Post job for the given tenant
        :param event:
        :param _tap:
        :return:
        """
        _LOG.info('Job post event came')
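        # Resolve the tenant the caller is allowed to access and map its
        # cloud to a rule domain; clouds without a rule domain are rejected.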
        tenant = self._obtain_tenant(event.tenant_name, _tap,
                                     event.customer_id)
        domain = RuleDomain.from_tenant_cloud(tenant.cloud)
        if not domain:
            raise ResponseFactory(HTTPStatus.BAD_REQUEST).message(
                f'Cannot start job for tenant with cloud {tenant.cloud}'
            ).exc()

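        # Credentials, if provided, are validated against the tenant's cloud
        # identifier (unless validation is disabled) and stored in the
        # secrets manager; only the secret's key is passed further.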
        credentials_key = None
        if event.credentials:
            _LOG.info('Credentials were provided. Saving to secrets manager')
            credentials = event.credentials.dict()
            if not self._environment_service.skip_cloud_identifier_validation():
                _LOG.info('Validating cloud identifier')
                self._validate_cloud_identifier(
                    credentials=credentials,
                    cloud_identifier=tenant.project,
                    cloud=tenant.cloud.upper()
                )
            credentials_key = self._ssm.prepare_name(tenant.name)
            self._ssm.create_secret(
                secret_name=credentials_key,
                secret_value=credentials,
                ttl=1800  # should be enough for on-prem
            )

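        # Resolve the concrete set of regions to scan from the requested
        # target regions and the tenant.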
        regions_to_scan = self._resolve_regions_to_scan(
            target_regions=event.target_regions,
            tenant=tenant
        )

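        # Unless simultaneous jobs are allowed, refuse to start the scan if
        # any of the resolved regions is locked by another running job.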
        if not self._environment_service.allow_simultaneous_jobs_for_one_tenant():
            _LOG.debug('Setting job lock')
            lock = TenantSettingJobLock(event.tenant_name)
            if job_id := lock.locked_for(regions_to_scan):
                raise ResponseFactory(HTTPStatus.FORBIDDEN).message(
                    f'Some requested regions are already being '
                    f"scanned in another tenant's job {job_id}"
                ).exc()

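        # Collect standard and licensed rulesets that match the requested
        # ruleset names, the tenant and its rule domain.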
        standard_rulesets, lic, licensed_rulesets = self._get_rulesets_for_scan(
            tenant=tenant,
            domain=domain,
            license_key=event.license_key,
            ruleset_names=set(event.iter_rulesets())
        )

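        # If specific rules were requested, resolve their names against the
        # rules available in the selected rulesets and fail on unknown ones.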
        rules_to_scan = event.rules_to_scan
        if rules_to_scan:
            _LOG.info('Rules to scan were provided. Resolving them')
            available = set(
                chain.from_iterable(
                    r.rules for r in chain(standard_rulesets, licensed_rulesets)  # not a bug, rules attribute is injected
                )
            )
            resolver = RuleNamesResolver(
                resolve_from=list(available),
                allow_multiple=True
            )
            resolved, not_resolved = [], []
            for rule, is_resolved in resolver.resolve_multiple_names(
                    event.rules_to_scan):
                if is_resolved:
                    resolved.append(rule)
                else:
                    not_resolved.append(rule)
            if not_resolved:
                return build_response(
                    code=HTTPStatus.BAD_REQUEST,
                    content=f'These rules are not allowed by your '
                            f'{tenant.cloud} '
                            f'license: {", ".join(not_resolved)}'
                )
            rules_to_scan = resolved

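        # Optional TTL for the job item, configured in days via environment.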
        ttl_days = self._environment_service.jobs_time_to_live_days()
        ttl = None
        if ttl_days:
            ttl = timedelta(days=ttl_days)

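        # Persist the job item before building envs and submitting it.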
        job = self._job_service.create(
            customer_name=tenant.customer_name,
            tenant_name=tenant.name,
            regions=list(regions_to_scan),
            rulesets=self._serialize_rulesets(standard_rulesets, lic, licensed_rulesets),
            rules_to_scan=list(rules_to_scan or []),
            ttl=ttl,
            affected_license=lic.license_key if lic else None
        )
        self._job_service.save(job)

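        # Build environment variables for the executor, submit the job to the
        # batch backend, remember the batch job id and lock the regions.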
        envs = self._assemble_service.build_job_envs(
            tenant=tenant,
            job_id=job.id,
            target_regions=list(regions_to_scan),
            credentials_key=credentials_key,
            job_lifetime_minutes=event.timeout_minutes,
            affected_licenses=lic.tenant_license_key(tenant.customer_name) if lic else None
        )
        bid = self._submit_job_to_batch(tenant=tenant, job=job, envs=envs)
        self._job_service.update(job, bid)
        TenantSettingJobLock(tenant.name).acquire(job.id, regions_to_scan)
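        # Respond with 201 CREATED and the job's DTO.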
        return build_response(
            code=HTTPStatus.CREATED,
            content=self._job_service.dto(job)
        )