in scripts/patch/2.11/s3_patch_metric_format_onprem.py [0:0]
import os
import pathlib

import boto3
from botocore.config import Config

# parse_args, export_args, download_dir, list_files, process_file, upload_dir,
# delete_s3_keys and the ACTION_* constants are defined or imported elsewhere
# in this script.

def main():
    print('Parsing arguments')
    args = parse_args()
    local_folder = os.path.join(pathlib.Path(__file__).parent,
                                'temp-pre-patched')
    patched = os.path.join(pathlib.Path(__file__).parent, 'temp-patched')
    # Create both working folders up front; exist_ok makes re-runs safe.
    pathlib.Path(local_folder).mkdir(exist_ok=True)
    pathlib.Path(patched).mkdir(exist_ok=True)
    print('Exporting env variables')
    export_args(**args)
    print('Initializing MinIO client')
    config = Config(retries={
        'max_attempts': 10,
        'mode': 'standard'
    })
    # MinIO endpoints are addressed by path (no virtual-hosted buckets) and
    # signed with SigV4.
    config = config.merge(Config(s3={
        'signature_version': 's3v4',
        'addressing_style': 'path'
    }))
    session = boto3.Session(
        aws_access_key_id=args.get('access_key'),
        aws_secret_access_key=args.get('secret_key')
    )
    url = f'http://{args.get("minio_host")}:{args.get("minio_port")}'
    client = session.client('s3', endpoint_url=url, config=config)
    bucket_name = args['metric_bucket_name']
    prefix = args['prefix']
    allowed_actions = args.get('action')
    if ACTION_DOWNLOAD in allowed_actions:
        print('Downloading metrics from S3')
        download_dir(
            local=local_folder,
            bucket=bucket_name,
            prefix=prefix,
            client=client
        )
        print(f'S3 files were downloaded to {local_folder}')
    if ACTION_PATCH in allowed_actions:
        print('Patching metric files')
        files = list(pathlib.Path(local_folder).rglob('*.csv'))
        file_paths = list_files(files=files)
        for file_path in file_paths:
            process_file(
                file_path=file_path,
                output_folder_path=patched,
                folder_prefix=prefix
            )
        print(f'Patched metrics were saved to {patched}')
    if ACTION_UPLOAD in allowed_actions:
        print('Uploading patched metrics')
        upload_dir(
            folder_file_path=patched,
            bucket_name=bucket_name,
            client=client
        )
        print('Patched metrics have been uploaded')
    if ACTION_CLEANUP in allowed_actions:
        print('Removing old metric files from S3')
        # Keys to delete are derived from the copies downloaded into local_folder.
        delete_s3_keys(
            client=client,
            bucket=bucket_name,
            folder_path=local_folder
        )
        print('Old metrics have been removed from S3')
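
download_dir is not shown in this excerpt. A minimal sketch of what it might
look like, assuming only the keyword arguments used above (local, bucket,
prefix, client) and standard boto3 list/download calls:

def download_dir(local, bucket, prefix, client):
    # List every object under the prefix and mirror it on disk, keeping the
    # full key as the relative path so it can be mapped back to a key later.
    paginator = client.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
        for obj in page.get('Contents', []):
            key = obj['Key']
            target = os.path.join(local, key)
            os.makedirs(os.path.dirname(target), exist_ok=True)
            client.download_file(bucket, key, target)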
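
upload_dir is likewise only referenced here. Under the same assumptions, a
sketch that uploads each patched file, using its path relative to the temp
folder as the object key:

def upload_dir(folder_file_path, bucket_name, client):
    # Walk the patched folder and upload every file it contains.
    root = pathlib.Path(folder_file_path)
    for path in root.rglob('*'):
        if path.is_file():
            key = path.relative_to(root).as_posix()
            client.upload_file(str(path), bucket_name, key)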
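
delete_s3_keys is also defined elsewhere; since it receives the local download
folder, it presumably rebuilds the original object keys from those files. A
hedged sketch along those lines:

def delete_s3_keys(client, bucket, folder_path):
    # Rebuild keys from the downloaded copies and delete them in batches of
    # 1000, the per-request limit of delete_objects.
    root = pathlib.Path(folder_path)
    keys = [p.relative_to(root).as_posix() for p in root.rglob('*') if p.is_file()]
    for i in range(0, len(keys), 1000):
        batch = [{'Key': k} for k in keys[i:i + 1000]]
        client.delete_objects(Bucket=bucket, Delete={'Objects': batch})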