@@ -18,7 +18,7 @@ from minio.error import NoSuchKey
 
 # Size of the buffer to read files with
 BUF_SIZE = 4096
 
-#Application Version
+# Application Version
 VERSION = "1.4.0"
@@ -80,7 +80,8 @@ def run_asyncio_commands(tasks, max_concurrent_tasks=0):
         num_chunks = len(chunks)
     else:
         chunks = make_chunks(tasks=tasks, chunk_size=max_concurrent_tasks)
-        num_chunks = len(list(make_chunks(tasks=tasks, chunk_size=max_concurrent_tasks)))
+        num_chunks = len(
+            list(make_chunks(tasks=tasks, chunk_size=max_concurrent_tasks)))
 
     if asyncio.get_event_loop().is_closed():
         asyncio.set_event_loop(asyncio.new_event_loop())
@@ -196,7 +197,7 @@ def add_nested_key(config: Dict[str, any], path: List[str], value: str) -> bool:
     else:
         if target not in config:
             config[target] = {}
-        add_nested_key(config[target], path[1:],value)
+        add_nested_key(config[target], path[1:], value)
     return False
 
 
@@ -204,7 +205,8 @@ def read_env_config(prefix, separator='__') -> any:
     prefix = prefix+separator
     env_config = {}
 
-    environment_variables = [env for env in os.environ.keys() if env.startswith(prefix)]
+    environment_variables = [
+        env for env in os.environ.keys() if env.startswith(prefix)]
 
     for env in environment_variables:
         path = env[len(prefix):].split('__')
@@ -212,16 +214,16 @@ def read_env_config(prefix, separator='__') -> any:
     return env_config
 
 
 def load_config(path: str) -> any:
     combined_config = {}
     with open(
-        os.path.join(
-            os.path.dirname(os.path.realpath(__file__)),
-            'acm-config-default.json'),
-        'r') as combined_config_file:
+            os.path.join(
+                os.path.dirname(os.path.realpath(__file__)),
+                'acm-config-default.json'),
+            'r') as combined_config_file:
         combined_config = json.load(combined_config_file)
 
     config = {}
     with open(path, 'r') as config_file:
         config = json.load(config_file)
@@ -236,10 +238,12 @@ def load_config(path: str) -> any:
     update(combined_config, read_env_config('ACM'))
 
     # Calculate profiles hash
-    profile_hashes={}
-    profile_hashes['all'] = get_string_sha256sum(json.dumps(combined_config['profiles']))
+    profile_hashes = {}
+    profile_hashes['all'] = get_string_sha256sum(
+        json.dumps(combined_config['profiles']))
     for profile in combined_config['profiles'].keys():
-        profile_hashes[profile] = get_string_sha256sum(json.dumps(combined_config['profiles'][profile]))
+        profile_hashes[profile] = get_string_sha256sum(
+            json.dumps(combined_config['profiles'][profile]))
 
     combined_config['profileHashes'] = profile_hashes
@@ -356,7 +360,8 @@ def check_matched_files_hashes(ctx, context, print_identity, profile, files):
         try:
             file_object = s3.get_object(s3_bucket, file_identity)
             stored_data = json.load(file_object)
-            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(stored_data, profile, file)
+            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(
+                stored_data, profile, file)
             if calculated_file_hash == stored_file_hash \
                     and ctx.obj['CONFIG']['profileHashes'][profile] == stored_profile_hash:
                 if print_identity:
@@ -392,7 +397,8 @@ def check_changed_files_hashes(ctx, context, profile, files):
         try:
             file_object = s3.get_object(s3_bucket, file_identity)
             stored_data = json.load(file_object)
-            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(stored_data, profile, file)
+            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(
+                stored_data, profile, file)
             if calculated_file_hash != stored_file_hash \
                     or ctx.obj['CONFIG']['profileHashes'][profile] != stored_profile_hash:
                 changed_files.append(file)
@@ -485,7 +491,8 @@ def store_files(ctx, context, files):
                 content_type="application/octet-stream"
             )
             if 'ADD_PREFIX' in ctx.obj and ctx.obj['ADD_PREFIX'] is not None:
-                stored_files.append(os.path.join(ctx.obj['ADD_PREFIX'], file_identity))
+                stored_files.append(os.path.join(
+                    ctx.obj['ADD_PREFIX'], file_identity))
             else:
                 stored_files.append(file)
         except ResponseError as e:
@@ -523,7 +530,8 @@ def retrieve_files(ctx, context, destination, files):
             )
             retrieved_files.append(file_destination)
         except NoSuchKey as e:
-            print(f'ERROR: {file_identity} {file_destination} {e}', file=sys.stderr)
+            print(
+                f'ERROR: {file_identity} {file_destination} {e}', file=sys.stderr)
         except ResponseError as e:
             print(f'ERROR: {file_destination} {e}', file=sys.stderr)
 
@@ -587,14 +595,15 @@ def clean_files(ctx, context, context_data, dry_run, files):
     found_data_objects: List[str] = []
     for obj in s3.list_objects_v2(s3_data_bucket, recursive=False):
         if obj.is_dir:
-            found_data_objects.extend(list_s3_dir(s3, s3_data_bucket, obj.object_name))
+            found_data_objects.extend(list_s3_dir(
+                s3, s3_data_bucket, obj.object_name))
         else:
             found_data_objects.append(obj.object_name)
 
     # print(os.linesep.join(found_data_objects))
 
     for file_identity in found_objects:
-        if not file_identity in found_files:
+        if file_identity not in found_files:
             if dry_run:
                 removed_files.append(f'{s3_bucket}:{file_identity}')
             else:
@@ -602,18 +611,21 @@ def clean_files(ctx, context, context_data, dry_run, files):
                     s3.remove_object(s3_bucket, file_identity)
                     removed_files.append(f'{s3_bucket}:{file_identity}')
                 except ResponseError as e:
-                    print(f'ERROR: {s3_bucket}:{file_identity} {e}', file=sys.stderr)
+                    print(
+                        f'ERROR: {s3_bucket}:{file_identity} {e}', file=sys.stderr)
 
     for file_data_identity in found_data_objects:
-        if not file_data_identity in found_data_files:
+        if file_data_identity not in found_data_files:
             if dry_run:
                 removed_files.append(f'{s3_data_bucket}:{file_data_identity}')
             else:
                 try:
                     s3.remove_object(s3_data_bucket, file_data_identity)
-                    removed_files.append(f'{s3_data_bucket}:{file_data_identity}')
+                    removed_files.append(
+                        f'{s3_data_bucket}:{file_data_identity}')
                 except ResponseError as e:
-                    print(f'ERROR: {s3_data_bucket}:{file_data_identity} {e}', file=sys.stderr)
+                    print(
+                        f'ERROR: {s3_data_bucket}:{file_data_identity} {e}', file=sys.stderr)
 
     print(os.linesep.join(removed_files))
 
@@ -691,7 +703,8 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
         """
         if keep_smaller_input:
             command = f"cp {input_file} {output_file}"
-            return lambda:
+
+            def task():
                 input_size = os.path.getsize(input_file)
                 output_size = os.path.getsize(output_file)
                 if output_size > input_size:
@@ -706,9 +719,9 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
                             )]
                         )
                     )
-
+            return task
         return None
 
     for input_file in files:
         for content_configuration in content_configurations:
             if any([input_file.endswith(extension) for extension in content_configuration['extensions']]):
@@ -721,26 +734,31 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
                         and content_configuration['preserveInputExtension']:
                     output_file = os.path.join(destination, file)
                 else:
-                    output_file_without_ext = os.path.splitext(os.path.join(destination, file))[0]
+                    output_file_without_ext = os.path.splitext(
+                        os.path.join(destination, file))[0]
                     output_file = f'{output_file_without_ext}.{content_configuration["outputExtension"]}'
 
-                output_file_identity = get_file_identity({'REMOVE_PREFIX': destination}, output_file)
+                output_file_identity = get_file_identity(
+                    {'REMOVE_PREFIX': destination}, output_file)
 
                 output_file_dir = os.path.dirname(output_file)
                 os.makedirs(output_file_dir, exist_ok=True)
 
                 if 'preserveSmallerInput' in content_configuration:
-                    preserve_smaller_input = bool(content_configuration['preserveSmallerInput'])
+                    preserve_smaller_input = bool(
+                        content_configuration['preserveSmallerInput'])
                 else:
                     preserve_smaller_input = True
 
                 if 'forcePreserveSmallerInput' in content_configuration:
-                    force_preserve_smaller_input = bool(content_configuration['forcePreserveSmallerInput'])
+                    force_preserve_smaller_input = bool(
+                        content_configuration['forcePreserveSmallerInput'])
                 else:
                     force_preserve_smaller_input = False
 
                 # Only preserve the input if requested AND the extensions of the input and the output match
-                preserve_smaller_input = preserve_smaller_input and (force_preserve_smaller_input or file_extension == content_configuration["outputExtension"])
+                preserve_smaller_input = preserve_smaller_input and (
+                    force_preserve_smaller_input or file_extension == content_configuration["outputExtension"])
 
                 command: str = content_configuration['command'] \
                     .replace('{input_file}', f'\'{input_file}\'') \
@@ -754,7 +772,7 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
                     on_success=[store_filename(
                         task_output,
                         f'{input_file}\t{output_file_identity}' if print_input_and_identity else output_file
-                    ),queue_follow_up_task_if_keep_smaller_input(
+                    ), queue_follow_up_task_if_keep_smaller_input(
                         follow_up_tasks,
                         input_file,
                         output_file,