Cleanup and bugfix

master · 1.4.1
Drew Short, 3 years ago
commit 33f3f81759
1 changed file: acm.py (62 lines changed)

@@ -80,7 +80,8 @@ def run_asyncio_commands(tasks, max_concurrent_tasks=0):
         num_chunks = len(chunks)
     else:
         chunks = make_chunks(tasks=tasks, chunk_size=max_concurrent_tasks)
-        num_chunks = len(list(make_chunks(tasks=tasks, chunk_size=max_concurrent_tasks)))
+        num_chunks = len(
+            list(make_chunks(tasks=tasks, chunk_size=max_concurrent_tasks)))
 
     if asyncio.get_event_loop().is_closed():
         asyncio.set_event_loop(asyncio.new_event_loop())
@@ -204,7 +205,8 @@ def read_env_config(prefix, separator='__') -> any:
     prefix = prefix+separator
     env_config = {}
-    environment_variables = [env for env in os.environ.keys() if env.startswith(prefix)]
+    environment_variables = [
+        env for env in os.environ.keys() if env.startswith(prefix)]
 
     for env in environment_variables:
         path = env[len(prefix):].split('__')
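For context, read_env_config collects every variable that starts with the prefix into a nested dict keyed by the separator-split remainder of the name. The hunk elides the insertion loop, so the sketch below is a minimal stand-in for the idea, not the file's exact code; the setdefault walk is an assumption.

    import os

    def read_env_config_sketch(prefix: str, separator: str = '__') -> dict:
        # Gather variables like ACM__S3__HOST into {'S3': {'HOST': ...}}.
        prefix = prefix + separator
        env_config = {}
        environment_variables = [
            env for env in os.environ.keys() if env.startswith(prefix)]
        for env in environment_variables:
            path = env[len(prefix):].split(separator)
            node = env_config
            for part in path[:-1]:
                node = node.setdefault(part, {})  # descend, creating dicts
            node[path[-1]] = os.environ[env]
        return env_config

    # os.environ['ACM__S3__HOST'] = 'minio.local'
    # read_env_config_sketch('ACM')  ->  {'S3': {'HOST': 'minio.local'}}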
@@ -212,6 +214,7 @@ def read_env_config(prefix, separator='__') -> any:
     return env_config
 
 
 def load_config(path: str) -> any:
     combined_config = {}
+    with open(
@@ -221,7 +224,6 @@ def load_config(path: str) -> any:
             'r') as combined_config_file:
         combined_config = json.load(combined_config_file)
 
     config = {}
     with open(path, 'r') as config_file:
         config = json.load(config_file)
@@ -237,9 +239,11 @@ def load_config(path: str) -> any:
     # Calculate profiles hash
     profile_hashes = {}
-    profile_hashes['all'] = get_string_sha256sum(json.dumps(combined_config['profiles']))
+    profile_hashes['all'] = get_string_sha256sum(
+        json.dumps(combined_config['profiles']))
     for profile in combined_config['profiles'].keys():
-        profile_hashes[profile] = get_string_sha256sum(json.dumps(combined_config['profiles'][profile]))
+        profile_hashes[profile] = get_string_sha256sum(
+            json.dumps(combined_config['profiles'][profile]))
 
     combined_config['profileHashes'] = profile_hashes
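get_string_sha256sum itself is outside this hunk. A minimal equivalent, assuming it simply hashes the UTF-8 bytes of the serialized profiles and returns a hex digest:

    import hashlib

    def get_string_sha256sum(content: str) -> str:
        # Hash the JSON-serialized profile so any edit to a profile
        # changes its recorded hash and invalidates stored results.
        return hashlib.sha256(content.encode('utf-8')).hexdigest()

One caveat worth noting: json.dumps output depends on key order, so the same logical profile can hash differently across runs unless sort_keys=True is used; the diff gives no indication either way.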
@@ -356,7 +360,8 @@ def check_matched_files_hashes(ctx, context, print_identity, profile, files):
         try:
             file_object = s3.get_object(s3_bucket, file_identity)
             stored_data = json.load(file_object)
-            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(stored_data, profile, file)
+            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(
+                stored_data, profile, file)
             if calculated_file_hash == stored_file_hash \
                     and ctx.obj['CONFIG']['profileHashes'][profile] == stored_profile_hash:
                 if print_identity:
@@ -392,7 +397,8 @@ def check_changed_files_hashes(ctx, context, profile, files):
         try:
             file_object = s3.get_object(s3_bucket, file_identity)
             stored_data = json.load(file_object)
-            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(stored_data, profile, file)
+            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(
+                stored_data, profile, file)
             if calculated_file_hash != stored_file_hash \
                     or ctx.obj['CONFIG']['profileHashes'][profile] != stored_profile_hash:
                 changed_files.append(file)
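Both check commands unpack the same triple from get_file_sha256sum. Its body is not in this diff; the sketch below captures the contract under the assumption that stored_data carries the profile hash and file hash recorded at store time (the metadata key names here are hypothetical):

    import hashlib

    def get_file_sha256sum(stored_data: dict, profile: str, file: str):
        # Return (profile hash recorded at store time,
        #         file hash recorded at store time,
        #         hash of the file as it exists on disk now).
        stored_profile_hash = stored_data.get('profileHash')  # hypothetical key
        stored_file_hash = stored_data.get('sha256sum')       # hypothetical key
        digest = hashlib.sha256()
        with open(file, 'rb') as f:
            for block in iter(lambda: f.read(65536), b''):
                digest.update(block)
        return stored_profile_hash, stored_file_hash, digest.hexdigest()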
@@ -485,7 +491,8 @@ def store_files(ctx, context, files):
                 content_type="application/octet-stream"
             )
             if 'ADD_PREFIX' in ctx.obj and ctx.obj['ADD_PREFIX'] is not None:
-                stored_files.append(os.path.join(ctx.obj['ADD_PREFIX'], file_identity))
+                stored_files.append(os.path.join(
+                    ctx.obj['ADD_PREFIX'], file_identity))
             else:
                 stored_files.append(file)
         except ResponseError as e:
@@ -523,7 +530,8 @@ def retrieve_files(ctx, context, destination, files):
             )
             retrieved_files.append(file_destination)
         except NoSuchKey as e:
-            print(f'ERROR: {file_identity} {file_destination} {e}', file=sys.stderr)
+            print(
+                f'ERROR: {file_identity} {file_destination} {e}', file=sys.stderr)
         except ResponseError as e:
             print(f'ERROR: {file_destination} {e}', file=sys.stderr)
@@ -587,14 +595,15 @@ def clean_files(ctx, context, context_data, dry_run, files):
     found_data_objects: List[str] = []
     for obj in s3.list_objects_v2(s3_data_bucket, recursive=False):
         if obj.is_dir:
-            found_data_objects.extend(list_s3_dir(s3, s3_data_bucket, obj.object_name))
+            found_data_objects.extend(list_s3_dir(
+                s3, s3_data_bucket, obj.object_name))
         else:
             found_data_objects.append(obj.object_name)
 
     # print(os.linesep.join(found_data_objects))
 
     for file_identity in found_objects:
-        if not file_identity in found_files:
+        if file_identity not in found_files:
             if dry_run:
                 removed_files.append(f'{s3_bucket}:{file_identity}')
             else:
@@ -602,18 +611,21 @@ def clean_files(ctx, context, context_data, dry_run, files):
                     s3.remove_object(s3_bucket, file_identity)
                     removed_files.append(f'{s3_bucket}:{file_identity}')
                 except ResponseError as e:
-                    print(f'ERROR: {s3_bucket}:{file_identity} {e}', file=sys.stderr)
+                    print(
+                        f'ERROR: {s3_bucket}:{file_identity} {e}', file=sys.stderr)
 
     for file_data_identity in found_data_objects:
-        if not file_data_identity in found_data_files:
+        if file_data_identity not in found_data_files:
             if dry_run:
                 removed_files.append(f'{s3_data_bucket}:{file_data_identity}')
             else:
                 try:
                     s3.remove_object(s3_data_bucket, file_data_identity)
-                    removed_files.append(f'{s3_data_bucket}:{file_data_identity}')
+                    removed_files.append(
+                        f'{s3_data_bucket}:{file_data_identity}')
                 except ResponseError as e:
-                    print(f'ERROR: {s3_data_bucket}:{file_data_identity} {e}', file=sys.stderr)
+                    print(
+                        f'ERROR: {s3_data_bucket}:{file_data_identity} {e}', file=sys.stderr)
 
     print(os.linesep.join(removed_files))
@@ -691,7 +703,8 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
     """
     if keep_smaller_input:
         command = f"cp {input_file} {output_file}"
-        return lambda:
+
+        def task():
             input_size = os.path.getsize(input_file)
             output_size = os.path.getsize(output_file)
             if output_size > input_size:
@@ -706,9 +719,9 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
                 )]
             )
         )
+        return task
+    return None
 
     for input_file in files:
         for content_configuration in content_configurations:
             if any([input_file.endswith(extension) for extension in content_configuration['extensions']]):
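The two hunks above are the bugfix named in the commit message: a lambda body must be a single expression, so `return lambda:` followed by an indented block is a SyntaxError. The fix defines a nested function instead, which may contain statements and still closes over the enclosing variables exactly as a lambda would. A standalone sketch of the pattern (names are illustrative, not the file's):

    import os

    def make_size_check_task(input_file: str, output_file: str):
        # def-based closure: same capture semantics as a lambda,
        # but the body may contain statements.
        def task():
            input_size = os.path.getsize(input_file)
            output_size = os.path.getsize(output_file)
            return output_size > input_size  # True if compression grew the file
        return task

    # check = make_size_check_task('in.css', 'out.css.gz')
    # check() evaluates lazily, as the original lambda intended.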
@@ -721,26 +734,31 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
                     and content_configuration['preserveInputExtension']:
                 output_file = os.path.join(destination, file)
             else:
-                output_file_without_ext = os.path.splitext(os.path.join(destination, file))[0]
+                output_file_without_ext = os.path.splitext(
+                    os.path.join(destination, file))[0]
                 output_file = f'{output_file_without_ext}.{content_configuration["outputExtension"]}'
 
-            output_file_identity = get_file_identity({'REMOVE_PREFIX': destination}, output_file)
+            output_file_identity = get_file_identity(
+                {'REMOVE_PREFIX': destination}, output_file)
 
             output_file_dir = os.path.dirname(output_file)
             os.makedirs(output_file_dir, exist_ok=True)
 
             if 'preserveSmallerInput' in content_configuration:
-                preserve_smaller_input = bool(content_configuration['preserveSmallerInput'])
+                preserve_smaller_input = bool(
+                    content_configuration['preserveSmallerInput'])
             else:
                 preserve_smaller_input = True
 
             if 'forcePreserveSmallerInput' in content_configuration:
-                force_preserve_smaller_input = bool(content_configuration['forcePreserveSmallerInput'])
+                force_preserve_smaller_input = bool(
+                    content_configuration['forcePreserveSmallerInput'])
             else:
                 force_preserve_smaller_input = False
 
             # Only preserve the input if requested AND the extensions of the input and the output match
-            preserve_smaller_input = preserve_smaller_input and (force_preserve_smaller_input or file_extension == content_configuration["outputExtension"])
+            preserve_smaller_input = preserve_smaller_input and (
+                force_preserve_smaller_input or file_extension == content_configuration["outputExtension"])
 
             command: str = content_configuration['command'] \
                 .replace('{input_file}', f'\'{input_file}\'') \
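The command construction that this last hunk truncates mid-statement is plain string replacement on a configured shell template, wrapping paths in single quotes so the shell treats them as one word. A minimal sketch, with the `{output_file}` replacement assumed by symmetry (only `{input_file}` is visible above):

    def build_command(template: str, input_file: str, output_file: str) -> str:
        # Replace placeholders with single-quoted paths so spaces survive.
        return (template
                .replace('{input_file}', f"'{input_file}'")
                .replace('{output_file}', f"'{output_file}'"))

    # build_command("gzip -9 -c {input_file} > {output_file}",
    #               "site/app file.css", "out/app file.css.gz")
    # -> "gzip -9 -c 'site/app file.css' > 'out/app file.css.gz'"

Note that bare single-quoting breaks if a path itself contains a quote; shlex.quote is the robust alternative.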
