|
|
@@ -5,6 +5,7 @@ import hashlib
 import io
 import json
 import os
+import pathlib
 import platform
 import sys
 import tempfile
@@ -18,7 +19,7 @@ from minio.error import NoSuchKey
 BUF_SIZE = 4096
 
 #Application Version
-VERSION = "1.3.1"
+VERSION = "1.4.0"
 
 
 ###########
@@ -27,7 +28,7 @@ VERSION = "1.3.1"
 
 
 async def run_command_shell(
-        command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, on_success: Callable = ()):
+        command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, on_success: List[Callable] = ()):
     """Run command in subprocess (shell).
 
     Note:
@@ -41,7 +42,8 @@ async def run_command_shell(
     process_stdout, process_stderr = await process.communicate()
 
     if process.returncode == 0:
-        on_success()
+        for callback in on_success:
+            callback()
 
     if stdout != asyncio.subprocess.DEVNULL:
         result = process_stdout.decode().strip()
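
For context, the change above turns on_success into a list of zero-argument callables that all run once the command exits with code 0. A minimal, self-contained sketch of that contract (stand-in function and hypothetical callbacks, not the script itself):

import asyncio

async def run_shell_with_callbacks(command, on_success=()):
    # Stand-in for run_command_shell above: run the command and, on a zero
    # exit code, invoke every queued callback in order.
    process = await asyncio.create_subprocess_shell(
        command,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    process_stdout, _ = await process.communicate()
    if process.returncode == 0:
        for callback in on_success:
            callback()
    return process_stdout.decode().strip()

task_output = []
asyncio.run(run_shell_with_callbacks(
    "echo compressing",  # any command that exits 0 on a POSIX shell
    on_success=[
        lambda: task_output.append("output recorded"),         # cf. store_filename(...)
        lambda: task_output.append("follow-up check queued"),  # cf. queue_follow_up_task_if_keep_smaller_input(...)
    ],
))
print(task_output)  # ['output recorded', 'follow-up check queued']
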
|
|
@@ -669,8 +671,9 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
     if destination is None:
         destination = tempfile.mkdtemp()
 
-    compressed_files = []
+    task_output = []
     tasks = []
+    follow_up_tasks = []
 
     def store_filename(storage_list: List[str], filename: str):
         """
@@ -682,10 +685,35 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
         """
         return lambda: storage_list.append(filename)
 
+    def queue_follow_up_task_if_keep_smaller_input(follow_up_tasks, input_file: str, output_file: str, keep_smaller_input: bool = True):
+        """
+        Build an on_success callback that compares input and output sizes after a
+        successful compression and, if the output is larger, queues a follow-up
+        task that copies the smaller input back over the output.
+        """
+        if not keep_smaller_input:
+            return lambda: None
+        command = f"cp '{input_file}' '{output_file}'"
+
+        def queue_task():
+            input_size = os.path.getsize(input_file)
+            output_size = os.path.getsize(output_file)
+            if output_size > input_size:
+                follow_up_tasks.append(
+                    run_command_shell(
+                        command,
+                        stdout=asyncio.subprocess.DEVNULL,
+                        stderr=asyncio.subprocess.DEVNULL,
+                        on_success=[store_filename(
+                            task_output,
+                            f'Preserved smaller "{input_file}" {output_size} > {input_size}'
+                        )]
+                    )
+                )
+
+        return queue_task
+
     for input_file in files:
         for content_configuration in content_configurations:
             if any([input_file.endswith(extension) for extension in content_configuration['extensions']]):
                 file = input_file
+                file_extension = pathlib.Path(input_file).suffix
                 if 'REMOVE_PREFIX' in ctx.obj and ctx.obj['REMOVE_PREFIX'] is not None:
                     file = strip_prefix(ctx.obj['REMOVE_PREFIX'], input_file)
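
A toy, self-contained illustration of the check the queued callback performs (hypothetical file names and sizes, not part of the script): only when the compressed output ends up larger than the input does a copy-back follow-up get scheduled.

import os
import pathlib
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    input_file = pathlib.Path(tmp) / "photo.jpg"
    output_file = pathlib.Path(tmp) / "photo.out.jpg"
    input_file.write_bytes(b"x" * 1000)   # pretend original
    output_file.write_bytes(b"x" * 4000)  # pretend the encoder made it bigger

    input_size = os.path.getsize(input_file)
    output_size = os.path.getsize(output_file)
    if output_size > input_size:
        # The real callback does not copy here; it appends a
        # run_command_shell(f"cp '{input_file}' '{output_file}'", ...) coroutine
        # to follow_up_tasks so the copy runs in a second batch.
        print(f'Preserved smaller "{input_file}" {output_size} > {input_size}')
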
|
|
|
|
|
|
@@ -701,6 +729,19 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
                 output_file_dir = os.path.dirname(output_file)
                 os.makedirs(output_file_dir, exist_ok=True)
 
+                if 'preserveSmallerInput' in content_configuration:
+                    preserve_smaller_input = bool(content_configuration['preserveSmallerInput'])
+                else:
+                    preserve_smaller_input = True
+
+                if 'forcePreserveSmallerInput' in content_configuration:
+                    force_preserve_smaller_input = bool(content_configuration['forcePreserveSmallerInput'])
+                else:
+                    force_preserve_smaller_input = False
+
+                # Only preserve the input if requested and either forced or the input and output extensions match
+                preserve_smaller_input = preserve_smaller_input and (force_preserve_smaller_input or file_extension == content_configuration["outputExtension"])
+
                 command: str = content_configuration['command'] \
                     .replace('{input_file}', f'\'{input_file}\'') \
                     .replace('{output_file}', f'\'{output_file}\'')
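
The configuration keys read above come from the content profile, which is not shown in this diff; the entry below is only a hypothetical example of how the two new flags could sit next to the existing extensions, outputExtension and command keys.

example_content_configuration = {
    "extensions": [".jpg", ".jpeg"],
    "outputExtension": ".jpg",
    "command": "recompress-jpeg {input_file} {output_file}",  # placeholder encoder command
    "preserveSmallerInput": True,         # defaults to True when the key is absent
    "forcePreserveSmallerInput": False,   # defaults to False when the key is absent
}

With such an entry, a .jpg input keeps preservation enabled because its extension matches outputExtension; an input routed to a different output extension would only be preserved if forcePreserveSmallerInput were set.
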
|
|
@@ -710,10 +751,15 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
                         command,
                         stdout=asyncio.subprocess.DEVNULL,
                         stderr=asyncio.subprocess.DEVNULL,
-                        on_success=store_filename(
-                            compressed_files,
+                        on_success=[store_filename(
+                            task_output,
                             f'{input_file}\t{output_file_identity}' if print_input_and_identity else output_file
-                        )
+                        ), queue_follow_up_task_if_keep_smaller_input(
+                            follow_up_tasks,
+                            input_file,
+                            output_file,
+                            preserve_smaller_input
+                        )]
                     )
                 )
 
@@ -721,7 +767,11 @@ def compress_assets(ctx, profile, content, destination, print_input_and_identity
         tasks, max_concurrent_tasks=ctx.obj['CONFIG']['concurrency']
     )
 
-    print(os.linesep.join(compressed_files))
+    follow_up_results = run_asyncio_commands(
+        follow_up_tasks, max_concurrent_tasks=ctx.obj['CONFIG']['concurrency']
+    )
+
+    print(os.linesep.join(task_output))
 
 
 if __name__ == '__main__':
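
A minimal sketch (toy coroutines, not the script's run_asyncio_commands) of why the copies run as a second batch: which follow-ups are needed is only known after the first batch of compressions has finished.

import asyncio

async def fake_compress(name, follow_up_tasks):
    await asyncio.sleep(0)  # pretend to run the encoder
    # Pretend the output came out larger, so queue a copy-back coroutine.
    follow_up_tasks.append(fake_copy_back(name))
    return f"compressed {name}"

async def fake_copy_back(name):
    await asyncio.sleep(0)  # pretend to run cp
    return f"preserved {name}"

async def main():
    follow_up_tasks = []
    first = await asyncio.gather(*[fake_compress(n, follow_up_tasks) for n in ("a.jpg", "b.jpg")])
    second = await asyncio.gather(*follow_up_tasks)
    print(first)   # ['compressed a.jpg', 'compressed b.jpg']
    print(second)  # ['preserved a.jpg', 'preserved b.jpg']

asyncio.run(main())
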
|
|
|