Tooling for managing asset compression, storage, and retrieval
#!/usr/bin/env python
import asyncio
import collections.abc
import hashlib
import io
import json
import os
import pathlib
import platform
import sys
import tempfile
from typing import Any, Callable, Dict, List

import click
from minio import Minio, ResponseError
from minio.error import NoSuchKey

# Size of the buffer to read files with
BUF_SIZE = 4096

# Prefix for user metadata keys on S3-compatible stores
# (an assumed value; used by get_metadata_name below)
METADATA_PREFIX = 'X-Amz-Meta-'

# Application Version
VERSION = "1.4.0"


###########
# AsyncIO #
###########

async def run_command_shell(
        command,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
        on_success: List[Callable] = ()):
    """Run command in subprocess (shell).

    Note:
        This can be used if you wish to execute e.g. "copy"
        on Windows, which can only be executed in the shell.
    """
    process = await asyncio.create_subprocess_shell(
        command, stdout=stdout, stderr=stderr
    )

    process_stdout, process_stderr = await process.communicate()

    # Invoke the success callbacks only when the command exited cleanly
    if process.returncode == 0:
        for success_callable in on_success:
            success_callable()

    if stdout != asyncio.subprocess.DEVNULL:
        return process_stdout.decode().strip()
    else:
        return None


def make_chunks(tasks, chunk_size):
    """Yield successive chunk_size-sized chunks from tasks.

    Note:
        Taken from https://stackoverflow.com/a/312464
        modified for python 3 only
    """
    for i in range(0, len(tasks), chunk_size):
        yield tasks[i: i + chunk_size]


def run_asyncio_commands(tasks, max_concurrent_tasks=0):
    """Run tasks asynchronously using asyncio and return results.

    If max_concurrent_tasks is set to 0, no limit is applied.

    Note:
        By default, Windows uses SelectorEventLoop, which does not support
        subprocesses. Therefore ProactorEventLoop is used on Windows.
        https://docs.python.org/3/library/asyncio-eventloops.html#windows
    """
    all_results = []

    if max_concurrent_tasks == 0:
        chunks = [tasks]
    else:
        chunks = make_chunks(tasks=tasks, chunk_size=max_concurrent_tasks)

    if asyncio.get_event_loop().is_closed():
        asyncio.set_event_loop(asyncio.new_event_loop())
    if platform.system() == "Windows":
        asyncio.set_event_loop(asyncio.ProactorEventLoop())
    loop = asyncio.get_event_loop()

    for tasks_in_chunk in chunks:
        commands = asyncio.gather(*tasks_in_chunk)
        results = loop.run_until_complete(commands)
        all_results += results

    loop.close()
    return all_results
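
# A minimal usage sketch (hypothetical commands): calling run_command_shell
# builds a coroutine without running it, and run_asyncio_commands executes the
# collected coroutines in bounded batches. This mirrors how the compress
# command below queues its tasks.
#
#   tasks = [run_command_shell('echo one'), run_command_shell('echo two')]
#   results = run_asyncio_commands(tasks, max_concurrent_tasks=2)
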
###########
# Helpers #
###########

def update(d, u):
    """Recursively merge mapping u into d, overwriting scalar values."""
    for k, v in u.items():
        if isinstance(v, collections.abc.Mapping):
            d[k] = update(d.get(k, {}), v)
        else:
            d[k] = v
    return d
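
# Example: nested mappings are merged key-by-key rather than replaced wholesale:
#
#   update({'s3': {'host': 'a', 'secure': True}}, {'s3': {'host': 'b'}})
#   -> {'s3': {'host': 'b', 'secure': True}}
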
def get_metadata_name(key):
    return METADATA_PREFIX + 'SHA256SUM'.capitalize()


def get_clean_stdin_iterator(stdin_stream):
    return (line for line in [line.strip() for line in stdin_stream if line.strip() != ''])


def strip_prefix(prefix: str, file: str) -> str:
    # Only remove the prefix from the start of the path, never mid-string
    if file.startswith(prefix):
        return file[len(prefix):]
    return file


def get_file_identity(ctx_obj, file):
    if 'REMOVE_PREFIX' in ctx_obj and ctx_obj['REMOVE_PREFIX'] is not None:
        path = strip_prefix(ctx_obj['REMOVE_PREFIX'], file)
    else:
        path = file

    # Normalize the directory separator so identities are stable across platforms
    if os.sep != '/':
        path = '/'.join(path.split(os.sep))

    return path
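
# Example (hypothetical values): with REMOVE_PREFIX set to 'build/', the file
# 'build/img/logo.png' maps to the identity 'img/logo.png', using '/' as the
# separator on every platform.
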
def list_s3_dir(s3: Minio, bucket: str, prefix: str) -> List[str]:
    found_files = []

    for obj in s3.list_objects_v2(bucket, prefix=prefix):
        if obj.is_dir:
            found_files.extend(list_s3_dir(s3, bucket, obj.object_name))
        else:
            found_files.append(obj.object_name)

    return found_files


def get_s3_client(config: Dict[str, Any]) -> Minio:
    host = config['host']
    secure = config['secure']
    access_key = config['access']
    secret_key = config['secret']
    return Minio(host, secure=secure, access_key=access_key, secret_key=secret_key)
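
# The 's3' configuration section is assumed to carry exactly the keys read
# above (values are illustrative):
#
#   "s3": {
#     "host": "localhost:9000",
#     "secure": false,
#     "access": "<access-key>",
#     "secret": "<secret-key>"
#   }
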
def prep_s3(ctx):
    s3_config = ctx.obj['CONFIG']['s3']
    s3_bucket = ctx.obj['CONTEXT']

    s3 = get_s3_client(s3_config)

    if not s3.bucket_exists(s3_bucket):
        s3.make_bucket(s3_bucket)

    return s3_bucket, s3


def get_file_sha256sum(stored_data, profile, file):
    stored_file_hash = stored_data['sha256sum']
    stored_profile_hash = stored_data['profileHash']

    sha256sum = hashlib.sha256()
    with open(file, 'rb') as f:
        for byte_block in iter(lambda: f.read(BUF_SIZE), b""):
            sha256sum.update(byte_block)
    calculated_file_hash = sha256sum.hexdigest()

    return stored_profile_hash, stored_file_hash, calculated_file_hash


def get_string_sha256sum(string: str, encoding='utf-8') -> str:
    sha256sum = hashlib.sha256()

    # Note: the input is JSON-encoded before hashing, so the digest is of the
    # JSON string form of the input, not of its raw bytes
    with io.BytesIO(json.dumps(string).encode(encoding)) as c:
        for byte_block in iter(lambda: c.read(BUF_SIZE), b''):
            sha256sum.update(byte_block)

    return sha256sum.hexdigest()
def add_nested_key(config: Dict[str, Any], path: List[str], value: str) -> bool:
    target = path[0].lower()
    if len(path) == 1:
        config[target] = value
        return True
    else:
        if target not in config:
            config[target] = {}
        add_nested_key(config[target], path[1:], value)
        return False


def read_env_config(prefix, separator='__') -> Dict[str, Any]:
    prefix = prefix + separator

    env_config = {}

    environment_variables = [
        env for env in os.environ.keys() if env.startswith(prefix)]

    for env in environment_variables:
        path = env[len(prefix):].split(separator)
        add_nested_key(env_config, path, os.environ[env])

    return env_config
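
# Example (hypothetical variable): ACM__S3__HOST=localhost:9000 becomes
#   {'s3': {'host': 'localhost:9000'}}
# and is merged over the file-based configuration by load_config below.
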
def load_config(path: str) -> Dict[str, Any]:
    # Start from the defaults shipped alongside this script
    combined_config = {}
    with open(
            os.path.join(
                os.path.dirname(os.path.realpath(__file__)),
                'acm-config-default.json'),
            'r') as combined_config_file:
        combined_config = json.load(combined_config_file)

    config = {}
    with open(path, 'r') as config_file:
        config = json.load(config_file)

    # Setup concurrency
    if 'concurrency' in config:
        config['concurrency'] = abs(int(config['concurrency']))
    else:
        config['concurrency'] = 0

    update(combined_config, config)
    update(combined_config, read_env_config('ACM'))

    # Calculate profiles hash
    profile_hashes = {}
    profile_hashes['all'] = get_string_sha256sum(
        json.dumps(combined_config['profiles']))
    for profile in combined_config['profiles'].keys():
        profile_hashes[profile] = get_string_sha256sum(
            json.dumps(combined_config['profiles'][profile]))

    combined_config['profileHashes'] = profile_hashes

    return combined_config
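
# A minimal acm-config.json, sketched from the keys this file reads elsewhere
# (values, including the cwebp command, are illustrative only):
#
#   {
#     "concurrency": 4,
#     "s3": {"host": "localhost:9000", "secure": false,
#            "access": "<access-key>", "secret": "<secret-key>"},
#     "profiles": {
#       "default": {
#         "images": {
#           "extensions": [".png", ".jpg"],
#           "outputExtension": "webp",
#           "command": "cwebp {input_file} -o {output_file}",
#           "preserveSmallerInput": true
#         }
#       }
#     }
#   }
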
@click.group()
@click.option('-d', '--debug/--no-debug', default=False)
@click.option('-c', '--config', default=lambda: os.path.join(os.getcwd(), 'acm-config.json'), show_default=True)
@click.option('-s', '--stdin/--no-stdin', default=False)
@click.option('--remove-prefix', default=None)
@click.option('--add-prefix', default=None)
@click.pass_context
def cli(ctx, debug, config, stdin, remove_prefix, add_prefix):
    ctx.ensure_object(dict)
    ctx.obj['DEBUG'] = debug
    ctx.obj['CONFIG'] = load_config(config)
    ctx.obj['READ_STDIN'] = stdin
    ctx.obj['REMOVE_PREFIX'] = remove_prefix
    ctx.obj['ADD_PREFIX'] = add_prefix
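
# Global options come before the subcommand (assuming this script is invoked
# as `acm`), e.g.:
#
#   acm -c ./acm-config.json config   # print the merged configuration
#   acm list -x my-assets             # list objects in the my-assets bucket
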
####################
# Generic Commands #
####################

@cli.command(name="config")
@click.pass_context
def print_config(ctx):
    """
    Print the configuration
    """
    print(json.dumps(ctx.obj['CONFIG'], indent=2, sort_keys=True))


###############################
# S3 Storage Focused Commands #
###############################

@cli.command(name="list")
@click.option('--sha256sum/--no-sha256sum', default=False)
@click.option('--suffix', default=None)
@click.option('-x', '--context', required=True)
@click.option('--print-identity/--no-print-identity', default=False)
@click.pass_context
def list_files(ctx, context, sha256sum, suffix, print_identity):
    """
    List all file objects in a bucket
    """
    ctx.obj['CONTEXT'] = context
    s3_config = ctx.obj['CONFIG']['s3']
    s3_bucket = ctx.obj['CONTEXT']

    s3 = get_s3_client(s3_config)

    if not s3.bucket_exists(s3_bucket):
        s3.make_bucket(s3_bucket)

    found_files: List[str] = []
    found_objects: List[str] = []

    for obj in s3.list_objects_v2(s3_bucket, recursive=False):
        if obj.is_dir:
            found_objects.extend(list_s3_dir(s3, s3_bucket, obj.object_name))
        else:
            found_objects.append(obj.object_name)

    for obj in found_objects:
        file = obj

        # Re-apply the prefix that was stripped when the file was stored
        if 'REMOVE_PREFIX' in ctx.obj and ctx.obj['REMOVE_PREFIX'] is not None:
            file = os.path.join(ctx.obj['REMOVE_PREFIX'], file)

        if suffix is not None and file.endswith(suffix):
            file = file[:-len(suffix)]
        file = file.strip()

        if sha256sum:
            file_object = s3.get_object(s3_bucket, obj)
            stored_data = json.load(file_object)
            sha256sum_value = stored_data['sha256sum']
            found_files.append(f'{sha256sum_value} {file}')
        elif print_identity:
            file_object = s3.get_object(s3_bucket, obj)
            stored_data = json.load(file_object)
            found_files.append(stored_data['storedAssetIdentity'])
        else:
            found_files.append(file)

    print(os.linesep.join(found_files))
@cli.command(name="match")
@click.option('-x', '--context', required=True)
@click.option('--print-identity/--no-print-identity', default=False)
@click.option('-p', '--profile', default='all')
@click.argument('files', nargs=-1)
@click.pass_context
def check_matched_files_hashes(ctx, context, print_identity, profile, files):
    """
    List all files that have matching stored sha256sum and profile hash
    """
    ctx.obj['CONTEXT'] = context
    s3_bucket, s3 = prep_s3(ctx)
    matching_files: List[str] = []

    if ctx.obj['READ_STDIN']:
        files = get_clean_stdin_iterator(click.get_text_stream('stdin'))

    for file in files:
        file_identity = f'{get_file_identity(ctx.obj, file)}.json'
        try:
            file_object = s3.get_object(s3_bucket, file_identity)
            stored_data = json.load(file_object)

            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(
                stored_data, profile, file)

            if calculated_file_hash == stored_file_hash \
                    and ctx.obj['CONFIG']['profileHashes'][profile] == stored_profile_hash:
                if print_identity:
                    matching_files.append(stored_data['storedAssetIdentity'])
                else:
                    matching_files.append(file)
        except NoSuchKey:
            continue
        except (ValueError, ResponseError) as e:
            print(f'ERROR: {file} {e}')

    print(os.linesep.join(matching_files))
@cli.command(name="check")
@click.option('-x', '--context', required=True)
@click.option('-p', '--profile', default='all')
@click.argument('files', nargs=-1)
@click.pass_context
def check_changed_files_hashes(ctx, context, profile, files):
    """
    List all files that do not have a matching sha256sum or profile hash
    """
    ctx.obj['CONTEXT'] = context
    s3_bucket, s3 = prep_s3(ctx)
    changed_files: List[str] = []

    if ctx.obj['READ_STDIN']:
        files = get_clean_stdin_iterator(click.get_text_stream('stdin'))

    for file in files:
        file_identity = f'{get_file_identity(ctx.obj, file)}.json'
        try:
            file_object = s3.get_object(s3_bucket, file_identity)
            stored_data = json.load(file_object)

            stored_profile_hash, stored_file_hash, calculated_file_hash = get_file_sha256sum(
                stored_data, profile, file)

            if calculated_file_hash != stored_file_hash \
                    or ctx.obj['CONFIG']['profileHashes'][profile] != stored_profile_hash:
                changed_files.append(file)
        except NoSuchKey:
            # No stored hash yet, so the file counts as changed
            changed_files.append(file)
        except (ValueError, ResponseError) as e:
            print(f'ERROR: {file} {e}')

    print(os.linesep.join(changed_files))
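
# `match` and `check` are complements over the same stored hash objects and
# are built for pipelines (hypothetical paths and context name):
#
#   find assets -type f | acm --stdin match -x asset-hashes   # up-to-date files
#   find assets -type f | acm --stdin check -x asset-hashes   # changed or new files
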
@cli.command(name="update")
@click.option('-x', '--context', required=True)
@click.option('--input-and-identity/--no-input-and-identity', default=False)
@click.option('-p', '--profile', default='all')
@click.argument('files', nargs=-1)
@click.pass_context
def update_changed_files_hashes(ctx, context, input_and_identity, profile, files):
    """
    Store new data objects for the provided files
    """
    ctx.obj['CONTEXT'] = context
    s3_bucket, s3 = prep_s3(ctx)
    updated_files: List[str] = []

    if ctx.obj['READ_STDIN']:
        files = get_clean_stdin_iterator(click.get_text_stream('stdin'))

    for file in files:
        identity = None
        if input_and_identity:
            file, identity = file.split('\t')
        file_identity = f'{get_file_identity(ctx.obj, file)}.json'
        try:
            sha256sum = hashlib.sha256()
            with open(file, 'rb') as f:
                for byte_block in iter(lambda: f.read(BUF_SIZE), b''):
                    sha256sum.update(byte_block)
            calculated_file_hash = sha256sum.hexdigest()

            object_data = {
                "sourcePath": file,
                "storedAssetIdentity": identity,
                "identity": file_identity,
                "sha256sum": calculated_file_hash,
                "profileHash": ctx.obj['CONFIG']['profileHashes'][profile]
            }

            with io.BytesIO(json.dumps(object_data, sort_keys=True, indent=None).encode('utf-8')) as data:
                data.seek(0, os.SEEK_END)
                data_length = data.tell()
                data.seek(0)

                s3.put_object(
                    s3_bucket,
                    file_identity,
                    data,
                    data_length,
                    content_type="application/json",
                    metadata={}
                )

            updated_files.append(file)
        except (ValueError, ResponseError) as e:
            print(f'ERROR: {file} {e}')

    print(os.linesep.join(updated_files))
@cli.command(name="store")
@click.option('-x', '--context', required=True)
@click.argument('files', nargs=-1)
@click.pass_context
def store_files(ctx, context, files):
    """
    Store specified files in a <context> bucket for retrieval.
    """
    ctx.obj['CONTEXT'] = context
    s3_bucket, s3 = prep_s3(ctx)
    stored_files: List[str] = []

    if ctx.obj['READ_STDIN']:
        files = get_clean_stdin_iterator(click.get_text_stream('stdin'))

    for file in files:
        file_identity = get_file_identity(ctx.obj, file)
        try:
            s3.fput_object(
                s3_bucket,
                file_identity,
                file,
                content_type="application/octet-stream"
            )
            if 'ADD_PREFIX' in ctx.obj and ctx.obj['ADD_PREFIX'] is not None:
                stored_files.append(os.path.join(
                    ctx.obj['ADD_PREFIX'], file_identity))
            else:
                stored_files.append(file)
        except ResponseError as e:
            print(f'ERROR: {file} {e}', file=sys.stderr)

    print(os.linesep.join(stored_files))
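
# Example (hypothetical paths): strip the local build prefix when naming the
# stored objects, and echo the stored paths under a replacement prefix:
#
#   find build -type f | acm --stdin --remove-prefix build/ --add-prefix dist/ store -x my-assets
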
@cli.command(name="retrieve")
@click.option('-x', '--context', required=True)
@click.option('-d', '--destination', default=None)
@click.argument('files', nargs=-1)
@click.pass_context
def retrieve_files(ctx, context, destination, files):
    """
    Retrieve specified files from a <context> bucket
    """
    ctx.obj['CONTEXT'] = context
    s3_bucket, s3 = prep_s3(ctx)
    retrieved_files: List[str] = []

    if ctx.obj['READ_STDIN']:
        files = get_clean_stdin_iterator(click.get_text_stream('stdin'))

    for file in files:
        file_identity = get_file_identity(ctx.obj, file)
        file_destination = file
        if destination is not None:
            file_destination = os.path.join(destination, file_identity)
        try:
            s3.fget_object(
                s3_bucket,
                file_identity,
                file_destination
            )
            retrieved_files.append(file_destination)
        except NoSuchKey as e:
            print(
                f'ERROR: {file_identity} {file_destination} {e}', file=sys.stderr)
        except ResponseError as e:
            print(f'ERROR: {file_destination} {e}', file=sys.stderr)

    print(os.linesep.join(retrieved_files))
@cli.command(name="clean")
@click.option('-x', '--context', required=True)
@click.option('-d', '--context-data', default=None)
@click.option('-n', '--dry-run/--no-dry-run', default=False)
@click.argument('files', nargs=-1)
@click.pass_context
def clean_files(ctx, context, context_data, dry_run, files):
    """
    Remove stored objects in a <context> bucket that do not match the specified files.
    """
    ctx.obj['CONTEXT'] = context
    s3_bucket, s3 = prep_s3(ctx)
    s3_data_bucket = context_data
    found_files: List[str] = []
    found_data_files: List[str] = []
    removed_files: List[str] = []

    if ctx.obj['READ_STDIN']:
        files = get_clean_stdin_iterator(click.get_text_stream('stdin'))

    # Go through and find all matching files
    for file in files:
        file_identity = f'{get_file_identity(ctx.obj, file)}.json'
        try:
            if s3_data_bucket is not None:
                file_object = s3.get_object(s3_bucket, file_identity)
                stored_data = json.load(file_object)
                stored_data_file_identity = stored_data['storedAssetIdentity']
                found_files.append(file_identity)
                found_data_files.append(stored_data_file_identity)
            else:
                file_object = s3.get_object(s3_bucket, file_identity)
                found_files.append(file_identity)
        except ResponseError as e:
            print(f'ERROR: ResponseError {file_identity} {e}', file=sys.stderr)
        except NoSuchKey:
            print(f'ERROR: NoSuchKey {file_identity}', file=sys.stderr)

    found_files = set(found_files)
    found_data_files = set(found_data_files)

    # Find all objects in the s3 bucket
    found_objects: List[str] = []
    for obj in s3.list_objects_v2(s3_bucket, recursive=False):
        if obj.is_dir:
            found_objects.extend(list_s3_dir(s3, s3_bucket, obj.object_name))
        else:
            found_objects.append(obj.object_name)

    # Only scan the data bucket when one was given
    found_data_objects: List[str] = []
    if s3_data_bucket is not None:
        for obj in s3.list_objects_v2(s3_data_bucket, recursive=False):
            if obj.is_dir:
                found_data_objects.extend(list_s3_dir(
                    s3, s3_data_bucket, obj.object_name))
            else:
                found_data_objects.append(obj.object_name)

    # Remove any object that was not matched by the specified files
    for file_identity in found_objects:
        if file_identity not in found_files:
            if dry_run:
                removed_files.append(f'{s3_bucket}:{file_identity}')
            else:
                try:
                    s3.remove_object(s3_bucket, file_identity)
                    removed_files.append(f'{s3_bucket}:{file_identity}')
                except ResponseError as e:
                    print(
                        f'ERROR: {s3_bucket}:{file_identity} {e}', file=sys.stderr)

    for file_data_identity in found_data_objects:
        if file_data_identity not in found_data_files:
            if dry_run:
                removed_files.append(f'{s3_data_bucket}:{file_data_identity}')
            else:
                try:
                    s3.remove_object(s3_data_bucket, file_data_identity)
                    removed_files.append(
                        f'{s3_data_bucket}:{file_data_identity}')
                except ResponseError as e:
                    print(
                        f'ERROR: {s3_data_bucket}:{file_data_identity} {e}', file=sys.stderr)

    print(os.linesep.join(removed_files))
######################################
# Asset Compression Focused Commands #
######################################

@cli.command(name="compress")
@click.option('-p', '--profile', default='default')
@click.option('-c', '--content', default='all')
@click.option('-d', '--destination', default=None)
@click.option('--print-input-and-identity/--no-print-input-and-identity', default=False)
@click.argument('files', nargs=-1)
@click.pass_context
def compress_assets(ctx, profile, content, destination, print_input_and_identity, files):
    """
    Compress the requested files and store them in a storage bucket.
    """
    profiles = ctx.obj['CONFIG']['profiles']

    if profile not in profiles:
        raise ValueError(f'Unrecognized profile: {profile}')

    default_profile: Dict[str, Any] = profiles['default']
    profile: Dict[str, Any] = profiles[profile]

    if content != 'all':
        if content not in profile and content not in default_profile:
            raise ValueError(f'Unrecognized content: {content}')

    content_configurations = []

    if content == 'all':
        # Use every content type in the profile, falling back to the default
        # profile for content types the selected profile does not override
        content_names: set = set()

        for content_name in profile.keys():
            content_names.add(content_name)
            content_configurations.append(profile[content_name])

        for content_name in default_profile.keys():
            if content_name not in content_names:
                content_names.add(content_name)
                content_configurations.append(default_profile[content_name])
    else:
        if content in profile:
            content_configurations.append(profile[content])
        else:
            content_configurations.append(default_profile[content])

    if ctx.obj['READ_STDIN']:
        files = get_clean_stdin_iterator(click.get_text_stream('stdin'))

    if destination is None:
        destination = tempfile.mkdtemp()

    task_output = []
    tasks = []
    follow_up_tasks = []

    def store_filename(storage_list: List[str], filename: str):
        """
        A simple lambda wrapper to asynchronously add processed files to the list
        :param storage_list:
        :param filename:
        :return:
        """
        return lambda: storage_list.append(filename)

    def queue_follow_up_task_if_keep_smaller_input(follow_up_tasks, input_file: str, output_file: str, keep_smaller_input: bool = True):
        """
        A lambda wrapper that queues a follow-up copy task when the compressed
        output turned out larger than the input, keeping the smaller of the two.
        """
        if keep_smaller_input:
            command = f"cp {input_file} {output_file}"

            def task():
                input_size = os.path.getsize(input_file)
                output_size = os.path.getsize(output_file)
                if output_size > input_size:
                    follow_up_tasks.append(
                        run_command_shell(
                            command,
                            stdout=asyncio.subprocess.DEVNULL,
                            stderr=asyncio.subprocess.DEVNULL,
                            on_success=[store_filename(
                                task_output,
                                f'Preserved smaller "{input_file}" {output_size} > {input_size}'
                            )]
                        )
                    )
            return task
        return lambda: True

    for input_file in files:
        for content_configuration in content_configurations:
            if any([input_file.endswith(extension) for extension in content_configuration['extensions']]):
                file = input_file
                file_extension = pathlib.Path(input_file).suffix

                if 'REMOVE_PREFIX' in ctx.obj and ctx.obj['REMOVE_PREFIX'] is not None:
                    file = strip_prefix(ctx.obj['REMOVE_PREFIX'], input_file)

                if 'preserveInputExtension' in content_configuration \
                        and content_configuration['preserveInputExtension']:
                    output_file = os.path.join(destination, file)
                else:
                    output_file_without_ext = os.path.splitext(
                        os.path.join(destination, file))[0]
                    output_file = f'{output_file_without_ext}.{content_configuration["outputExtension"]}'

                output_file_identity = get_file_identity(
                    {'REMOVE_PREFIX': destination}, output_file)

                output_file_dir = os.path.dirname(output_file)
                os.makedirs(output_file_dir, exist_ok=True)

                if 'preserveSmallerInput' in content_configuration:
                    preserve_smaller_input = bool(
                        content_configuration['preserveSmallerInput'])
                else:
                    preserve_smaller_input = True

                if 'forcePreserveSmallerInput' in content_configuration:
                    force_preserve_smaller_input = bool(
                        content_configuration['forcePreserveSmallerInput'])
                else:
                    force_preserve_smaller_input = False

                # Only preserve the input if requested AND the extensions of
                # the input and the output match (pathlib suffixes include the
                # leading dot, so compare against a dotted output extension)
                preserve_smaller_input = preserve_smaller_input and (
                    force_preserve_smaller_input
                    or file_extension == f'.{content_configuration["outputExtension"]}')

                command: str = content_configuration['command'] \
                    .replace('{input_file}', f'\'{input_file}\'') \
                    .replace('{output_file}', f'\'{output_file}\'')

                tasks.append(
                    run_command_shell(
                        command,
                        stdout=asyncio.subprocess.DEVNULL,
                        stderr=asyncio.subprocess.DEVNULL,
                        on_success=[store_filename(
                            task_output,
                            f'{input_file}\t{output_file_identity}' if print_input_and_identity else output_file
                        ), queue_follow_up_task_if_keep_smaller_input(
                            follow_up_tasks,
                            input_file,
                            output_file,
                            preserve_smaller_input
                        )]
                    )
                )

    run_asyncio_commands(
        tasks, max_concurrent_tasks=ctx.obj['CONFIG']['concurrency']
    )
    run_asyncio_commands(
        follow_up_tasks, max_concurrent_tasks=ctx.obj['CONFIG']['concurrency']
    )

    print(os.linesep.join(task_output))
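
# A typical end-to-end flow (hypothetical paths and context name): compress
# only the files whose hashes changed, then record the new hashes.
# `compress --print-input-and-identity` emits "input<TAB>identity" lines,
# which `update --input-and-identity` consumes.
#
#   find assets -type f \
#     | acm --stdin check -x asset-hashes \
#     | acm --stdin compress -d /tmp/out --print-input-and-identity \
#     | acm --stdin update -x asset-hashes --input-and-identity
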
if __name__ == '__main__':
    cli(obj={})