Merge changes from topic "data-download"
* changes:
Improve download of npm packages
Improve download of http files
Improve download of rpm
Improve download of git repositories
Improve docker image download script
diff --git a/build/download/__init__.py b/build/download/__init__.py
new file mode 100644
index 0000000..f0efbc1
--- /dev/null
+++ b/build/download/__init__.py
@@ -0,0 +1,22 @@
+
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# COPYRIGHT NOTICE STARTS HERE
+
+# Copyright 2019 © Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# COPYRIGHT NOTICE ENDS HERE
+
diff --git a/build/download/base.py b/build/download/base.py
new file mode 100644
index 0000000..5bcd0ef
--- /dev/null
+++ b/build/download/base.py
@@ -0,0 +1,114 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# COPYRIGHT NOTICE STARTS HERE
+
+# Copyright 2019 © Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# COPYRIGHT NOTICE ENDS HERE
+
+
+import concurrent.futures
+import os
+import progressbar
+import prettytable
+import requests
+from distutils.spawn import find_executable
+
+progressbar.streams.wrap_stdout()
+progressbar.streams.wrap_stderr()
+
+
+def load_list(item_list):
+ """
+ Parse list with items to be downloaded.
+ :param item_list: File with list of items (1 line per item)
+ :return: set of items from file
+ """
+ with open(item_list, 'r') as f:
+ return {item for item in (line.strip() for line in f) if item}
+
+
+def init_progress(items_name):
+ progress_widgets = ['Downloading {}: '.format(items_name),
+ progressbar.Bar(), ' ',
+ progressbar.Percentage(), ' ',
+ '(', progressbar.SimpleProgress(), ')']
+
+ progress = progressbar.ProgressBar(widgets=progress_widgets,
+ poll_rate=1.0,
+ redirect_stdout=True)
+ return progress
+
+
+def start_progress(progress, target_count, skipping, log):
+ log_skipping(skipping, log)
+ log.info("Initializing download. Takes a while.")
+
+ progress.max_value = target_count
+ progress.start()
+ progress.update(len(skipping))
+
+
+def log_skipping(skipping_iterable, logger):
+ for skipped in skipping_iterable:
+ logger.info('Skipping: {}'.format(skipped))
+
+
+def run_concurrent(workers, progress, fn, iterable, *args):
+ with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
+ futures = [executor.submit(fn, item, *args) for item in iterable]
+ error_count = 0
+ for future in concurrent.futures.as_completed(futures):
+ error = future.exception()
+ if error:
+ error_count += 1
+ progress.update()
+ else:
+ progress.update(progress.value +1)
+ return error_count
+
+
+def finish_progress(progress, error_count, log):
+ progress.finish(dirty=error_count > 0)
+ log.info('Download ended. Elapsed time {}'.format(progress.data()['time_elapsed']))
+
+def check_tool(name):
+ return find_executable(name)
+
+def save_to_file(dst, content):
+ """
+ Save downloaded byte content to file
+ :param dst: path to file to save content to
+ :param content: byte content of file
+ """
+ dst_dir = os.path.dirname(dst)
+ if not os.path.exists(dst_dir):
+ os.makedirs(dst_dir)
+ with open(dst, 'wb') as dst_file:
+ dst_file.write(content)
+
+def make_get_request(url):
+ req = requests.get(url)
+ req.raise_for_status()
+ return req
+
+def simple_check_table(target, missing):
+ table = prettytable.PrettyTable(['Name', 'Downloaded'])
+ table.align['Name'] = 'l'
+ for item in sorted(target):
+ table.add_row([item, item not in missing])
+ return table
+
diff --git a/build/download/docker_images.py b/build/download/docker_images.py
new file mode 100755
index 0000000..e4e742b
--- /dev/null
+++ b/build/download/docker_images.py
@@ -0,0 +1,268 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# COPYRIGHT NOTICE STARTS HERE
+
+# Copyright 2019 © Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# COPYRIGHT NOTICE ENDS HERE
+
+
+import argparse
+import concurrent.futures
+import docker
+import itertools
+import json
+import logging
+import os
+import prettytable
+import sys
+import threading
+from retrying import retry
+
+import base
+
+log = logging.getLogger(__name__)
+
+
+def image_filename(image_name):
+ """
+ Get a name of a file where image will be saved.
+ :param image_name: Name of the image from list
+ :return: Filename of the image
+ """
+ return '{}.tar'.format(image_name.replace(':', '_').replace('/', '_'))
+
+
+def image_registry_name(image_name):
+ """
+ Get the name as shown in local registry. Since some strings are not part of name
+ when using default registry e.g. docker.io
+ :param image_name: name of the image from the list
+ :return: name of the image as it is shown by docker
+ """
+ name = image_name
+
+ if name.startswith('docker.io/'):
+ name = name.replace('docker.io/', '')
+
+ if name.startswith('library/'):
+ name = name.replace('library/', '')
+
+ if ':' not in name.rsplit('/')[-1]:
+ name = '{}:latest'.format(name)
+
+ return name
+
+
+def not_pulled_images(docker_client, target_list):
+ """
+ Get set of images that are not pulled on local system.
+ :param docker_client: docker.client.DockerClient
+ :param target_list: list of images to look for
+ :return: (set) images that are not present on local system
+ """
+ pulled = set(itertools.chain.from_iterable((image.tags for image
+ in docker_client.images.list())))
+ return {image for image in target_list if image_registry_name(image) not in pulled}
+
+
+def not_saved(target_images, target_dir):
+ """
+ Get set of images that are not saved in target directory
+ :param target_images: List of images to check for
+ :param target_dir: Directory where those images should be
+ :return: (set) Images that are missing from target directory
+ """
+ return set(image for image in target_images
+ if not os.path.isfile('/'.join((target_dir, image_filename(image)))))
+
+
+def missing(docker_client, target_list, save, target_dir):
+ """
+ Get dictionary of images not present locally.
+ :param docker_client: docker.client.DockerClient for communication with docker
+ :param target_list: list of desired images
+ :param save: (boolean) check for saved images
+ :param target_dir: target directory for saved images
+ :return: Dictionary of missing images ('not_pulled', 'not_saved')
+ """
+ return {'not_pulled': not_pulled_images(docker_client, target_list),
+ 'not_saved': not_saved(target_list, target_dir) if save else set()}
+
+
+def merge_dict_sets(dictionary):
+ return set.union(*dictionary.values())
+
+
+def check_table(check_list, missing, save):
+ table = prettytable.PrettyTable(['Image', 'Pulled', 'Saved'])
+ table.align['Image'] = 'l'
+ for image in sorted(check_list):
+ pulled = not image in missing['not_pulled']
+ download_state = [pulled]
+ if save:
+ # if not pulled save anyway
+ download_state.append(pulled and not image in missing['not_saved'])
+ else:
+ download_state.append('Not checked')
+ table.add_row([image] + download_state)
+ return table
+
+
+@retry(stop_max_attempt_number=5, wait_fixed=5000)
+def pull_image(docker_client, image_name):
+ """
+ Pull docker image.
+ :param docker_client: docker.client.DockerClient for communication with docker
+ :param image_name: name of the image to be pulled
+ :return: pulled image (image object)
+ :raises docker.errors.APIError: after unsuccessful retries
+ """
+ if ':' not in image_name.rsplit('/')[-1]:
+ image_name = '{}:latest'.format(image_name)
+ try:
+ image = docker_client.images.pull(image_name)
+ log.info('Image {} pulled'.format(image_name))
+ return image
+ except docker.errors.APIError as err:
+ log.warning('Failed: {}: {}. Retrying...'.format(image_name, err))
+ raise err
+
+
+def save_image(image_name, image, output_dir, docker_client=None):
+ """
+ Save image to tar.
+ :param output_dir: path to destination directory
+ :param image: image object from pull_image function
+ :param image_name: name of the image from list
+ :param docker_client: docker.client.DockerClient for communication with docker
+ :return: None
+ """
+ dst = '{}/{}'.format(output_dir, image_filename(image_name))
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ if not isinstance(image, docker.models.images.Image):
+ image = docker_client.images.get(image_name)
+ try:
+ with open(dst, 'wb') as f:
+ for chunk in image.save(named=image_registry_name(image_name)):
+ f.write(chunk)
+ log.info('Image {} saved as {}'.format(image_name, dst))
+ except Exception as err:
+ os.remove(dst)
+ raise err
+
+
+def download_docker_image(image, save, output_dir, docker_client):
+ """ Pull and save docker image from specified docker registry
+ :param docker_client: docker.client.DockerClient for communication with docker
+ :param image: image to be downloaded
+ :param save: boolean - save image to disk or skip saving
+ :param output_dir: directory where image will be saved
+ :return: None
+ """
+ log.info('Downloading image: {}'.format(image))
+ try:
+ pulled_image = pull_image(docker_client, image)
+ if save:
+ save_image(image, pulled_image, output_dir)
+ except Exception as err:
+ log.error('Error downloading {}: {}'.format(image, err))
+ raise err
+
+
+def download(image_list, save, output_dir, check_mode, progress, workers=3):
+ """
+ Download images from list
+ :param image_list: list of images to be downloaded
+ :param save: whether images should be saved to disk
+ :param output_dir: directory where images will be saved
+ :param check_mode: only check for missing images. No download
+    :param progress: progressbar.ProgressBar to show how far download is
+ :return: None
+ """
+ try:
+ docker_client = docker.client.DockerClient(version='auto')
+ except docker.errors.DockerException as err:
+ log.error(err)
+        log.error('Error creating docker client. Check if docker is installed and running'
+ ' or if you have right permissions.')
+ raise err
+
+ target_images = base.load_list(image_list)
+ missing_images = missing(docker_client, target_images, save, output_dir)
+
+ if check_mode:
+ log.info(check_table(target_images, missing_images, save))
+ return
+
+ skipping = target_images - merge_dict_sets(missing_images)
+
+ base.start_progress(progress, len(target_images), skipping, log)
+
+ # if pulling and save is True. Save every pulled image to assure parity
+ error_count = base.run_concurrent(workers, progress, download_docker_image, missing_images['not_pulled'],
+ save, output_dir, docker_client)
+ # only save those that are pulled already but not saved
+ error_count += base.run_concurrent(workers, progress, save_image,
+ missing_images['not_saved'] - missing_images['not_pulled'],
+ None, output_dir, docker_client)
+
+ if error_count > 0:
+ log.error('{} images were not downloaded'.format(error_count))
+ missing_images = missing(docker_client, target_images, save, output_dir)
+ log.info(check_table(merge_dict_sets(missing_images), missing_images, save))
+
+ base.finish_progress(progress, error_count, log)
+
+ return error_count
+
+
+def run_cli():
+ parser = argparse.ArgumentParser(description='Download docker images from list')
+ parser.add_argument('image_list', metavar='image-list',
+ help='File with list of images to download.')
+ parser.add_argument('--save', '-s', action='store_true', default=False,
+ help='Save images (without it only pull is executed)')
+ parser.add_argument('--output-dir', '-o', default=os.getcwd(),
+ help='Download destination')
+ parser.add_argument('--check', '-c', action='store_true', default=False,
+ help='Check what is missing. No download.'
+                             ' Use in combination with -s to check saved images as well.')
+ parser.add_argument('--debug', action='store_true', default=False,
+ help='Turn on debug output')
+ parser.add_argument('--workers', type=int, default=3,
+ help='Set maximum workers for parallel download (default: 3)')
+
+ args = parser.parse_args()
+
+ if args.debug:
+ logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+ else:
+ logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(message)s')
+
+ progress = base.init_progress('Docker images') if not args.check else None
+ try:
+ sys.exit(download(args.image_list, args.save, args.output_dir, args.check,
+ progress, args.workers))
+ except docker.errors.DockerException:
+ log.error('Irrecoverable error detected.')
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ run_cli()
+
diff --git a/build/download/git_repos.py b/build/download/git_repos.py
new file mode 100755
index 0000000..e388e94
--- /dev/null
+++ b/build/download/git_repos.py
@@ -0,0 +1,93 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# COPYRIGHT NOTICE STARTS HERE
+
+# Copyright 2019 © Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# COPYRIGHT NOTICE ENDS HERE
+
+import argparse
+import subprocess
+import logging
+import sys
+import os
+from retrying import retry
+
+import base
+
+log = logging.getLogger(name=__name__)
+
+@retry(stop_max_attempt_number=5, wait_fixed=5000)
+def clone_repo(dst, repo, branch=None):
+ if branch:
+ command = 'git clone -b {} --single-branch https://{} --bare {}'.format(branch, repo, dst)
+ else:
+ command = 'git clone https://{} --bare {}'.format(repo, dst)
+ log.info('Running: {}'.format(command))
+ log.info(subprocess.check_output(command.split(), stderr=subprocess.STDOUT).decode())
+ log.info('Downloaded: {}'.format(repo))
+
+
+def download(git_list, dst_dir, progress):
+ if not base.check_tool('git'):
+ log.error('ERROR: git is not installed')
+ progress.finish(dirty=True)
+ return 1
+
+ git_set = {tuple(item.split()) for item in base.load_list(git_list)
+ if not item.startswith('#')}
+
+ error_count = 0
+
+ base.start_progress(progress, len(git_set), [], log)
+
+ for repo in git_set:
+ dst = '{}/{}'.format(dst_dir, repo[0])
+ if os.path.isdir(dst):
+ log.warning('Directory {} already exists. Repo probably present'.format(dst))
+ progress.update(progress.value + 1)
+ continue
+ try:
+ clone_repo(dst, *repo)
+ progress.update(progress.value + 1)
+ except subprocess.CalledProcessError as err:
+ log.error(err.output.decode())
+ error_count += 1
+
+ base.finish_progress(progress, error_count, log)
+ if error_count > 0:
+        log.error('{} repositories were not downloaded. Check logs for details'.format(error_count))
+ return error_count
+
+
+def run_cli():
+ parser = argparse.ArgumentParser(description='Download git repositories from list')
+ parser.add_argument('git_list', metavar='git-list',
+                        help='File with list of git repositories to download.')
+ parser.add_argument('--output-dir', '-o', default=os.getcwd(),
+ help='Download destination')
+
+ args = parser.parse_args()
+
+ logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(message)s')
+
+ progress = base.init_progress('git repositories')
+
+ sys.exit(download(args.git_list, args.output_dir, progress))
+
+
+if __name__ == '__main__':
+ run_cli()
diff --git a/build/download/http_files.py b/build/download/http_files.py
new file mode 100755
index 0000000..f5b1e59
--- /dev/null
+++ b/build/download/http_files.py
@@ -0,0 +1,131 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# COPYRIGHT NOTICE STARTS HERE
+
+# Copyright 2019 © Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# COPYRIGHT NOTICE ENDS HERE
+
+
+import argparse
+import concurrent.futures
+import logging
+import os
+import sys
+from retrying import retry
+
+import base
+
+log = logging.getLogger(__name__)
+
+@retry(stop_max_attempt_number=5, wait_fixed=2000)
+def get_file(file_uri):
+ """
+ Get file from the Internet
+ :param file_uri: address of file
+ :return: byte content of file
+ """
+ if not file_uri.startswith('http'):
+ file_uri = 'http://' + file_uri
+ file_req = base.make_get_request(file_uri)
+ return file_req.content
+
+
+def download_file(file_uri, dst_dir):
+ """
+ Download http file and save it to file.
+ :param file_uri: http address of file
+ :param dst_dir: directory where file will be saved
+ """
+ log.info('Downloading: {}'.format(file_uri))
+ dst_path = '{}/{}'.format(dst_dir, file_uri.rsplit('//')[-1])
+ try:
+ file_content = get_file(file_uri)
+ base.save_to_file(dst_path, file_content)
+ except Exception as err:
+ if os.path.isfile(dst_path):
+ os.remove(dst_path)
+ log.error('Error downloading: {}: {}'.format(file_uri, err))
+ raise err
+ log.info('Downloaded: {}'.format(file_uri))
+
+
+def missing(file_set, dst_dir):
+ return {file for file in file_set if not os.path.isfile('{}/{}'.format(dst_dir, file))}
+
+
+def download(data_list, dst_dir, check, progress, workers=None):
+ """
+ Download files specified in data list
+ :param data_list: path to file with list
+ :param dst_dir: destination directory
+ :param check: boolean check mode
+ :param progress: progressbar.ProgressBar to monitor progress
+ :param workers: workers to use for parallel execution
+ :return: 0 if success else number of errors
+ """
+ file_set = base.load_list(data_list)
+ missing_files = missing(file_set, dst_dir)
+ target_count = len(file_set)
+
+ if check:
+ log.info(base.simple_check_table(file_set, missing_files))
+ return 0
+
+ skipping = file_set - missing_files
+
+ base.start_progress(progress, len(file_set), skipping, log)
+
+ error_count = base.run_concurrent(workers, progress, download_file, missing_files, dst_dir)
+
+ if error_count > 0:
+ log.error('{} files were not downloaded. Check log for specific failures.'.format(error_count))
+
+ base.finish_progress(progress, error_count, log)
+
+ return error_count
+
+def run_cli():
+ """
+ Run as cli tool
+ """
+ parser = argparse.ArgumentParser(description='Download http files from list')
+ parser.add_argument('file_list', metavar='file-list',
+ help='File with list of http files to download')
+ parser.add_argument('--output-dir', '-o', default=os.getcwd(),
+ help='Destination directory for saving')
+ parser.add_argument('--check', '-c', action='store_true', default=False,
+ help='Check mode')
+ parser.add_argument('--debug', action='store_true', default=False,
+ help='Turn on debug output')
+ parser.add_argument('--workers', type=int, default=None,
+ help='Set maximum workers for parallel download (default: cores * 5)')
+
+ args = parser.parse_args()
+
+ if args.debug:
+ logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+ else:
+ logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(message)s')
+
+ progress = base.init_progress('http files') if not args.check else None
+
+ sys.exit(download(args.file_list, args.output_dir, args.check, progress, args.workers))
+
+
+if __name__ == '__main__':
+ run_cli()
+
diff --git a/build/download/npm_packages.py b/build/download/npm_packages.py
new file mode 100755
index 0000000..c174e2c
--- /dev/null
+++ b/build/download/npm_packages.py
@@ -0,0 +1,121 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# COPYRIGHT NOTICE STARTS HERE
+
+# Copyright 2019 © Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# COPYRIGHT NOTICE ENDS HERE
+
+import argparse
+import concurrent.futures
+import hashlib
+import logging
+import os
+import sys
+from retrying import retry
+
+import base
+
+log = logging.getLogger(name=__name__)
+
+
+@retry(stop_max_attempt_number=5, wait_fixed=5000)
+def get_npm(registry, npm_name, npm_version):
+ npm_url = '{}/{}/{}'.format(registry, npm_name, npm_version)
+ npm_req = base.make_get_request(npm_url)
+ npm_json = npm_req.json()
+ tarball_url = npm_json['dist']['tarball']
+ shasum = npm_json['dist']['shasum']
+ tarball_req = base.make_get_request(tarball_url)
+ tarball = tarball_req.content
+ if hashlib.sha1(tarball).hexdigest() == shasum:
+ return tarball
+ else:
+ raise Exception('{}@{}: Wrong checksum. Retrying...'.format(npm_name, npm_version))
+
+
+def download_npm(npm, registry, dst_dir):
+ log.info('Downloading: {}'.format(npm))
+ npm_name, npm_version = npm.split('@')
+ dst_path = '{}/{}-{}.tgz'.format(dst_dir, npm_name, npm_version)
+ try:
+ tarball = get_npm(registry, *npm.split('@'))
+ base.save_to_file(dst_path, tarball)
+ except Exception as err:
+ if os.path.isfile(dst_path):
+ os.remove(dst_path)
+ log.error('Failed: {}: {}'.format(npm, err))
+ raise err
+ log.info('Downloaded: {}'.format(npm))
+
+
+def missing(npm_set, dst_dir):
+ return {npm for npm in npm_set
+ if not os.path.isfile('{}/{}-{}.tgz'.format(dst_dir, *npm.split('@')))}
+
+
+def download(npm_list, registry, dst_dir, check_mode, progress=None, workers=None):
+ npm_set = base.load_list(npm_list)
+ target_count = len(npm_set)
+ missing_npms = missing(npm_set, dst_dir)
+
+ if check_mode:
+ log.info(base.simple_check_table(npm_set, missing_npms))
+ return 0
+
+ skipping = npm_set - missing_npms
+
+ base.start_progress(progress, len(npm_set), skipping, log)
+ error_count = base.run_concurrent(workers, progress, download_npm, missing_npms, registry, dst_dir)
+
+ if error_count > 0:
+ log.error('{} packages were not downloaded. Check log for specific failures.'.format(error_count))
+
+ base.finish_progress(progress, error_count, log)
+
+ return error_count
+
+
+def run_cli():
+ parser = argparse.ArgumentParser(description='Download npm packages from list')
+ parser.add_argument('npm_list', metavar='npm-list',
+ help='File with list of npm packages to download.')
+ parser.add_argument('--registry', '-r', default='https://registry.npmjs.org',
+                        help='npm registry to download packages from')
+ parser.add_argument('--output-dir', '-o', default=os.getcwd(),
+ help='Download destination')
+ parser.add_argument('--check', '-c', action='store_true', default=False,
+ help='Check what is missing. No download.')
+ parser.add_argument('--debug', action='store_true', default=False,
+ help='Turn on debug output')
+ parser.add_argument('--workers', type=int, default=None,
+ help='Set maximum workers for parallel download (default: cores * 5)')
+
+ args = parser.parse_args()
+
+ if args.debug:
+ logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+ else:
+ logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(message)s')
+
+ progress = base.init_progress('npm packages') if not args.check else None
+ sys.exit(download(args.npm_list, args.registry, args.output_dir, args.check, progress,
+ args.workers))
+
+
+if __name__ == '__main__':
+ run_cli()
+
diff --git a/build/download/requirements.txt b/build/download/requirements.txt
new file mode 100644
index 0000000..875f0c6
--- /dev/null
+++ b/build/download/requirements.txt
@@ -0,0 +1,5 @@
+docker==3.7.2
+futures==3.2.0; python_version == '2.7'
+prettytable==0.7.2
+progressbar2==3.39.3
+retrying==1.3.3
diff --git a/build/download/rpm_packages.py b/build/download/rpm_packages.py
new file mode 100755
index 0000000..7f9700a
--- /dev/null
+++ b/build/download/rpm_packages.py
@@ -0,0 +1,66 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# COPYRIGHT NOTICE STARTS HERE
+
+# Copyright 2019 © Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# COPYRIGHT NOTICE ENDS HERE
+
+import argparse
+import subprocess
+import logging
+import sys
+import os
+
+import base
+
+log = logging.getLogger(name=__name__)
+
+
+def download(rpm_list, dst_dir):
+ if not base.check_tool('yumdownloader'):
+ log.error('ERROR: yumdownloader is not installed')
+ return 1
+
+ rpm_set = base.load_list(rpm_list)
+
+ command = 'yumdownloader --destdir={} {}'.format(dst_dir, ' '.join(rpm_set))
+ log.info('Running command: {}'.format(command))
+ try:
+ subprocess.check_call(command.split())
+ log.info('Downloaded')
+ except subprocess.CalledProcessError as err:
+ log.error(err.output)
+ return err.returncode
+
+
+
+def run_cli():
+ parser = argparse.ArgumentParser(description='Download rpm packages from list')
+ parser.add_argument('rpm_list', metavar='rpm-list',
+                        help='File with list of rpm packages to download.')
+ parser.add_argument('--output-dir', '-o', default=os.getcwd(),
+ help='Download destination')
+
+ args = parser.parse_args()
+
+ logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(message)s')
+
+ sys.exit(download(args.rpm_list, args.output_dir))
+
+
+if __name__ == '__main__':
+ run_cli()