diff --git a/localstack/config.py b/localstack/config.py index 35337cb614c2a..ffaf6cbaf1c66 100644 --- a/localstack/config.py +++ b/localstack/config.py @@ -192,9 +192,10 @@ def in_docker(): if LOCALSTACK_HOSTNAME == HOSTNAME: DOCKER_HOST_FROM_CONTAINER = 'host.docker.internal' # update LOCALSTACK_HOSTNAME if host.docker.internal is available - if is_in_docker and LOCALSTACK_HOSTNAME == DOCKER_BRIDGE_IP: + if is_in_docker: DOCKER_HOST_FROM_CONTAINER = socket.gethostbyname('host.docker.internal') - LOCALSTACK_HOSTNAME = DOCKER_HOST_FROM_CONTAINER + if LOCALSTACK_HOSTNAME == DOCKER_BRIDGE_IP: + LOCALSTACK_HOSTNAME = DOCKER_HOST_FROM_CONTAINER except socket.error: pass diff --git a/localstack/services/es/es_api.py b/localstack/services/es/es_api.py index e610ac01af54b..1384bbbb93754 100644 --- a/localstack/services/es/es_api.py +++ b/localstack/services/es/es_api.py @@ -2,6 +2,7 @@ import time from random import randint from flask import Flask, jsonify, request, make_response +from localstack.utils import persistence from localstack.services import generic_proxy from localstack.utils.aws import aws_stack from localstack.constants import TEST_AWS_ACCOUNT_ID @@ -161,7 +162,7 @@ def get_domain_status(domain_name, deleted=False): def start_elasticsearch_instance(): # Note: keep imports here to avoid circular dependencies from localstack.services.es import es_starter - from localstack.services.infra import check_infra, restore_persisted_data, Plugin + from localstack.services.infra import check_infra, Plugin api_name = 'elasticsearch' plugin = Plugin(api_name, start=es_starter.start_elasticsearch, check=es_starter.check_elasticsearch) @@ -172,7 +173,7 @@ def start_elasticsearch_instance(): # ensure that all infra components are up and running check_infra(apis=apis, additional_checks=[es_starter.check_elasticsearch]) # restore persisted data - restore_persisted_data(apis=apis) + persistence.restore_persisted_data(apis=apis) return t1 diff --git 
a/localstack/services/generic_proxy.py b/localstack/services/generic_proxy.py index 6c4f3ee94a95a..314db3a65d735 100644 --- a/localstack/services/generic_proxy.py +++ b/localstack/services/generic_proxy.py @@ -289,7 +289,7 @@ def is_full_url(url): kwargs = { 'method': method, 'path': path, - 'data': data, + 'data': self.data_bytes, 'headers': forward_headers, 'response': response } diff --git a/localstack/services/infra.py b/localstack/services/infra.py index 3f5fb52565d57..9475c8b9584d4 100644 --- a/localstack/services/infra.py +++ b/localstack/services/infra.py @@ -270,11 +270,6 @@ def get_service_status(service, port=None): return status -def restore_persisted_data(apis): - for api in apis: - persistence.restore_persisted_data(api) - - def register_signal_handlers(): global SIGNAL_HANDLERS_SETUP if SIGNAL_HANDLERS_SETUP: @@ -464,7 +459,7 @@ def start_infra(asynchronous=False, apis=None): # ensure that all infra components are up and running check_infra(apis=apis) # restore persisted data - restore_persisted_data(apis=apis) + persistence.restore_persisted_data(apis=apis) print('Ready.') sys.stdout.flush() if not asynchronous and thread: diff --git a/localstack/services/s3/s3_listener.py b/localstack/services/s3/s3_listener.py index 161c27c432e36..1ffa3f2d67115 100644 --- a/localstack/services/s3/s3_listener.py +++ b/localstack/services/s3/s3_listener.py @@ -802,9 +802,6 @@ def forward_request(self, method, path, data, headers): if method == 'PUT' and not headers.get('content-type'): headers['content-type'] = 'binary/octet-stream' - # persist this API call to disk - persistence.record('s3', method, path, data, headers) - # parse query params query = parsed_path.query path = parsed_path.path @@ -893,6 +890,9 @@ def return_response(self, method, path, data, headers, response): method = to_str(method) bucket_name = get_bucket_name(path, headers) + # persist this API call to disk + persistence.record('s3', method, path, data, headers, response) + # No path-name based 
bucket name? Try host-based hostname_parts = headers['host'].split('.') if (not bucket_name or len(bucket_name) == 0) and len(hostname_parts) > 1: diff --git a/localstack/utils/bootstrap.py b/localstack/utils/bootstrap.py index fa52e910e9d90..eaa28a0e00296 100644 --- a/localstack/utils/bootstrap.py +++ b/localstack/utils/bootstrap.py @@ -425,7 +425,7 @@ def stop(self, quiet=False): def run(cmd, print_error=True, asynchronous=False, stdin=False, stderr=subprocess.STDOUT, outfile=None, env_vars=None, inherit_cwd=False, inherit_env=True, tty=False): - # don't use subprocess module inn Python 2 as it is not thread-safe + # don't use subprocess module in Python 2 as it is not thread-safe # http://stackoverflow.com/questions/21194380/is-subprocess-popen-not-thread-safe if six.PY2: import subprocess32 as subprocess diff --git a/localstack/utils/persistence.py b/localstack/utils/persistence.py index 333b5ccc5aeb6..693deac53af25 100644 --- a/localstack/utils/persistence.py +++ b/localstack/utils/persistence.py @@ -8,7 +8,12 @@ from localstack.utils.aws import aws_stack from localstack.utils.common import to_bytes, to_str -API_FILE_PATTERN = '{data_dir}/{api}_api_calls.json' +USE_SINGLE_DUMP_FILE = True + +if USE_SINGLE_DUMP_FILE: + API_FILE_PATTERN = '{data_dir}/recorded_api_calls.json' +else: + API_FILE_PATTERN = '{data_dir}/{api}_api_calls.json' # Stack with flags to indicate whether we are currently re-playing API calls. 
# (We should not be re-playing and recording at the same time) @@ -18,37 +23,46 @@ API_FILE_PATHS = {} # set up logger -LOGGER = logging.getLogger(__name__) +LOG = logging.getLogger(__name__) -def should_record(api, method, path, data, headers): +def should_record(api, method, path, data, headers, response=None): """ Decide whether or not a given API call should be recorded (persisted to disk) """ if api == 's3': return method in ['PUT', 'POST', 'DELETE'] return False -def record(api, method, path, data, headers): +def record(api, method, path, data, headers, response=None): """ Record a given API call to a persistent file on disk """ file_path = get_file_path(api) - if CURRENTLY_REPLAYING or not file_path or not should_record(api, method, path, data, headers): + should_be_recorded = should_record(api, method, path, data, headers, response=response) + if CURRENTLY_REPLAYING or not file_path or not should_be_recorded: return entry = None try: if isinstance(data, dict): data = json.dumps(data) - if data or data in [u'', b'']: - try: - data = to_bytes(data) - except Exception as e: - LOGGER.warning('Unable to call to_bytes: %s' % e) - data = to_str(base64.b64encode(data)) + + def get_recordable_data(data): + if data or data in [u'', b'']: + try: + data = to_bytes(data) + except Exception as e: + LOG.warning('Unable to call to_bytes: %s' % e) + data = to_str(base64.b64encode(data)) + return data + + data = get_recordable_data(data) + response_data = get_recordable_data('' if response is None else response.content) + entry = { 'a': api, 'm': method, 'p': path, 'd': data, - 'h': dict(headers) + 'h': dict(headers), + 'rd': response_data } with open(file_path, 'a') as dumpfile: dumpfile.write('%s\n' % json.dumps(entry)) @@ -56,15 +70,19 @@ def record(api, method, path, data, headers): print('Error recording API call to persistent file: %s %s' % (e, traceback.format_exc())) +def prepare_replay_data(command): + data = command['d'] + data = data and base64.b64decode(data) + 
return data + + def replay_command(command): function = getattr(requests, command['m'].lower()) - data = command['d'] - if data: - data = base64.b64decode(data) + data = prepare_replay_data(command) endpoint = aws_stack.get_local_service_url(command['a']) full_url = (endpoint[:-1] if endpoint.endswith('/') else endpoint) + command['p'] - result = function(full_url, data=data, headers=command['h'], verify=False) - return result + response = function(full_url, data=data, headers=command['h'], verify=False) + return response def replay(api): @@ -83,11 +101,15 @@ def replay(api): finally: CURRENTLY_REPLAYING.pop(0) if count: - LOGGER.info('Restored %s API calls from persistent file: %s' % (count, file_path)) + LOG.info('Restored %s API calls from persistent file: %s' % (count, file_path)) -def restore_persisted_data(api): - return replay(api) +def restore_persisted_data(apis): + if USE_SINGLE_DUMP_FILE: + return replay('_all_') + apis = apis if isinstance(apis, list) else [apis] + for api in apis: + replay(api) # ---------------