diff --git a/conf/appconfig.py b/conf/appconfig.py index 9a0e77c..3586015 100644 --- a/conf/appconfig.py +++ b/conf/appconfig.py @@ -78,13 +78,6 @@ 'name': 'github', 'type': 'github' } - }, - 'default': { - 'ref': CONFIG_PROVIDER_DEFAULT, - 'meta-info': { - 'name': 'default', - 'type': CONFIG_PROVIDER_DEFAULT, - } } } @@ -92,12 +85,18 @@ 'CONFIG_PROVIDER_LIST', 'default,etcd').split(',') MIME_JSON = 'application/json' +MIME_YAML = 'application/yaml' MIME_HTML = 'text/html' -MIME_ROOT_V1 = 'application/vnd.configservice.root.v1+json' -MIME_HEALTH_V1 = 'application/vnd.configservice.health.v1+json' +MIME_ROOT_V1 = 'application/vnd.totem-config-service.root.v1+json' +MIME_HEALTH_V1 = 'application/vnd.totem.health-v1+json' +MIME_PROVIDERS_V1 = 'application/vnd.totem-config-service.providers.v1+json' +MIME_PROVIDER_V1 = 'application/vnd.totem-config-service.provider.v1+json' SCHEMA_ROOT_V1 = 'root-v1' SCHEMA_HEALTH_V1 = 'health-v1' +SCHEMA_PROVIDERS_V1 = 'providers-v1' +SCHEMA_PROVIDER_V1 = 'provider-v1' +SCHEMA_CONFIG_META_V1 = 'config-meta-v1' API_MAX_PAGE_SIZE = 1000 API_DEFAULT_PAGE_SIZE = 10 diff --git a/configservice/__init__.py b/configservice/__init__.py index b8023d8..b794fd4 100644 --- a/configservice/__init__.py +++ b/configservice/__init__.py @@ -1 +1 @@ -__version__ = '0.0.1' +__version__ = '0.1.0' diff --git a/configservice/cluster_config/effective.py b/configservice/cluster_config/effective.py index 4a8679b..5f94377 100644 --- a/configservice/cluster_config/effective.py +++ b/configservice/cluster_config/effective.py @@ -88,15 +88,7 @@ def merge(current_config, provider, *merge_paths): return dict_merge( current_config, provider.load(name, *merge_paths)) - - use_paths = list(paths) - while True: - for provider in self.providers: - merged_config = merge(merged_config, provider, *use_paths) - if use_paths: - use_paths.pop() - else: - break - + for provider in self.providers: + merged_config = merge(merged_config, provider, *paths) return merged_config 
USE_TESTS = ('starting_with', 'matching', )


def apply_conditions(env):
    """
    Registers the custom Jinja *tests* (not filters) listed in USE_TESTS
    on the given environment, so templates can use e.g.
    ``{% if x is starting_with 'prefix' %}``.

    :param env: Jinja environment
    :return: The same environment, with the tests registered.
    """

    for name in USE_TESTS:
        env.tests[name] = globals()[name]
    return env


def starting_with(value, prefix):
    """
    Jinja test that checks if value starts with prefix.

    Both arguments are coerced to ``str`` before comparison.

    :param value: Input source
    :type value: str
    :param prefix: Prefix to look for
    :return: True if value starts with prefix. False otherwise
    :rtype: bool
    """
    return str(value).startswith(str(prefix))


def matching(value, pattern, casesensitive=True):
    """
    Jinja test that performs a regex match.

    Note: ``re.match`` is used, so the pattern is anchored at the start
    of value (but not at the end).

    :param value: Input source
    :type value: str
    :param pattern: Regex pattern to be matched
    :param casesensitive: When False, match case-insensitively.
        Defaults to True.
    :type casesensitive: bool
    :return: True if matches. False otherwise
    :rtype: bool
    """
    flags = re.I if not casesensitive else 0
    return re.match(str(pattern), str(value), flags) is not None
provider_type = provider_type.strip() @@ -98,22 +111,6 @@ def _get_github_provider(): ) -@repoze.lru.lru_cache(1) -def _load_job_schema(schema_name=None): - """ - Helper function that loads given schema - - :param schema_name: - :return: - """ - base_url = 'http://localhost:%d' % API_PORT - schema_name = schema_name or 'job-config-v1' - fname = 'schemas/{0}.json'.format(schema_name) - with open(fname) as schema_file: - data = schema_file.read().replace('${base_url}', base_url) - return json.loads(data) - - def get_provider(provider_type): """ Factory method to create config provider instance. @@ -125,12 +122,10 @@ def get_provider(provider_type): :return: AbstractConfigProvider instance. :rtype: AbstractConfigProvider """ - if provider_type == 'default': - return get_provider(CONFIG_PROVIDERS['default']['ref']) if provider_type not in get_provider_types(): raise ConfigProviderNotFound(provider_type) - locator = '_get_%s_provider' % (provider_type) + locator = '_get_%s_provider' % provider_type if locator in globals(): return globals()[locator]() @@ -146,35 +141,114 @@ def _json_compatible_config(config): return json.loads(json.dumps(config)) -def load_config(*paths, **kwargs): +def _expand_groups(groups): """ - Loads config for given path and provider type. + Expand group path + :param groups: Groups that needs to be expanded + :type groups: list + :return: Expanded groups + :rtype: list + """ + expanded = [] + for group in groups: + if group == '..': + if expanded: + expanded.pop() + else: + expanded.append(group) + return expanded - :param paths: Tuple consisting of nested level path - :type paths: tuple - :keyword default_variables: Variables to be applied during template - evaluation - :type default_variables: dict - :keyword provider_type: Type of provider - :type provider_type: str - :keyword config_names: List of config names to be loaded. 
def load_config(meta, processed_paths=None):
    """
    Loads (and optionally evaluates) config for the given meta information.

    The config identified by meta ('provider-type', 'name', 'groups') is
    loaded and recursively merged with its '.parent' config chain; an
    optional 'default-config' from meta is merged in as well.  When
    meta['evaluate'] is truthy, the merged config is template-evaluated
    and normalized before being returned.

    :param meta: Meta information for config loading. Recognized keys:
        'provider-type', 'name', 'groups', 'default-config', 'evaluate',
        'default-variables', 'transformations', 'encrypted-keys'.
    :type meta: dict
    :keyword processed_paths: List of paths that are already processed
        (used to break cycles in the parent chain).
    :type processed_paths: list
    :return: Parsed configuration
    :rtype: dict
    :raises ConfigParseError: when underlying YAML/schema parsing fails.
    """
    # Fill in defaults for any meta keys the caller omitted.
    # NOTE(review): assumes dict_merge gives precedence to its first
    # argument (the caller-supplied meta) -- confirm against
    # configservice.util.dict_merge.
    meta = dict_merge(meta, {
        'provider-type': 'effective',
        'name': 'totem',
        'default-config': None,
        'groups': []
    })

    # Add default schema configuration
    meta = dict_merge(meta, {
        'schema-config': {
            'schema': None,
            'groups': list(meta.get('groups') or []),
            'provider-type': meta.get('provider-type')
        }
    })
    processed_paths = processed_paths or []
    provider_type = meta.get('provider-type')
    config_name = meta.get('name')
    default_config = meta.get('default-config')
    # Copy so recursive calls do not mutate the caller's list.
    processed_paths = copy.deepcopy(processed_paths)
    groups = list(meta.get('groups') or [])
    # Key identifying this (provider, groups) combination for cycle
    # detection.
    # NOTE(review): str.join inserts 'provider:' BETWEEN group entries,
    # so with zero or one group the provider type is absent from the key
    # entirely -- looks unintended; confirm intent (vs. something like
    # '{}:{}'.format(provider_type, '/'.join(groups))).
    process_path = '{}:'.format(provider_type).join(groups)
    if process_path in processed_paths:
        # Cycle detected: for the root 'totem' config fall back to the
        # 'cluster-def' config once; otherwise stop recursing.
        return load_config(dict_merge({
            'name': 'cluster-def'
        }, meta)) if meta.get('name') == 'totem' else {}
    processed_paths.append(process_path)
    provider = get_provider(provider_type)
    try:
        merged_config = provider.load(config_name+'.yml', *groups)
        # Every config gets an implicit '.parent' (same name/provider,
        # one group level up) unless it overrides/disables it itself.
        merged_config.setdefault('.parent', {})
        merged_config['.parent'] = dict_merge(merged_config['.parent'], {
            'provider-type': provider_type,
            'name': config_name,
            'groups': ['..'],
            'enabled': True,
        })
        # Parents are merged raw; evaluation happens once at this level.
        merged_config['.parent']['evaluate'] = False
        merged_config['.parent']['groups'] = _expand_parent_groups(
            groups, merged_config['.parent']['groups'])

        merged_config = dict_merge(
            merged_config,
            load_config(merged_config['.parent'],
                        processed_paths=processed_paths)
        ) if merged_config['.parent']['enabled'] else merged_config
        merged_config = dict_merge(merged_config, default_config)
        # '.parent' is bookkeeping only -- never exposed to callers.
        del(merged_config['.parent'])

        if not meta.get('evaluate'):
            return _json_compatible_config(merged_config)
        return dict(normalize_config(
            evaluate_config(
                _json_compatible_config(merged_config),
                default_variables=meta.get('default-variables'),
                transformations=meta.get('transformations')),
            encrypted_keys=meta.get('encrypted-keys')
        ))
    except (MarkedYAMLError, ParserError, SchemaError) as error:
        raise ConfigParseError(str(error), groups)
+ :param config: + :param default_variables: + :param var_key: + :return: Evaluated config + :rtype: dict + """ + updated_config = copy.deepcopy(config) + updated_config.setdefault(var_key, {}) + if 'defaults' in updated_config: + # We do not want to do any processing ind efaults section. + # It is only used for YAML substitution which at this point is already + # done. + del(updated_config['defaults']) + updated_config = transform_string_values( + evaluate_value(updated_config, default_variables), + transformations=transformations) + + return updated_config + + +def _normalize_encrypted_config(env_config): + if isinstance(env_config, dict): + return { + 'value': str(env_config.get('value') or ''), + 'encrypted': env_config.get('encrypted', False) + } + return { + 'value': str(env_config), + 'encrypted': False + } + + +def normalize_config(config, encrypted_keys=None): + """ + Normalizes the config + :param config: + :return: + """ + encrypted_keys = encrypted_keys or () + for config_key, config_val in config.items(): + if isinstance(config_val, dict): + if config_key in encrypted_keys: + yield config_key, { + env_key: _normalize_encrypted_config(env_val) + for env_key, env_val in config_val.items() + } + else: + yield config_key, dict(normalize_config( + config_val, encrypted_keys=encrypted_keys)) + else: + yield config_key, config_val + + +def _get_jinja_environment(): + """ + Creates Jinja env for evaluating config + + :return: Jinja Environment + """ + env = get_spontaneous_environment() + env.line_statement_prefix = '#' + return conditions.apply_conditions(env) + + +def evaluate_template(template_value, variables={}): + env = _get_jinja_environment() + return env.from_string(str(template_value)).render(**variables).strip() + + +def evaluate_variables(variables, default_variables={}): + + merged_vars = dict_merge({}, default_variables) + + def get_sort_key(item): + return item[1]['priority'] + + def as_tuple(vars): + for variable_name, variable_val in 
vars.items(): + variable_val = copy.deepcopy(variable_val) + if not hasattr(variable_val, 'items'): + variable_val = { + 'value': variable_val, + 'template': False, + 'priority': 0 + } + variable_val.setdefault('template', True) + variable_val.setdefault('priority', 1) + variable_val.setdefault('value', '') + val = variable_val['value'] + if isinstance(val, bool): + variable_val['value'] = str(val).lower() + yield (variable_name, variable_val) + + def expand(var_name, var_value): + try: + merged_vars[var_name] = evaluate_template( + var_value['value'], merged_vars) if var_value['template'] \ + else var_value['value'] + except Exception as exc: + raise ConfigValueError('/variables/%s/' % var_name, var_value, + str(exc)) + + sorted_vars = sorted(as_tuple(variables), key=get_sort_key) + for sorted_var_name, sorted_var_value in sorted_vars: + expand(sorted_var_name, sorted_var_value) + + return merged_vars + + +def evaluate_value(value, variables={}, location='/'): + """ + Renders tokenized values (using nested strategy) + + :param value: Value that needs to be evaluated (str , list, dict, int etc) + :param variables: Variables to be used for Jinja2 templates + :param identifier: Identifier used to identify tokenized values. Only str + values that begin with identifier are evaluated. + :return: Evaluated object. 
def transform_string_values(config, transformations=None):
    """
    Transforms string values to appropriate primitive types in config.

    Keys listed in ``transformations['boolean-keys']`` have string
    values converted to booleans (True when the lower-cased value is in
    BOOLEAN_TRUE_VALUES); keys listed in
    ``transformations['number-keys']`` have string values converted to
    int.  Nested dicts and sequences are walked recursively; None values
    and values already of the right type are left untouched.

    :param config: dictionary configuration with evaluated template
        parameters
    :type config: dict
    :param transformations: optional mapping with 'boolean-keys' and/or
        'number-keys' lists. Defaults to no transformations.
    :type transformations: dict
    :return: transformed copy of config (the input is not mutated)
    :rtype: dict
    :raises ConfigValueError: when a number-key value cannot be parsed
        as an integer.
    """
    new_config = copy.deepcopy(config)
    transformations = transformations or {}

    def convert_keys(use_config, location='/'):
        # Walks dict-like nodes; sequences are recursed into per-item.
        if hasattr(use_config, 'items'):
            for each_k, each_v in use_config.items():
                try:
                    if each_v is None:
                        continue
                    elif each_k in transformations.get('boolean-keys', [])\
                            and isinstance(each_v, str):
                        use_config[each_k] = each_v.lower() in \
                            BOOLEAN_TRUE_VALUES
                    elif each_k in transformations.get('number-keys', [])\
                            and isinstance(each_v, str):
                        use_config[each_k] = int(each_v)
                    elif hasattr(each_v, 'items'):
                        convert_keys(each_v, '%s%s/' %
                                     (location, each_k))
                    elif isinstance(each_v,
                                    (list, tuple, set, types.GeneratorType)):
                        for idx, val in enumerate(each_v):
                            convert_keys(
                                val, '%s%s[%d]/' % (location, each_k, idx))
                except ValueError as error:
                    # str(error) works on both PY2 and PY3; ValueError
                    # has no ``message`` attribute on PY3, so the
                    # previous ``error.message`` raised AttributeError
                    # there and masked the real failure.
                    raise ConfigValueError(location + each_k, each_v,
                                           str(error))

    convert_keys(new_config)
    return new_config
class ConfigValidationError(BusinessRuleViolation):
    """
    Raised when a configuration fails JSON schema validation.

    Carries the failing schema path and the (JSON serialized) schema in
    the error details for API consumers.
    """

    def __init__(self, message, schema_path, schema):
        self.schema_path = schema_path
        self.schema = schema
        super(ConfigValidationError, self).__init__(
            message,
            code='CONFIG_VALIDATION_ERROR',
            details={
                'schema-path': schema_path,
                'schema': json.dumps(schema)
            })
+ Returns config for given provider, groups, config_type and name :param kwargs: :return: @@ -31,22 +35,57 @@ def get(self, provider, groups, configs, **kwargs): groups = '' return build_response(config.load_config( - *(group for group in groups.split(',') if group), - config_names=[ - config_name for config_name in configs.split(',') - if config_name], - provider_type=provider - )) + meta={ + 'groups': [group for group in groups.split(',') if group], + 'name': name, + 'provider-type': provider, + 'evaluate': config_type.lower() == 'evaluated' + } + ), mimetype=accept_mimetype) + + @hypermedia.consumes({ + MIME_JSON: SCHEMA_CONFIG_META_V1 + }) + @hypermedia.produces({ + MIME_JSON: None, + MIME_YAML: None + }, default=MIME_JSON) + def post(self, provider, groups, name, accept_mimetype=None, + request_data=None, **kwargs): + """ + Generates evaluated config based on additional information specified in + as part of request payload. + :param kwargs: + :return: + """ + + if provider not in get_provider_types(): + flask.abort(404) + else: + if groups == '_': + groups = '' + + meta = dict_merge({ + 'groups': [group for group in groups.split(',') if group], + 'name': name, + 'provider-type': provider, + 'evaluate': True + }, request_data) + + return build_response( + config.load_config(meta=meta), + mimetype=accept_mimetype) def register(app, **kwargs): """ - Registers Provider ('/providers') - Only GET operation is available. 
+ Registers Config API :param app: Flask application :return: None """ - config_func = ConfigApi.as_view('configs') - for uri in ['/providers//groups//configs/']: + config_func = ConfigApi.as_view('config') + for uri in ['/providers//groups///']: app.add_url_rule(uri, view_func=config_func, methods=['GET']) + app.add_url_rule('/providers//groups//evaluated/', + view_func=config_func, methods=['POST']) diff --git a/configservice/views/provider.py b/configservice/views/provider.py index 751636c..f7817fd 100644 --- a/configservice/views/provider.py +++ b/configservice/views/provider.py @@ -1,7 +1,7 @@ import flask from flask.views import MethodView -from conf.appconfig import SCHEMA_ROOT_V1, MIME_JSON, \ - CONFIG_PROVIDERS +from conf.appconfig import MIME_JSON, CONFIG_PROVIDERS, SCHEMA_PROVIDERS_V1, \ + SCHEMA_PROVIDER_V1, MIME_PROVIDER_V1, MIME_PROVIDERS_V1 from configservice.services.config import get_providers_meta_info, \ get_provider_types from configservice.views import hypermedia @@ -14,25 +14,35 @@ class ProviderApi(MethodView): Provider API """ - @hypermedia.produces({ - MIME_JSON: SCHEMA_ROOT_V1 - }, default=MIME_JSON) - def get(self, name=None, **kwargs): + def get(self, name=None): """ - Lists all providers. 
    @hypermedia.produces({
        MIME_JSON: SCHEMA_PROVIDER_V1,
        MIME_PROVIDER_V1: SCHEMA_PROVIDER_V1
    }, default=MIME_PROVIDER_V1)
    def get_provider(self, name, **kwargs):
        """
        Returns meta information for a single config provider.

        :param name: Provider type name; must be one of the configured
            provider types.
        :param kwargs: Extra keyword args injected by the hypermedia
            decorator (unused here).
        :return: Flask response built from the provider's 'meta-info'
            dict.
        :raises: HTTP 404 (via flask.abort) for an unknown provider.
        """
        provider_types = get_provider_types()
        if name not in provider_types:
            # Unknown provider type -> standard 404.
            flask.abort(404)
        else:
            provider = CONFIG_PROVIDERS[name]['meta-info']
            return build_response(provider)
# Code Coverage -coveralls \ No newline at end of file +coveralls diff --git a/requirements.txt b/requirements.txt index b0b8ce0..229cfdb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,10 @@ Flask==0.10.1 Flask-Cors==1.9.0 -gevent==1.0.1 -uWSGI==2.0.7 +gevent==1.0.2 +uWSGI==2.0.12 future==0.15.0 PyYAML==3.11 -python-etcd==0.3.2 +python-etcd==0.3.3 requests[security]==2.7.0 urllib3==1.11 https://github.com/totem/flask-hyperschema/archive/v0.1.1.tar.gz diff --git a/schemas/config-meta-v1.json b/schemas/config-meta-v1.json new file mode 100644 index 0000000..0d6b8ee --- /dev/null +++ b/schemas/config-meta-v1.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "title": "Schema for meta information used for config generation", + "id": "#config-meta-v1", + "type": "object", + "properties": { + "default-config": { + "type": "object", + "description": "Default config that needs to be merged with generated config" + }, + "default-variables": { + "type": "object", + "description": "Default variables for evaluation" + } + }, + "additionalProperties": false +} diff --git a/schemas/health-v1.json b/schemas/health-v1.json new file mode 100644 index 0000000..1dc858a --- /dev/null +++ b/schemas/health-v1.json @@ -0,0 +1,41 @@ +{ + "$schema": "http://json-schema.org/draft-04/hyper-schema#", + "type": "object", + "title": "HyperSchema for health API", + "id": "#health-v1", + "properties": { + "etcd": { + "$ref": "#/definitions/service-health" + }, + "elasticsearch":{ + "$ref": "#/definitions/service-health" + } + }, + "definitions": { + "service-health": { + "properties":{ + "details": { + "type": "object", + "description": "Object describing entity health or error."
+ }, + "status": { + "enum": ["ok", "failed"] + } + } + } + }, + "links": [ + { + "rel": "self", + "href": "${base_url}", + "mediaType": "application/vnd.orch.health-v1+json", + "method": "GET" + }, + { + "rel": "root", + "href": "/", + "mediaType": "application/vnd.orch.root-v1+json", + "method": "GET" + } + ] +} \ No newline at end of file diff --git a/schemas/provider-v1.json b/schemas/provider-v1.json new file mode 100644 index 0000000..3048ec6 --- /dev/null +++ b/schemas/provider-v1.json @@ -0,0 +1,66 @@ +{ + "$schema": "http://json-schema.org/draft-04/hyper-schema#", + "type": "object", + "title": "HyperSchema for provider API", + "id": "#provider-v1", + "type": "object", + "properties": { + "type": { + "enum": [ "s3", "github", "effective", "scm-default"], + "title": "Provider Type" + }, + "readonly": { + "type": "boolean", + "description": "Is this provider readonly (does not support write operations) ?" + }, + "name": { + "type": "string", + "description": "Name for config provider" + } + }, + "links": [ + { + "rel": "self", + "href": "${base_url}", + "mediaType": "vnd.totem-config-service.provider-v1+json", + "method": "GET", + "title": "Provider Info" + }, + { + "rel": "config", + "href": "groups/{groups}/{config_type}/{name}", + "mediaType": "application/json", + "method": "GET", + "description": "Gets config for given groups, config_type and name using json format" + }, + { + "rel": "config", + "href": "groups/{groups}/{config_type}/{name}", + "mediaType": "application/yml", + "method": "GET", + "description": "Gets config for given groups, config_type and name using yml format" + }, + { + "rel": "config-generate", + "description": "Generates evaluated config (json) using meta information.", + "href": "groups/{groups}/{config_type}/{name}", + "mediaType": "application/json", + "encType": "application/totem-config-service.config-meta-v1+json", + "method": "POST", + "schema": { + "$ref": "config-meta-v1" + } + }, + { + "rel": "config-generate", + 
"description": "Generates evaluated config (yml) using meta information.", + "href": "groups/{groups}/{config_type}/{name}", + "mediaType": "application/yml", + "encType": "application/totem-config-service.config-meta-v1+json", + "method": "POST", + "schema": { + "$ref": "config-meta-v1" + } + } + ] +} \ No newline at end of file diff --git a/schemas/providers-v1.json b/schemas/providers-v1.json new file mode 100644 index 0000000..e97a707 --- /dev/null +++ b/schemas/providers-v1.json @@ -0,0 +1,26 @@ +{ + "$schema": "http://json-schema.org/draft-04/hyper-schema#", + "type": "object", + "title": "HyperSchema for providers API", + "id": "#providers-v1", + "type": "array", + "items": { + "$ref": "provider-v1" + }, + "links": [ + { + "rel": "self", + "href": "${base_url}", + "mediaType": "vnd.totem-config-service.providers-v1+json", + "method": "GET", + "title": "Providers List" + }, + { + "rel": "provider", + "href": "/providers/{provider}", + "mediaType": "application/totem-config-service.provider-v1+json", + "method": "GET", + "title": "Provider Info" + } + ] +} \ No newline at end of file diff --git a/schemas/root-v1.json b/schemas/root-v1.json new file mode 100644 index 0000000..1a3365e --- /dev/null +++ b/schemas/root-v1.json @@ -0,0 +1,34 @@ +{ + "$schema": "http://json-schema.org/draft-04/hyper-schema#", + "type": "object", + "title": "HyperSchema for root API", + "id": "#root-v1", + "properties": { + "version": { + "type": "string" + } + }, + "links": [ + { + "rel": "self", + "href": "${base_url}", + "mediaType": "application/vnd.totem-config-service.root-v1+json", + "method": "GET", + "title": "API Root" + }, + { + "rel": "health", + "href": "/health", + "mediaType": "application/vnd.totem.health-v1+json", + "method": "GET", + "title": "Healthcheck API" + }, + { + "rel": "providers", + "href": "/providers", + "mediaType": "application/vnd.totem-config-service.providers-v1+json", + "method": "GET", + "title": "Config Provider List" + } + ] +} \ No newline at 
end of file diff --git a/tests/unit/configservice/__init__.py b/tests/unit/configservice/__init__.py new file mode 100644 index 0000000..8c54abe --- /dev/null +++ b/tests/unit/configservice/__init__.py @@ -0,0 +1 @@ +__author__ = 'sukrit' diff --git a/tests/unit/configservice/cluster_config/__init__.py b/tests/unit/configservice/cluster_config/__init__.py new file mode 100644 index 0000000..8c54abe --- /dev/null +++ b/tests/unit/configservice/cluster_config/__init__.py @@ -0,0 +1 @@ +__author__ = 'sukrit' diff --git a/tests/unit/configservice/cluster_config/test_base.py b/tests/unit/configservice/cluster_config/test_base.py new file mode 100644 index 0000000..d7d6513 --- /dev/null +++ b/tests/unit/configservice/cluster_config/test_base.py @@ -0,0 +1,46 @@ +from configservice.cluster_config.base import AbstractConfigProvider +from nose.tools import raises + +__author__ = 'sukrit' + + +class TestAbstractConfigProvider: + """ + Tests MergedConfigProvider + """ + + def setup(self): + self.provider = AbstractConfigProvider() + + @raises(NotImplementedError) + def test_load(self): + """ + Should raise NotImplementedError + """ + + # When I invoke the root endpoint + self.provider.load('path1') + + # Then: NotImplementedError is raised + + @raises(NotImplementedError) + def test_write(self): + """ + Should raise NotImplementedError + """ + + # When I invoke the root endpoint + self.provider.write({}, 'path1') + + # Then: NotImplementedError is raised + + @raises(NotImplementedError) + def test_delete(self): + """ + Should raise NotImplementedError + """ + + # When I invoke the root endpoint + self.provider.delete('path1') + + # Then: NotImplementedError is raised diff --git a/tests/unit/configservice/cluster_config/test_effective.py b/tests/unit/configservice/cluster_config/test_effective.py new file mode 100644 index 0000000..d2ee0c9 --- /dev/null +++ b/tests/unit/configservice/cluster_config/test_effective.py @@ -0,0 +1,151 @@ +from __future__ import (absolute_import, 
division, + print_function, unicode_literals) +import copy +from future.builtins import ( # noqa + bytes, dict, int, list, object, range, str, + ascii, chr, hex, input, next, oct, open, + pow, round, super, + filter, map, zip) +from tests.helper import dict_compare +from configservice.cluster_config.base import AbstractConfigProvider +from configservice.cluster_config.effective import MergedConfigProvider +from nose.tools import eq_ + +__author__ = 'sukrit' + + +class InMemoryProvider(AbstractConfigProvider): + + def __init__(self, init_cache=None): + self.cache = copy.deepcopy(init_cache) if init_cache else {} + + def write(self, name, config, *paths): + self.cache['/' + '/'.join(paths) + ':' + name] = copy.deepcopy(config) + + def load(self, name, *paths): + return copy.deepcopy(self.cache.get('/' + '/'.join(paths) + ':' + name, + {})) + + def delete(self, name, *paths): + self.cache.pop('/' + '/'.join(paths) + ':' + name) + + +class TestMergedConfigProvider: + """ + Tests MergedConfigProvider + """ + + def setup(self): + self.cache_provider = InMemoryProvider() + self.write_provider = InMemoryProvider() + self.provider1 = InMemoryProvider(init_cache={ + '/:totem.yml': { + 'key1': 'provider1-1.0' + }, + '/path1:totem.yml': { + 'key1': 'provider1-1.1', + 'key2': 'provider1-2.1' + }, + '/path1/path2:totem.yml': { + 'key1': 'provider1-1.2', + 'key3': 'provider1-3.2' + } + }) + self.provider2 = InMemoryProvider(init_cache={ + '/:totem.yml': {}, + '/path1:totem.yml': { + 'key1': 'provider2-1.1', + 'key2': 'provider2-2.1', + 'key4': 'provider2-4.1', + }, + '/path1/path2:totem.yml': { + 'key1': 'provider2-1.2', + 'key2': 'provider2-2.2', + } + + }) + self.provider = MergedConfigProvider( + self.provider1, self.provider2, cache_provider=self.cache_provider, + write_provider=self.write_provider) + + def test_write(self): + """ + Should set the Link header for root endpoint. 
+ """ + + # When: When I try to write a config + self.provider.write({}, 'path1', 'path2') + + # Then: Config is written using write provider + eq_(self.write_provider.load('path1', 'path2'), {}) + + def test_delete(self): + """ + Should set the Link header for root endpoint. + """ + + self.write_provider.write('totem.yml', {'deleteme': 'deleteme'}, + 'path1') + + # When: When I try to write a config + self.provider.delete('totem.yml', 'path1') + + # Then: Config is written using write provider + eq_(self.write_provider.load('totem.yml', 'path1'), {}) + + def test_write_when_no_write_provider_specified(self): + """ + Should perform no operation when no write provider is specified. + """ + + # Given: Merged Config provider with no write provider specified + self.provider.write_provider = None + + # When: I write the config + self.provider.write({}, 'path1', 'path2') + + # Then: No exception is raised + + def test_load_with_no_caching(self): + # Given: Merged Config provider with no cache provider + self.provider.cache_provider = None + + # When: I load config + merged_config = self.provider.load('totem.yml', 'path1', 'path2') + + # Then: Merged config is returned + dict_compare(merged_config, { + 'key1': 'provider1-1.2', + 'key2': 'provider2-2.2', + 'key3': 'provider1-3.2' + }) + + def test_load_with_caching(self): + + # When: I load config + merged_config = self.provider.load('totem.yml', 'path1', 'path2') + + # Then: Merged config is returned + expected_config = { + 'key1': 'provider1-1.2', + 'key2': 'provider2-2.2', + 'key3': 'provider1-3.2' + } + dict_compare(merged_config, expected_config) + + # And config gets cached + dict_compare(self.cache_provider.load('totem.yml', 'path1', 'path2'), + expected_config) + + def test_load_with_cached_value(self): + # Give: Cached Config + cached_config = { + 'cached_key': 'cached_value' + } + self.cache_provider.write('totem.yml', cached_config, 'path1', 'path2') + + # When: I load config + merged_config = 
self.provider.load('totem.yml', 'path1', 'path2') + + # Then: Cached config is returned + dict_compare(merged_config, cached_config) diff --git a/tests/unit/configservice/cluster_config/test_github.py b/tests/unit/configservice/cluster_config/test_github.py new file mode 100644 index 0000000..769d33b --- /dev/null +++ b/tests/unit/configservice/cluster_config/test_github.py @@ -0,0 +1,180 @@ +from __future__ import (absolute_import, division, + print_function, unicode_literals) +from future.builtins import ( # noqa + bytes, dict, int, list, object, range, str, + ascii, chr, hex, input, next, oct, open, + pow, round, super, + filter, map, zip) +from mock import patch +import nose +from configservice.cluster_config.github import GithubConfigProvider, \ + GithubFetchException +from tests.helper import dict_compare +from nose.tools import eq_ + +__author__ = 'sukrit' + + +class TestGithubConfigProvider: + + def setup(self): + self.provider = GithubConfigProvider() + + def test_init_when_no_parameters_passed(self): + """
 + Should initialize GithubConfigProvider with default values + """ + eq_(self.provider.config_base, '/') + eq_(self.provider.auth, None) + + def test_init(self): + """ + Should initialize GithubConfigProvider with provided values + """ + # When: I initialize GithubConfigProvider with provided values + provider = GithubConfigProvider(token='MOCK_TOKEN', config_base='/f1/') + eq_(provider.config_base, '/f1/') + eq_(provider.auth, ('MOCK_TOKEN', 'x-oauth-basic')) + + @patch('requests.get') + def test_load_for_partial_path(self, m_get): + """ + Should return empty config from GithubConfigProvider for partial path + """ + + # When: I load config using provider + ret_value = self.provider.load( + 'totem.yml', 'totem', 'cluster-orchestrator') + + # Then: Config gets loaded + dict_compare(ret_value, {}) + + @patch('requests.get') + def test_load_for_full_path(self, m_get): + """ + Should read config from github + """ + # Given: Existing config + 
m_get.return_value.status_code = 200 + m_get.return_value.json.return_value = { + 'content': 'dmFyaWFibGVzOiB7fQ==' + } + + # When: I load config using provider + ret_value = self.provider.load( + 'totem.yml', 'local', 'totem', 'cluster-orchestrator', 'develop') + + # Then: Config gets loaded + dict_compare(ret_value, {'variables': {}}) + + @patch('requests.get') + def test_load_for_non_existing_path(self, m_get): + """ + Should return empty config when config is not found in github + """ + # Given: Existing config + m_get.return_value.status_code = 404 + + # When: I load config using provider + ret_value = self.provider.load( + 'totem.yml', 'totem', 'cluster-orchestrator', 'develop') + + # Then: EmptyConfig gets loaded + dict_compare(ret_value, {}) + + @patch('requests.get') + def test_load_when_github_fetch_fails_with_raw_text(self, m_get): + """ + Should read config from github + """ + # Given: Existing config + m_get.return_value.status_code = 500 + m_get.return_value.headers = {} + m_get.return_value.text = 'mock error' + + # When: I load config using provider + with nose.tools.assert_raises(GithubFetchException) as cm: + self.provider.load( + 'totem.yml', 'local', 'totem', 'cluster-orchestrator', + 'develop') + + # Then: Expected exception is raised + dict_compare(cm.exception.response, { + 'url': 'https://api.github.com/repos/totem/cluster-orchestrator/' + 'contents/totem.yml', + 'status': 500, + 'response': { + 'raw': 'mock error' + } + }) + + +class TestGithubFetchException(): + + def test_init_with_empty_response(self): + """ + Should initialize using empty github response + """ + + # When: I initialize with empty response + exc = GithubFetchException() + + # Then: Default value are assigned to exception fields + eq_(exc.response, {}) + + def test_init(self): + """ + Should initialize using provided github response + """ + + # When: I initialize with empty response + exc = GithubFetchException(github_response={ + 'response': { + 'message': 'mock error', + 
}, + 'url': 'mock url', + 'status': 500 + }) + + # Then: Expected response is returned + eq_(exc.message, 'Failed to fetch config from github using ' + 'url:mock url. Status:500. Reason: mock error') + + def test_str_representation(self): + """ + Should return string representation + """ + + # Given: Existing exception instance + exc = GithubFetchException() + + # When: I return string representation + rep = str(exc) + + # Then: Expected string representation is returned. + eq_(rep, exc.message) + + def test_to_dict_representation(self): + """ + Should return dict representation for GithubFetchException + """ + + # Given: Existing instance of GithubFetchException + exc = GithubFetchException(github_response={ + 'response': { + 'message': 'mock error', + }, + 'url': 'mock url', + 'status': 500 + }) + + # When: I return string representation + rep = exc.to_dict() + + # Then: Expected string representation is returned. + dict_compare(rep, { + 'message': 'Failed to fetch config from github using url:mock url.' + ' Status:500. 
Reason: mock error', + 'code': 'GITHUB_CONFIG_FETCH_FAILED', + 'details': exc.response + }) diff --git a/tests/unit/configservice/services/__init__.py b/tests/unit/configservice/services/__init__.py new file mode 100644 index 0000000..8c54abe --- /dev/null +++ b/tests/unit/configservice/services/__init__.py @@ -0,0 +1 @@ +__author__ = 'sukrit' diff --git a/tests/unit/configservice/services/test_config.py b/tests/unit/configservice/services/test_config.py new file mode 100644 index 0000000..d6f9e14 --- /dev/null +++ b/tests/unit/configservice/services/test_config.py @@ -0,0 +1,680 @@ +from __future__ import (absolute_import, division, + print_function, unicode_literals) + +from parser import ParserError +from future.builtins import ( # noqa + bytes, dict, int, list, object, range, str, + ascii, chr, hex, input, next, oct, open, + pow, round, super, + filter, map, zip) +from mock import patch +from nose.tools import eq_, raises +from configservice.cluster_config.effective import MergedConfigProvider +from configservice.cluster_config.etcd import EtcdConfigProvider +from configservice.cluster_config.github import GithubConfigProvider +from configservice.cluster_config.s3 import S3ConfigProvider +from configservice.services import config +from configservice.services.exceptions import ConfigParseError, \ + ConfigProviderNotFound +from tests.helper import dict_compare + +import configservice.services.config as service + + +__author__ = 'sukrit' + + +@patch.dict('configservice.services.config.CONFIG_PROVIDERS', { + 'provider1': {}, + 'provider3': {} +}) +@patch('configservice.services.config.CONFIG_PROVIDER_LIST') +def test_get_providers(mock_provider_list): + """ + Should get the list of available providers + """ + # Given: Existing config provider list" + + mock_provider_list.__iter__.return_value = ['provider1', 'provider2'] + + # When: I fetch provider list + providers = service.get_providers() + + # Then: Expected provider list is returned + eq_(list(providers), 
['provider1', 'effective']) + + +@raises(ConfigProviderNotFound) +def test_get_provider_when_not_found(): + """ + Should raise ConfigProviderNotFound when provider is not found + """ + + # When: I fetch provider that does not exists + service.get_provider('invalid') + + # Then: ConfigProviderNotFound is raised + + +@patch.dict('configservice.services.config.CONFIG_PROVIDERS', { + 'etcd': { + 'host': 'mockhost', + 'port': 10000, + 'base': '/mock' + } +}) +@patch('configservice.services.config.CONFIG_PROVIDER_LIST') +def test_get_etcd_provider(mock_provider_list): + """ + Should return etcd provider + """ + # Given: Existing config provider list" + mock_provider_list.__contains__.return_value = True + mock_provider_list.__iter__.return_value = ['etcd'] + + # When: I fetch provider that does not exists + provider = service.get_provider('etcd') + + # Then: Etcd Config Provider is returned + eq_(isinstance(provider, EtcdConfigProvider), True) + eq_(provider.etcd_cl.host, 'mockhost') + eq_(provider.etcd_cl.port, 10000) + eq_(provider.config_base, '/mock/config') + eq_(provider.ttl, None) + + +@patch.dict('configservice.services.config.CONFIG_PROVIDERS', { + 's3': { + 'bucket': 'mockbucket', + 'base': '/mock' + } +}) +@patch('configservice.services.config.CONFIG_PROVIDER_LIST') +def test_get_s3_provider(mock_provider_list): + """ + Should return s3 provider + """ + # Given: Existing config provider list" + mock_provider_list.__contains__.return_value = True + mock_provider_list.__iter__.return_value = ['s3'] + + # When: I fetch provider that does not exists + provider = service.get_provider('s3') + + # Then: Etcd Config Provider is returned + eq_(isinstance(provider, S3ConfigProvider), True) + eq_(provider.bucket, 'mockbucket') + eq_(provider.config_base, '/mock') + + +@patch.dict('configservice.services.config.CONFIG_PROVIDERS', { + 'github': { + 'token': 'mocktoken', + 'config_base': '/mock' + } +}) +@patch('configservice.services.config.CONFIG_PROVIDER_LIST') +def 
test_get_github_provider(mock_provider_list): + """ + Should return github provider + """ + # Given: Existing config provider list" + mock_provider_list.__contains__.return_value = True + mock_provider_list.__iter__.return_value = ['github'] + + # When: I fetch provider that does not exists + provider = service.get_provider('github') + + # Then: Etcd Config Provider is returned + eq_(isinstance(provider, GithubConfigProvider), True) + eq_(provider.auth, ('mocktoken', 'x-oauth-basic')) + eq_(provider.config_base, '/mock') + + +@patch.dict('configservice.services.config.CONFIG_PROVIDERS', { + 'etcd': { + 'host': 'mockhost', + 'port': 10000, + 'base': '/mock' + }, + 'effective': { + 'cache': { + 'enabled': True, + 'ttl': 300 + } + } +}) +@patch('configservice.services.config.CONFIG_PROVIDER_LIST') +def test_get_effective_provider(mock_provider_list): + """ + Should return effective provider + :return: + """ + + """ + Should return effective provider provider + """ + # Given: Existing config provider list" + mock_provider_list.__contains__.return_value = True + mock_provider_list.__iter__.return_value = ['effective', 'etcd'] + + # When: I fetch provider that does not exists + provider = service.get_provider('effective') + + # Then: Etcd Config Provider is returned + eq_(isinstance(provider, MergedConfigProvider), True) + eq_(len(provider.providers), 1) + + +def test_evaluate_value_with_nested_variables(): + """ + Should evaluate value by parsing templates. 
+ :return: + """ + + # Given: Object that needs to be evaluated + obj = { + 'variables': { + 'var2': { + 'value': '{{ var2 }}-modified' + } + }, + 'str-key': '{{ var1 }}', + 'int-key': 2, + 'nested-key': { + 'nested-key1': { + 'value': '{{ var1 }}', + 'template': True + }, + 'variables': { + 'var1': { + 'value': '{{ var1 }}-modified' + } + }, + 'nested-key-2': {}, + '.defaults': { + 'default1': { + 'value': '{{ var1 }} ', + 'template': True + } + } + + }, + 'list-key': [ + 'list-value1', + { + 'value': '\n\n{{ var2 }}\n\n', + } + ], + 'value-key': { + 'value': '{{ var1 }}', + 'encrypted': True, + 'template': True + } + } + + # And: variables that needs to be applied + variables = { + 'var1': 'var1-value', + 'var2': 'var2-value' + } + + # When: I evaluate object + result = service.evaluate_value(obj, variables) + + # Then: Expected result with evaluated values is returned + + dict_compare(result, { + 'str-key': '{{ var1 }}', + 'int-key': 2, + 'nested-key': { + 'nested-key1': 'var1-value-modified', + 'nested-key-2': { + 'default1': 'var1-value-modified' + }, + }, + 'list-key': [ + 'list-value1', + 'var2-value-modified', + ], + 'value-key': { + 'value': 'var1-value', + 'encrypted': True + } + }) + + +def test_evaluate_variables(): + """ + Should evaluate config variables + :return: None + """ + + # Given: Variables that needs to be expanded + variables = { + 'var1': { + 'value': True + }, + 'var2': { + 'value': '{{var1}}-var2value', + 'template': True, + 'priority': 2, + }, + 'var3': { + 'value': '{{default1}}-var3value', + 'template': True, + 'priority': 1, + }, + 'var4': False + } + + # When: I evaluate the config + result = service.evaluate_variables(variables, { + 'default1': 'default1value' + }) + + # Then: Expected config is returned + dict_compare(result, { + 'var1': 'true', + 'var2': 'true-var2value', + 'var3': 'default1value-var3value', + 'default1': 'default1value', + 'var4': 'false' + }) + + +def test_evaluate_config_with_defaults(): + """ + Should evaluate 
config as expected + :return: None + """ + + # Given: Config that needs to be evaluated + config = { + 'defaults': {}, + 'variables': { + 'var1': 'value1', + 'var2': { + 'value': '{{var1}}-var2value', + 'template': True, + 'priority': 2, + }, + }, + 'key1': { + 'value': 'test-{{var1}}-{{var2}}-{{var3}}', + 'template': True + }, + 'deployers': { + '.defaults': { + 'dummy': 'value' + }, + 'default': { + 'enabled': True + }, + 'deployer2': { + 'url': 'deployer2-url', + 'enabled': True, + }, + 'deployer3': { + 'dummy': 'value', + 'enabled': { + 'value': '{{ False }}' + } + } + } + } + + # When: I evaluate the config + result = service.evaluate_config(config, { + 'var1': 'default1', + 'var2': 'default2', + 'var3': 'default3' + }, transformations={ + 'boolean-keys': ['enabled'] + }) + + # Then: Expected config is returned + dict_compare(result, { + 'key1': 'test-value1-value1-var2value-default3', + 'deployers': { + 'default': { + 'dummy': 'value', + 'enabled': True + }, + 'deployer2': { + 'dummy': 'value', + 'url': 'deployer2-url', + 'enabled': True + }, + 'deployer3': { + 'enabled': False, + 'dummy': 'value' + } + } + }) + + +def test_transform_string_values(): + """ + Should transform string values inside config as expected. 
+ :return: + """ + + # Given: Config that needs to be transformed + config = { + 'key1': 'value1', + 'port': 1212, + 'enabled': 'True', + 'nested-port-key': { + 'port': u'2321', + 'nodes': u'12', + 'min-nodes': '13', + 'enabled': 'False', + 'force-ssl': 'true' + }, + 'array-config': [ + { + 'port': '123', + 'nodes': '13', + 'min-nodes': '14', + 'attempts': '10', + 'enabled': False + }, + 'testval' + ], + 'null-key': None + } + + # When: I transform string values in config + result = service.transform_string_values(config, transformations={ + 'boolean-keys': ['enabled', 'force-ssl'], + 'number-keys': ['port', 'nodes', 'min-nodes', 'attempts'] + }) + + # Then: Transformed config is returned + dict_compare(result, { + 'key1': 'value1', + 'port': 1212, + 'enabled': True, + 'nested-port-key': { + 'port': 2321, + 'nodes': 12, + 'min-nodes': 13, + 'enabled': False, + 'force-ssl': True + }, + 'array-config': [ + { + 'port': 123, + 'nodes': 13, + 'min-nodes': 14, + 'attempts': 10, + 'enabled': False + }, + 'testval' + ], + 'null-key': None + }) + + +@patch('configservice.services.config.get_provider') +def test_load_config(m_get_provider): + """ + Should load config successfully + :return: + """ + # Given: Existing valid config + cfg1 = { + '.parent': { + 'groups': [] + }, + 'mockkey': 'mockvalue', + 8080: 'number-key', + 'deployers': { + '.defaults': { + 'dummy': 'value' + }, + 'deployer1': { + 'enabled': False, + 'variables': {} + }, + 'deployer2': { + 'enabled': True, + 'variables': {} + } + }, + } + cfg2 = { + '.parent': { + 'groups': [] + }, + 'mockkey2': 'mockvalue2', + 'deployers': { + 'deployer1': { + 'variables': { + 'deployer_url': 'deployer1-url1', + }, + 'url': { + 'value': '{{deployer_url}}' + } + }, + 'deployer2': { + 'variables': { + 'deployer_url': 'deployer2-url1', + }, + 'url': { + 'value': '{{deployer_url}}' + } + } + }, + 'environment': { + 'env1': 'val1' + } + } + m_get_provider.return_value.load.side_effect = [cfg1, cfg2, {}] + + # When: I load the 
config + loaded_config = config.load_config({ + 'groups': ['mockpath1', 'mockpath2'], + 'evaluate': True, + 'default-config': { + 'deployers': { + '.defaults': { + 'proxy': {} + } + } + }, + }) + + # Then: Config gets loaded as expected + dict_compare(loaded_config, { + 'mockkey': 'mockvalue', + 'mockkey2': 'mockvalue2', + '8080': 'number-key', + 'deployers': { + 'deployer1': { + 'proxy': {}, + 'url': 'deployer1-url1', + 'enabled': False, + 'dummy': 'value' + }, + 'deployer2': { + 'proxy': {}, + 'url': 'deployer2-url1', + 'enabled': True, + 'dummy': 'value' + } + }, + 'environment': { + 'env1': 'val1' + } + }) + + +@raises(ConfigParseError) +@patch('configservice.services.config.get_provider') +def test_load_config_when_config_is_invalid(m_get_provider): + """ + Should raise ConfigParseError when configuration is invalid + :return: + """ + # Given: Existing valid config + m_get_provider.return_value.load.side_effect = ParserError('Mock') + + # When: I load the config + config.load_config({ + 'groups': ['mockpath1', 'mockpath2'], + 'evaluate': True + }) + + # Then: ConfigParseError is raised + + +def test_normalize_config(): + """ + Should normalize the config containing environment variables + """ + # Given: Existing config that needs to be normalized + input_config = { + 'environment': { + 'var1': 'value1', + 'var2': 2, + 'var3': True, + 'var4': { + 'value': 'value4' + }, + 'var5': { + + }, + 'var6': { + 'value': 'value6', + 'encrypted': True + } + }, + 'nested': { + 'environment': { + 'var7': 'value7', + } + }, + 'other': { + 'test-key': 'test-val' + }, + 'direct-string': 'value', + 'direct-int': 1 + } + + # When: I normalize the config + normalized_config = dict(service.normalize_config( + input_config, encrypted_keys=('environment',))) + + # Then: Config gets normalized as expected + dict_compare(normalized_config, { + 'environment': { + 'var1': { + 'value': 'value1', + 'encrypted': False + }, + 'var2': { + 'value': '2', + 'encrypted': False + }, + 'var3': { + 
'value': 'True', + 'encrypted': False + }, + 'var4': { + 'value': 'value4', + 'encrypted': False + }, + 'var5': { + 'value': '', + 'encrypted': False + }, + 'var6': { + 'value': 'value6', + 'encrypted': True + } + }, + 'nested': { + 'environment': { + 'var7': { + 'value': 'value7', + 'encrypted': False + } + } + }, + 'other': { + 'test-key': 'test-val' + }, + 'direct-string': 'value', + 'direct-int': 1 + }) + + +def test_expand_groups(): + # Given: Current config groups + groups = ['dir1', 'dir2', '..'] + + # When: I process parent config groups + processed = service._expand_groups(groups) + + # Then: Processed groups are returned + eq_(processed, ['dir1']) + + +def test_expand_groups_without_expansion_paths(): + # Given: Current config groups + groups = ['dir1', 'dir2'] + + # When: I process parent config groups + processed = service._expand_groups(groups) + + # Then: Original groups are returned + eq_(processed, groups) + + +def test_expand_groups_with_multiple_expansion_paths(): + # Given: Current config groups + groups = ['dir1', 'dir2', '..', '..', '..'] + + # When: I process parent config groups + processed = service._expand_groups(groups) + + # Then: Expanded groups are returned + eq_(processed, []) + + +def test_expand_parent_groups(): + # Given: Current config groups + groups = ['dir1', 'dir2', 'dir3'] + parent_groups = ['..', '..', 'dir4'] + + # When: I expand parent config groups + processed = service._expand_parent_groups(groups, parent_groups) + + # Then: Expanded groups are returned + eq_(processed, ['dir1', 'dir4']) + + +def test_expand_empty_parent_groups(): + # Given: Current config groups + groups = ['dir1', 'dir2', 'dir3'] + parent_groups = None + + # When: I expand parent config groups + processed = service._expand_parent_groups(groups, parent_groups) + + # Then: Empty list is returned + eq_(processed, []) + + +def test_expand_parent_groups_with_fist_group_as_non_expansion_path(): + # Given: Current config groups + groups = ['dir1', 'dir2', 
'dir3'] + parent_groups = ['dir4', 'dir5', '..', 'dir6'] + + # When: I expand parent config groups + processed = service._expand_parent_groups(groups, parent_groups) + + # Then: Expanded groups are returned + eq_(processed, ['dir4', 'dir6']) diff --git a/tests/unit/test_configservice.py b/tests/unit/test_configservice.py deleted file mode 100644 index 57abd66..0000000 --- a/tests/unit/test_configservice.py +++ /dev/null @@ -1,3 +0,0 @@ - -def test_pass(): - assert True