http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/aria/storage/modeling/type.py ---------------------------------------------------------------------- diff --git a/aria/storage/modeling/type.py b/aria/storage/modeling/type.py new file mode 100644 index 0000000..9e3de3d --- /dev/null +++ b/aria/storage/modeling/type.py @@ -0,0 +1,302 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from collections import namedtuple + +from sqlalchemy import ( + TypeDecorator, + VARCHAR, + event +) +from sqlalchemy.ext import mutable + +from .. import exceptions + + +class _MutableType(TypeDecorator): + """ + Dict representation of type. + """ + @property + def python_type(self): + raise NotImplementedError + + def process_literal_param(self, value, dialect): + pass + + impl = VARCHAR + + def process_bind_param(self, value, dialect): + if value is not None: + value = json.dumps(value) + return value + + def process_result_value(self, value, dialect): + if value is not None: + value = json.loads(value) + return value + + +class Dict(_MutableType): + @property + def python_type(self): + return dict + + +class List(_MutableType): + @property + def python_type(self): + return list + + +class _StrictDictMixin(object): + + @classmethod + def coerce(cls, key, value): + "Convert plain dictionaries to MutableDict." + try: + if not isinstance(value, cls): + if isinstance(value, dict): + for k, v in value.items(): + cls._assert_strict_key(k) + cls._assert_strict_value(v) + return cls(value) + return mutable.MutableDict.coerce(key, value) + else: + return value + except ValueError as e: + raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e))) + + def __setitem__(self, key, value): + self._assert_strict_key(key) + self._assert_strict_value(value) + super(_StrictDictMixin, self).__setitem__(key, value) + + def setdefault(self, key, value): + self._assert_strict_key(key) + self._assert_strict_value(value) + super(_StrictDictMixin, self).setdefault(key, value) + + def update(self, *args, **kwargs): + for k, v in kwargs.items(): + self._assert_strict_key(k) + self._assert_strict_value(v) + super(_StrictDictMixin, self).update(*args, **kwargs) + + @classmethod + def _assert_strict_key(cls, key): + if cls._key_cls is not None and not isinstance(key, cls._key_cls): + raise exceptions.StorageError("Key type was set strictly to {0}, but was {1}".format( + cls._key_cls, type(key) + )) + + @classmethod + def _assert_strict_value(cls, value): + if cls._value_cls is not None and not isinstance(value, cls._value_cls): + raise exceptions.StorageError("Value type was set strictly to {0}, but was {1}".format( + cls._value_cls, type(value) + )) + + +class _MutableDict(mutable.MutableDict): + """ + Enables tracking for dict values. 
+ """ + + @classmethod + def coerce(cls, key, value): + "Convert plain dictionaries to MutableDict." + try: + return mutable.MutableDict.coerce(key, value) + except ValueError as e: + raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e))) + + +class _StrictListMixin(object): + + @classmethod + def coerce(cls, key, value): + "Convert plain dictionaries to MutableDict." + try: + if not isinstance(value, cls): + if isinstance(value, list): + for item in value: + cls._assert_item(item) + return cls(value) + return mutable.MutableList.coerce(key, value) + else: + return value + except ValueError as e: + raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e))) + + def __setitem__(self, index, value): + """Detect list set events and emit change events.""" + self._assert_item(value) + super(_StrictListMixin, self).__setitem__(index, value) + + def append(self, item): + self._assert_item(item) + super(_StrictListMixin, self).append(item) + + def extend(self, item): + self._assert_item(item) + super(_StrictListMixin, self).extend(item) + + def insert(self, index, item): + self._assert_item(item) + super(_StrictListMixin, self).insert(index, item) + + @classmethod + def _assert_item(cls, item): + if cls._item_cls is not None and not isinstance(item, cls._item_cls): + raise exceptions.StorageError("Key type was set strictly to {0}, but was {1}".format( + cls._item_cls, type(item) + )) + + +class _MutableList(mutable.MutableList): + + @classmethod + def coerce(cls, key, value): + "Convert plain dictionaries to MutableDict." + try: + return mutable.MutableList.coerce(key, value) + except ValueError as e: + raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e))) + +_StrictDictID = namedtuple('_StrictDictID', 'key_cls, value_cls') +_StrictValue = namedtuple('_StrictValue', 'type_cls, listener_cls') + + +class _StrictDict(object): + """ + This entire class functions as a factory for strict dicts and their listeners. + No type class, and no listener type class is created more than once. If a relevant type class + exists it is returned. + """ + _strict_map = {} + + def __call__(self, key_cls=None, value_cls=None): + strict_dict_map_key = _StrictDictID(key_cls=key_cls, value_cls=value_cls) + if strict_dict_map_key not in self._strict_map: + key_cls_name = getattr(key_cls, '__name__', str(key_cls)) + value_cls_name = getattr(value_cls, '__name__', str(value_cls)) + # Creating the type class itself. this class would be returned (used by the sqlalchemy + # Column). + strict_dict_cls = type( + 'StrictDict_{0}_{1}'.format(key_cls_name, value_cls_name), + (Dict, ), + {} + ) + # Creating the type listening class. + # The new class inherits from both the _MutableDict class and the _StrictDictMixin, + # while setting the necessary _key_cls and _value_cls as class attributes. + listener_cls = type( + 'StrictMutableDict_{0}_{1}'.format(key_cls_name, value_cls_name), + (_StrictDictMixin, _MutableDict), + {'_key_cls': key_cls, '_value_cls': value_cls} + ) + self._strict_map[strict_dict_map_key] = _StrictValue(type_cls=strict_dict_cls, + listener_cls=listener_cls) + + return self._strict_map[strict_dict_map_key].type_cls + +StrictDict = _StrictDict() + + +class _StrictList(object): + """ + This entire class functions as a factory for strict lists and their listeners. + No type class, and no listener type class is created more than once. If a relevant type class + exists it is returned. 
+ """ + _strict_map = {} + + def __call__(self, item_cls=None): + + if item_cls not in self._strict_map: + item_cls_name = getattr(item_cls, '__name__', str(item_cls)) + # Creating the type class itself. this class would be returned (used by the sqlalchemy + # Column). + strict_list_cls = type( + 'StrictList_{0}'.format(item_cls_name), + (List, ), + {} + ) + # Creating the type listening class. + # The new class inherits from both the _MutableList class and the _StrictListMixin, + # while setting the necessary _item_cls as class attribute. + listener_cls = type( + 'StrictMutableList_{0}'.format(item_cls_name), + (_StrictListMixin, _MutableList), + {'_item_cls': item_cls} + ) + self._strict_map[item_cls] = _StrictValue(type_cls=strict_list_cls, + listener_cls=listener_cls) + + return self._strict_map[item_cls].type_cls + +StrictList = _StrictList() + + +def _mutable_association_listener(mapper, cls): + strict_dict_type_to_listener = \ + dict((v.type_cls, v.listener_cls) for v in _StrictDict._strict_map.values()) + + strict_list_type_to_listener = \ + dict((v.type_cls, v.listener_cls) for v in _StrictList._strict_map.values()) + + for prop in mapper.column_attrs: + column_type = prop.columns[0].type + # Dict Listeners + if type(column_type) in strict_dict_type_to_listener: # pylint: disable=unidiomatic-typecheck + strict_dict_type_to_listener[type(column_type)].associate_with_attribute( + getattr(cls, prop.key)) + elif isinstance(column_type, Dict): + _MutableDict.associate_with_attribute(getattr(cls, prop.key)) + + # List Listeners + if type(column_type) in strict_list_type_to_listener: # pylint: disable=unidiomatic-typecheck + strict_list_type_to_listener[type(column_type)].associate_with_attribute( + getattr(cls, prop.key)) + elif isinstance(column_type, List): + _MutableList.associate_with_attribute(getattr(cls, prop.key)) +_LISTENER_ARGS = (mutable.mapper, 'mapper_configured', _mutable_association_listener) + + +def _register_mutable_association_listener(): + event.listen(*_LISTENER_ARGS) + + +def remove_mutable_association_listener(): + """ + Remove the event listener that associates ``Dict`` and ``List`` column types with + ``MutableDict`` and ``MutableList``, respectively. + + This call must happen before any model instance is instantiated. + This is because once it does, that would trigger the listener we are trying to remove. + Once it is triggered, many other listeners will then be registered. + At that point, it is too late. + + The reason this function exists is that the association listener, interferes with ARIA change + tracking instrumentation, so a way to disable it is required. + + Note that the event listener this call removes is registered by default. + """ + if event.contains(*_LISTENER_ARGS): + event.remove(*_LISTENER_ARGS) + +_register_mutable_association_listener()
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/aria/storage/modeling/utils.py ---------------------------------------------------------------------- diff --git a/aria/storage/modeling/utils.py b/aria/storage/modeling/utils.py new file mode 100644 index 0000000..75e34f5 --- /dev/null +++ b/aria/storage/modeling/utils.py @@ -0,0 +1,139 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from random import randrange + +from shortuuid import ShortUUID + +from ...utils.console import puts + + +# UUID = ShortUUID() # default alphabet is base57, which is alphanumeric without visually ambiguous +# characters; ID length is 22 +UUID = ShortUUID(alphabet='abcdefghijklmnopqrstuvwxyz0123456789') # alphanumeric; ID length is 25 + + +def generate_id_string(length=None): + """ + A random string with a strong guarantee of universal uniqueness (uses UUID). + + The default length is 25 characters. + """ + + the_id = UUID.uuid() + if length is not None: + the_id = the_id[:length] + return the_id + + +def generate_hex_string(): + """ + A random string of 5 hex digits with no guarantee of universal uniqueness. 
+ """ + + return '%05x' % randrange(16 ** 5) + + +def validate_dict_values(context, the_dict): + if not the_dict: + return + validate_list_values(context, the_dict.itervalues()) + + +def validate_list_values(context, the_list): + if not the_list: + return + for value in the_list: + value.validate(context) + + +def coerce_dict_values(context, container, the_dict, report_issues=False): + if not the_dict: + return + coerce_list_values(context, container, the_dict.itervalues(), report_issues) + + +def coerce_list_values(context, container, the_list, report_issues=False): + if not the_list: + return + for value in the_list: + value.coerce_values(context, container, report_issues) + + +def instantiate_dict(context, container, the_dict, from_dict): + if not from_dict: + return + for name, value in from_dict.iteritems(): + value = value.instantiate(context, container) + if value is not None: + the_dict[name] = value + + +def dump_list_values(context, the_list, name): + if not the_list: + return + puts('%s:' % name) + with context.style.indent: + for value in the_list: + value.dump(context) + + +def dump_dict_values(context, the_dict, name): + if not the_dict: + return + dump_list_values(context, the_dict.itervalues(), name) + + +def dump_parameters(context, parameters, name='Properties'): + if not parameters: + return + puts('%s:' % name) + with context.style.indent: + for parameter_name, parameter in parameters.iteritems(): + if parameter.type_name is not None: + puts('%s = %s (%s)' % (context.style.property(parameter_name), + context.style.literal(parameter.value), + context.style.type(parameter.type_name))) + else: + puts('%s = %s' % (context.style.property(parameter_name), + context.style.literal(parameter.value))) + if parameter.description: + puts(context.style.meta(parameter.description)) + + +def dump_interfaces(context, interfaces, name='Interfaces'): + if not interfaces: + return + puts('%s:' % name) + with context.style.indent: + for interface in interfaces.itervalues(): + interface.dump(context) + + +def pluralize(noun): + if noun.endswith('s'): + return '{0}es'.format(noun) + elif noun.endswith('y'): + return '{0}ies'.format(noun[:-1]) + else: + return '{0}s'.format(noun) + + +class classproperty(object): # pylint: disable=invalid-name + def __init__(self, f): + self._func = f + + def __get__(self, instance, owner): + return self._func(owner) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/aria/storage/structure.py ---------------------------------------------------------------------- diff --git a/aria/storage/structure.py b/aria/storage/structure.py deleted file mode 100644 index fa592ac..0000000 --- a/aria/storage/structure.py +++ /dev/null @@ -1,190 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -""" -Aria's storage.structures module -Path: aria.storage.structures - -models module holds aria's models. - -classes: - * Field - represents a single field. - * IterField - represents an iterable field. - * PointerField - represents a single pointer field. - * IterPointerField - represents an iterable pointers field. - * Model - abstract model implementation. -""" - -from sqlalchemy.orm import relationship, backref -from sqlalchemy.ext import associationproxy -from sqlalchemy import ( - Column, - ForeignKey, - Integer, - Text -) - - -class ModelMixin(object): - - @classmethod - def id_column_name(cls): - raise NotImplementedError - - @classmethod - def name_column_name(cls): - raise NotImplementedError - - @classmethod - def _get_cls_by_tablename(cls, tablename): - """Return class reference mapped to table. - - :param tablename: String with name of table. - :return: Class reference or None. - """ - if tablename in (cls.__name__, cls.__tablename__): - return cls - - for table_cls in cls._decl_class_registry.values(): - if tablename in (getattr(table_cls, '__name__', None), - getattr(table_cls, '__tablename__', None)): - return table_cls - - @classmethod - def foreign_key(cls, table, nullable=False): - """Return a ForeignKey object with the relevant - - :param table: Unique id column in the parent table - :param nullable: Should the column be allowed to remain empty - """ - table_cls = cls._get_cls_by_tablename(table.__tablename__) - foreign_key_str = '{tablename}.{unique_id}'.format(tablename=table_cls.__tablename__, - unique_id=table_cls.id_column_name()) - column = Column(ForeignKey(foreign_key_str, ondelete='CASCADE'), - nullable=nullable) - column.__remote_table_name = table_cls.__name__ - return column - - @classmethod - def one_to_many_relationship(cls, - foreign_key_column, - backreference=None, - backref_kwargs=None, - **kwargs): - """Return a one-to-many SQL relationship object - Meant to be used from inside the *child* object - - :param parent_class: Class of the parent table - :param cls: Class of the child table - :param foreign_key_column: The column of the foreign key (from the child table) - :param backreference: The name to give to the reference to the child (on the parent table) - """ - backref_kwargs = backref_kwargs or {} - parent_table = cls._get_cls_by_tablename( - getattr(cls, foreign_key_column).__remote_table_name) - primaryjoin_str = '{parent_class_name}.{parent_unique_id} == ' \ - '{child_class.__name__}.{foreign_key_column}'\ - .format( - parent_class_name=parent_table.__name__, - parent_unique_id=parent_table.id_column_name(), - child_class=cls, - foreign_key_column=foreign_key_column - ) - return relationship( - parent_table.__name__, - primaryjoin=primaryjoin_str, - foreign_keys=[getattr(cls, foreign_key_column)], - # The following line make sure that when the *parent* is - # deleted, all its connected children are deleted as well - backref=backref(backreference or cls.__tablename__, cascade='all', **backref_kwargs), - **kwargs - ) - - @classmethod - def relationship_to_self(cls, local_column): - - remote_side_str = '{cls.__name__}.{remote_column}'.format( - cls=cls, - remote_column=cls.id_column_name() - ) - primaryjoin_str = '{remote_side_str} == {cls.__name__}.{local_column}'.format( - remote_side_str=remote_side_str, - cls=cls, - local_column=local_column) - return relationship(cls.__name__, - primaryjoin=primaryjoin_str, - remote_side=remote_side_str, - post_update=True) - - def to_dict(self, fields=None, suppress_error=False): - """Return a dict 
representation of the model - - :param suppress_error: If set to True, sets `None` to attributes that - it's unable to retrieve (e.g., if a relationship wasn't established - yet, and so it's impossible to access a property through it) - """ - res = dict() - fields = fields or self.fields() - for field in fields: - try: - field_value = getattr(self, field) - except AttributeError: - if suppress_error: - field_value = None - else: - raise - if isinstance(field_value, list): - field_value = list(field_value) - elif isinstance(field_value, dict): - field_value = dict(field_value) - elif isinstance(field_value, ModelMixin): - field_value = field_value.to_dict() - res[field] = field_value - - return res - - @classmethod - def _association_proxies(cls): - for col, value in vars(cls).items(): - if isinstance(value, associationproxy.AssociationProxy): - yield col - - @classmethod - def fields(cls): - """Return the list of field names for this table - - Mostly for backwards compatibility in the code (that uses `fields`) - """ - fields = set(cls._association_proxies()) - fields.update(cls.__table__.columns.keys()) - return fields - set(getattr(cls, '_private_fields', [])) - - def __repr__(self): - return '<{__class__.__name__} id=`{id}`>'.format( - __class__=self.__class__, - id=getattr(self, self.name_column_name())) - - -class ModelIDMixin(object): - id = Column(Integer, primary_key=True, autoincrement=True) - name = Column(Text, nullable=True, index=True) - - @classmethod - def id_column_name(cls): - return 'id' - - @classmethod - def name_column_name(cls): - return 'name' http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/aria/storage/type.py ---------------------------------------------------------------------- diff --git a/aria/storage/type.py b/aria/storage/type.py deleted file mode 100644 index ac695b1..0000000 --- a/aria/storage/type.py +++ /dev/null @@ -1,299 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -from collections import namedtuple - -from sqlalchemy import ( - TypeDecorator, - VARCHAR, - event -) -from sqlalchemy.ext import mutable - -from . import exceptions - - -class _MutableType(TypeDecorator): - """ - Dict representation of type. 
- """ - @property - def python_type(self): - raise NotImplementedError - - def process_literal_param(self, value, dialect): - pass - - impl = VARCHAR - - def process_bind_param(self, value, dialect): - if value is not None: - value = json.dumps(value) - return value - - def process_result_value(self, value, dialect): - if value is not None: - value = json.loads(value) - return value - - -class Dict(_MutableType): - @property - def python_type(self): - return dict - - -class List(_MutableType): - @property - def python_type(self): - return list - - -class _StrictDictMixin(object): - - @classmethod - def coerce(cls, key, value): - "Convert plain dictionaries to MutableDict." - try: - if not isinstance(value, cls): - if isinstance(value, dict): - for k, v in value.items(): - cls._assert_strict_key(k) - cls._assert_strict_value(v) - return cls(value) - return mutable.MutableDict.coerce(key, value) - else: - return value - except ValueError as e: - raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e))) - - def __setitem__(self, key, value): - self._assert_strict_key(key) - self._assert_strict_value(value) - super(_StrictDictMixin, self).__setitem__(key, value) - - def setdefault(self, key, value): - self._assert_strict_key(key) - self._assert_strict_value(value) - super(_StrictDictMixin, self).setdefault(key, value) - - def update(self, *args, **kwargs): - for k, v in kwargs.items(): - self._assert_strict_key(k) - self._assert_strict_value(v) - super(_StrictDictMixin, self).update(*args, **kwargs) - - @classmethod - def _assert_strict_key(cls, key): - if cls._key_cls is not None and not isinstance(key, cls._key_cls): - raise exceptions.StorageError("Key type was set strictly to {0}, but was {1}".format( - cls._key_cls, type(key) - )) - - @classmethod - def _assert_strict_value(cls, value): - if cls._value_cls is not None and not isinstance(value, cls._value_cls): - raise exceptions.StorageError("Value type was set strictly to {0}, but was {1}".format( - cls._value_cls, type(value) - )) - - -class _MutableDict(mutable.MutableDict): - """ - Enables tracking for dict values. - """ - - @classmethod - def coerce(cls, key, value): - "Convert plain dictionaries to MutableDict." - try: - return mutable.MutableDict.coerce(key, value) - except ValueError as e: - raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e))) - - -class _StrictListMixin(object): - - @classmethod - def coerce(cls, key, value): - "Convert plain dictionaries to MutableDict." 
- try: - if not isinstance(value, cls): - if isinstance(value, list): - for item in value: - cls._assert_item(item) - return cls(value) - return mutable.MutableList.coerce(key, value) - else: - return value - except ValueError as e: - raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e))) - - def __setitem__(self, index, value): - """Detect list set events and emit change events.""" - self._assert_item(value) - super(_StrictListMixin, self).__setitem__(index, value) - - def append(self, item): - self._assert_item(item) - super(_StrictListMixin, self).append(item) - - def extend(self, item): - self._assert_item(item) - super(_StrictListMixin, self).extend(item) - - def insert(self, index, item): - self._assert_item(item) - super(_StrictListMixin, self).insert(index, item) - - @classmethod - def _assert_item(cls, item): - if cls._item_cls is not None and not isinstance(item, cls._item_cls): - raise exceptions.StorageError("Key type was set strictly to {0}, but was {1}".format( - cls._item_cls, type(item) - )) - - -class _MutableList(mutable.MutableList): - - @classmethod - def coerce(cls, key, value): - "Convert plain dictionaries to MutableDict." - try: - return mutable.MutableList.coerce(key, value) - except ValueError as e: - raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e))) - -_StrictDictID = namedtuple('_StrictDictID', 'key_cls, value_cls') -_StrictValue = namedtuple('_StrictValue', 'type_cls, listener_cls') - - -class _StrictDict(object): - """ - This entire class functions as a factory for strict dicts and their listeners. - No type class, and no listener type class is created more than once. If a relevant type class - exists it is returned. - """ - _strict_map = {} - - def __call__(self, key_cls=None, value_cls=None): - strict_dict_map_key = _StrictDictID(key_cls=key_cls, value_cls=value_cls) - if strict_dict_map_key not in self._strict_map: - # Creating the type class itself. this class would be returned (used by the sqlalchemy - # Column). - strict_dict_cls = type( - 'StrictDict_{0}_{1}'.format(key_cls.__name__, value_cls.__name__), - (Dict, ), - {} - ) - # Creating the type listening class. - # The new class inherits from both the _MutableDict class and the _StrictDictMixin, - # while setting the necessary _key_cls and _value_cls as class attributes. - listener_cls = type( - 'StrictMutableDict_{0}_{1}'.format(key_cls.__name__, value_cls.__name__), - (_StrictDictMixin, _MutableDict), - {'_key_cls': key_cls, '_value_cls': value_cls} - ) - self._strict_map[strict_dict_map_key] = _StrictValue(type_cls=strict_dict_cls, - listener_cls=listener_cls) - - return self._strict_map[strict_dict_map_key].type_cls - -StrictDict = _StrictDict() - - -class _StrictList(object): - """ - This entire class functions as a factory for strict lists and their listeners. - No type class, and no listener type class is created more than once. If a relevant type class - exists it is returned. - """ - _strict_map = {} - - def __call__(self, item_cls=None): - - if item_cls not in self._strict_map: - # Creating the type class itself. this class would be returned (used by the sqlalchemy - # Column). - strict_list_cls = type( - 'StrictList_{0}'.format(item_cls.__name__), - (List, ), - {} - ) - # Creating the type listening class. - # The new class inherits from both the _MutableList class and the _StrictListMixin, - # while setting the necessary _item_cls as class attribute. 
- listener_cls = type( - 'StrictMutableList_{0}'.format(item_cls.__name__), - (_StrictListMixin, _MutableList), - {'_item_cls': item_cls} - ) - self._strict_map[item_cls] = _StrictValue(type_cls=strict_list_cls, - listener_cls=listener_cls) - - return self._strict_map[item_cls].type_cls - -StrictList = _StrictList() - - -def _mutable_association_listener(mapper, cls): - strict_dict_type_to_listener = \ - dict((v.type_cls, v.listener_cls) for v in _StrictDict._strict_map.values()) - - strict_list_type_to_listener = \ - dict((v.type_cls, v.listener_cls) for v in _StrictList._strict_map.values()) - - for prop in mapper.column_attrs: - column_type = prop.columns[0].type - # Dict Listeners - if type(column_type) in strict_dict_type_to_listener: # pylint: disable=unidiomatic-typecheck - strict_dict_type_to_listener[type(column_type)].associate_with_attribute( - getattr(cls, prop.key)) - elif isinstance(column_type, Dict): - _MutableDict.associate_with_attribute(getattr(cls, prop.key)) - - # List Listeners - if type(column_type) in strict_list_type_to_listener: # pylint: disable=unidiomatic-typecheck - strict_list_type_to_listener[type(column_type)].associate_with_attribute( - getattr(cls, prop.key)) - elif isinstance(column_type, List): - _MutableList.associate_with_attribute(getattr(cls, prop.key)) -_LISTENER_ARGS = (mutable.mapper, 'mapper_configured', _mutable_association_listener) - - -def _register_mutable_association_listener(): - event.listen(*_LISTENER_ARGS) - - -def remove_mutable_association_listener(): - """ - Remove the event listener that associates ``Dict`` and ``List`` column types with - ``MutableDict`` and ``MutableList``, respectively. - - This call must happen before any model instance is instantiated. - This is because once it does, that would trigger the listener we are trying to remove. - Once it is triggered, many other listeners will then be registered. - At that point, it is too late. - - The reason this function exists is that the association listener, interferes with ARIA change - tracking instrumentation, so a way to disable it is required. - - Note that the event listener this call removes is registered by default. - """ - if event.contains(*_LISTENER_ARGS): - event.remove(*_LISTENER_ARGS) - -_register_mutable_association_listener() http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/aria/storage_initializer.py ---------------------------------------------------------------------- diff --git a/aria/storage_initializer.py b/aria/storage_initializer.py new file mode 100644 index 0000000..0386baa --- /dev/null +++ b/aria/storage_initializer.py @@ -0,0 +1,135 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from datetime import datetime +from threading import RLock + +from .storage import model +from .orchestrator import operation +from .utils.formatting import safe_repr +from .utils.console import puts, Colored + + +def initialize_storage(context, model_storage, deployment_id): + blueprint = _create_blueprint(context) + model_storage.blueprint.put(blueprint) + + deployment = _create_deployment(context, blueprint, deployment_id) + model_storage.deployment.put(deployment) + + # Create nodes and node instances + for node_template in context.modeling.model.node_templates.values(): + model_storage.node_template.put(node_template) + + for a_node in context.modeling.instance.find_nodes(node_template.name): + node = _create_node_instance(deployment, node_template, a_node) + model_storage.node.put(node) + + # Create relationships + for node_template in context.modeling.model.node_templates.values(): + for index, requirement_template in enumerate(node_template.requirement_templates): + # We are currently limited only to requirements for specific node templates! + if requirement_template.target_node_template_name: + model_storage.requirement_template.put(requirement_template) + + for node in context.modeling.instance.find_nodes(node_template.name): + for relationship_model in node.relationships: + if relationship_model.source_requirement_index == index: + source_instance = \ + model_storage.node_instance.get_by_name(node.id) + target_instance = \ + model_storage.node_instance.get_by_name( + relationship_model.target_node_id) + relationship = \ + _create_relationship_instance(source_instance, target_instance) + model_storage.relationship.put(relationship) + + +def _create_blueprint(context): + now = datetime.utcnow() + main_file_name = unicode(context.presentation.location) + try: + name = context.modeling.model.metadata.values.get('template_name') + except AttributeError: + name = None + + return model.ServiceTemplate( + plan={}, + name=name or main_file_name, + description=context.modeling.model.description or '', + created_at=now, + updated_at=now, + main_file_name=main_file_name + ) + + +def _create_deployment(context, service_template, service_instance_id): + now = datetime.utcnow() + return model.ServiceInstance( + name='{0}_{1}'.format(service_template.name, service_instance_id), + service_template=service_template, + description=context.modeling.instance.description or '', + created_at=now, + updated_at=now, + workflows={}, + permalink='', + policy_triggers={}, + scaling_groups={} + ) + + +def _create_node_instance(service_instance, node, node_model): + return model.Node( + service_instance=service_instance, + name=node_model.id, + runtime_properties={}, + version=None, + node_template=node, + state='', + scaling_groups=[] + ) + + +def _create_relationship_instance(source_instance, target_instance): + return model.Relationship( + source_node=source_instance, + target_node=target_instance + ) + + +_TERMINAL_LOCK = RLock() + + +@operation +def _dry_node(ctx, _plugin, _implementation, **kwargs): + with _TERMINAL_LOCK: + print '> node instance: %s' % Colored.red(ctx.node_instance.name) + _dump_implementation(_plugin, _implementation) + + +@operation +def _dry_relationship(ctx, _plugin, _implementation, **kwargs): + with _TERMINAL_LOCK: + puts('> relationship instance: %s -> %s' % ( + Colored.red(ctx.relationship_instance.source_node_instance.name), + Colored.red(ctx.relationship_instance.target_node_instance.name))) + _dump_implementation(_plugin, _implementation) + + +def 
_dump_implementation(plugin, implementation): + if plugin: + print ' plugin: %s' % Colored.magenta(plugin) + if implementation: + print ' implementation: %s' % Colored.yellow(safe_repr(implementation)) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/aria/utils/application.py ---------------------------------------------------------------------- diff --git a/aria/utils/application.py b/aria/utils/application.py index 113e054..161a9cb 100644 --- a/aria/utils/application.py +++ b/aria/utils/application.py @@ -102,14 +102,14 @@ class StorageManager(LoggerMixin): assert hasattr(self.model_storage, 'blueprint') self.logger.debug('creating blueprint resource storage entry') - self.resource_storage.blueprint.upload( + self.resource_storage.service_template.upload( entry_id=self.blueprint_id, source=os.path.dirname(source)) self.logger.debug('created blueprint resource storage entry') self.logger.debug('creating blueprint model storage entry') now = datetime.utcnow() - blueprint = self.model_storage.blueprint.model_cls( + blueprint = self.model_storage.service_template.model_cls( plan=self.blueprint_plan, id=self.blueprint_id, description=self.blueprint_plan.get('description'), @@ -117,7 +117,7 @@ class StorageManager(LoggerMixin): updated_at=now, main_file_name=main_file_name, ) - self.model_storage.blueprint.put(blueprint) + self.model_storage.service_template.put(blueprint) self.logger.debug('created blueprint model storage entry') def create_nodes_storage(self): @@ -164,10 +164,10 @@ class StorageManager(LoggerMixin): self.logger.debug('creating deployment resource storage entry') temp_dir = tempfile.mkdtemp() try: - self.resource_storage.blueprint.download( + self.resource_storage.service_template.download( entry_id=self.blueprint_id, destination=temp_dir) - self.resource_storage.deployment.upload( + self.resource_storage.service_instance.upload( entry_id=self.deployment_id, source=temp_dir) finally: @@ -176,7 +176,7 @@ class StorageManager(LoggerMixin): self.logger.debug('creating deployment model storage entry') now = datetime.utcnow() - deployment = self.model_storage.deployment.model_cls( + deployment = self.model_storage.service_instance.model_cls( id=self.deployment_id, blueprint_id=self.blueprint_id, description=self.deployment_plan['description'], @@ -190,7 +190,7 @@ class StorageManager(LoggerMixin): created_at=now, updated_at=now ) - self.model_storage.deployment.put(deployment) + self.model_storage.service_instance.put(deployment) self.logger.debug('created deployment model storage entry') def create_node_instances_storage(self): http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml ---------------------------------------------------------------------- diff --git a/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml b/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml index 08da2f3..a252a7c 100644 --- a/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml +++ b/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/groups.yaml @@ -24,5 +24,5 @@ group_types: description: >- This is the default (root) TOSCA Group Type definition that all other TOSCA base Group Types derive from. 
interfaces: - standard: + Standard: type: tosca.interfaces.node.lifecycle.Standard http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/mock/context.py ---------------------------------------------------------------------- diff --git a/tests/mock/context.py b/tests/mock/context.py index ec4bfb8..74ec31c 100644 --- a/tests/mock/context.py +++ b/tests/mock/context.py @@ -25,7 +25,7 @@ from .topology import create_simple_topology_two_nodes def simple(mapi_kwargs, resources_dir=None, **kwargs): model_storage = aria.application_model_storage(SQLAlchemyModelAPI, api_kwargs=mapi_kwargs) - deployment_id = create_simple_topology_two_nodes(model_storage) + service_instnce_id = create_simple_topology_two_nodes(model_storage) # pytest tmpdir if resources_dir: @@ -40,7 +40,7 @@ def simple(mapi_kwargs, resources_dir=None, **kwargs): name='simple_context', model_storage=model_storage, resource_storage=resource_storage, - deployment_id=deployment_id, + service_instance_id=service_instnce_id, workflow_name=models.WORKFLOW_NAME, task_max_attempts=models.TASK_MAX_ATTEMPTS, task_retry_interval=models.TASK_RETRY_INTERVAL http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/mock/models.py ---------------------------------------------------------------------- diff --git a/tests/mock/models.py b/tests/mock/models.py index 8229038..047526a 100644 --- a/tests/mock/models.py +++ b/tests/mock/models.py @@ -15,8 +15,7 @@ from datetime import datetime -from aria.storage import model - +from aria.storage.modeling import model from . import operations DEPLOYMENT_NAME = 'test_deployment_id' @@ -35,84 +34,78 @@ RELATIONSHIP_INSTANCE_NAME = 'relationship_instance' def get_dependency_node(deployment): - return model.Node( + return model.NodeTemplate( name=DEPENDENCY_NODE_NAME, - type='test_node_type', + type_name='test_node_type', type_hierarchy=[], - number_of_instances=1, - planned_number_of_instances=1, - deploy_number_of_instances=1, - properties={}, - operations=dict((key, {}) for key in operations.NODE_OPERATIONS), - min_number_of_instances=1, - max_number_of_instances=1, - deployment_fk=deployment.id + default_instances=1, + min_instances=1, + max_instances=1, + service_template=deployment.service_template, ) -def get_dependency_node_instance(dependency_node): - return model.NodeInstance( +def get_dependency_node_instance(dependency_node, deployment): + return model.Node( name=DEPENDENCY_NODE_INSTANCE_NAME, + service_instance=deployment, runtime_properties={'ip': '1.1.1.1'}, version=None, - node_fk=dependency_node.id, + node_template=dependency_node, state='', scaling_groups=[] ) -def get_relationship(source=None, target=None): - return model.Relationship( - source_node_fk=source.id, - target_node_fk=target.id, - source_interfaces={}, - source_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS), - target_interfaces={}, - target_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS), - type='rel_type', - type_hierarchy=[], - properties={}, - ) +def get_relationship(target): + requirement_template = model.RequirementTemplate(target_node_template_name=target.name) + capability_template = model.CapabilityTemplate() + + return requirement_template, capability_template -def get_relationship_instance(source_instance, target_instance, relationship): - return model.RelationshipInstance( - relationship_fk=relationship.id, - target_node_instance_fk=target_instance.id, - source_node_instance_fk=source_instance.id, +def 
get_relationship_instance(source_instance, target_instance): + return model.Relationship( + target_node=target_instance, + source_node=source_instance, ) -def get_dependent_node(deployment): - return model.Node( +def get_dependent_node(deployment, requirement_template, capability_template): + operation_templates = [model.OperationTemplate(implementation=op, + service_template=deployment.service_template) + for op in operations.NODE_OPERATIONS] + interface_template = model.InterfaceTemplate(operation_templates=operation_templates) + + return model.NodeTemplate( name=DEPENDENT_NODE_NAME, - deployment_fk=deployment.id, - type='test_node_type', + type_name='test_node_type', type_hierarchy=[], - number_of_instances=1, - planned_number_of_instances=1, - deploy_number_of_instances=1, - properties={}, - operations=dict((key, {}) for key in operations.NODE_OPERATIONS), - min_number_of_instances=1, - max_number_of_instances=1, + default_instances=1, + min_instances=1, + max_instances=1, + service_template=deployment.service_template, + interface_templates=[interface_template], + requirement_templates=[requirement_template], + capability_templates=[capability_template], ) -def get_dependent_node_instance(dependent_node): - return model.NodeInstance( +def get_dependent_node_instance(dependent_node, deployment): + return model.Node( name=DEPENDENT_NODE_INSTANCE_NAME, + service_instance=deployment, runtime_properties={}, version=None, - node_fk=dependent_node.id, + node_template=dependent_node, state='', - scaling_groups=[] + scaling_groups=[], ) def get_blueprint(): now = datetime.now() - return model.Blueprint( + return model.ServiceTemplate( plan={}, name=BLUEPRINT_NAME, description=None, @@ -124,7 +117,7 @@ def get_blueprint(): def get_execution(deployment): return model.Execution( - deployment_fk=deployment.id, + service_instance=deployment, status=model.Execution.STARTED, workflow_name=WORKFLOW_NAME, started_at=datetime.utcnow(), @@ -134,19 +127,16 @@ def get_execution(deployment): def get_deployment(blueprint): now = datetime.utcnow() - return model.Deployment( + return model.ServiceInstance( name=DEPLOYMENT_NAME, - blueprint_fk=blueprint.id, + service_template=blueprint, description='', created_at=now, updated_at=now, workflows={}, - inputs={}, - groups={}, permalink='', policy_triggers={}, policy_types={}, - outputs={}, scaling_groups={}, ) @@ -165,3 +155,28 @@ def get_plugin(package_name='package', package_version='0.1'): uploaded_at=datetime.now(), wheels=[], ) + + +def get_interface_template(operation_name, operation_kwargs=None, interface_kwargs=None): + operation_template = model.OperationTemplate( + name=operation_name, + **(operation_kwargs or {}) + + ) + return model.InterfaceTemplate( + operation_templates=[operation_template], + name=operation_name.rsplit('.', 1)[0], + **(interface_kwargs or {}) + ) + + +def get_interface(operation_name, + operation_kwargs=None, + interface_kwargs=None, + edge=None): + operation = model.Operation(name=operation_name, **(operation_kwargs or {})) + interface_name = operation_name.rsplit('.', 1)[0] + return model.Interface(operations=[operation], + name=interface_name, + edge=edge, + **(interface_kwargs or {})) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/mock/topology.py ---------------------------------------------------------------------- diff --git a/tests/mock/topology.py b/tests/mock/topology.py index e219c33..67d1378 100644 --- a/tests/mock/topology.py +++ b/tests/mock/topology.py @@ -13,84 +13,73 @@ # See the 
License for the specific language governing permissions and # limitations under the License. -from datetime import datetime - from aria.storage import model from . import models -def create_simple_topology_single_node(model_storage, deployment_id, create_operation): - now = datetime.utcnow() - - blueprint = model.Blueprint(name='mock-blueprint', - created_at=now, - updated_at=now, - plan={}, - main_file_name='mock-file') - model_storage.blueprint.put(blueprint) - - deployment = model.Deployment(name='mock-deployment-%d' % deployment_id, - blueprint_fk=blueprint.id, - created_at=now, - updated_at=now) - model_storage.deployment.put(deployment) - - node = model.Node(name='mock-node', - type='tosca.nodes.Compute', - operations={ - 'tosca.interfaces.node.lifecycle.Standard.create': { - 'operation': create_operation, - 'inputs': { - 'key': 'create', - 'value': True}}}, - number_of_instances=1, - planned_number_of_instances=1, - deploy_number_of_instances=1, - min_number_of_instances=1, - max_number_of_instances=1, - deployment_fk=deployment.id) +def create_simple_topology_single_node(model_storage, create_operation): + service_template = models.get_blueprint() + model_storage.service_template.put(service_template) + + service_instance = models.get_deployment(service_template) + model_storage.service_instance.put(service_instance) + + node_template = models.get_dependency_node(service_instance) + node_template.interface_templates = [models.get_interface_template( + 'tosca.interfaces.node.lifecycle.Standard.create', + operation_kwargs=dict( + implementation=create_operation, + inputs=[model.Parameter(name='key', value='create', type='str'), + model.Parameter(name='value', value=True, type='bool')] + ) + )] + model_storage.node_template.put(node_template) + + node = models.get_dependency_node_instance(node_template, service_instance) + node.interfaces = [models.get_interface( + 'tosca.interfaces.node.lifecycle.Standard.create', + operation_kwargs=dict(implementation=create_operation, + inputs=[model.Parameter(name='key', value='create', type='str'), + model.Parameter(name='value', value=True, type='bool')]) + )] model_storage.node.put(node) - node_instance = model.NodeInstance(name='mock-node-instance', - state='', - node_fk=node.id) - model_storage.node_instance.put(node_instance) - def create_simple_topology_two_nodes(model_storage): blueprint = models.get_blueprint() - model_storage.blueprint.put(blueprint) + model_storage.service_template.put(blueprint) deployment = models.get_deployment(blueprint) - model_storage.deployment.put(deployment) + model_storage.service_instance.put(deployment) ################################################################################# # Creating a simple deployment with node -> node as a graph dependency_node = models.get_dependency_node(deployment) - model_storage.node.put(dependency_node) - storage_dependency_node = model_storage.node.get(dependency_node.id) + model_storage.node_template.put(dependency_node) + storage_dependency_node = model_storage.node_template.get(dependency_node.id) + + dependency_node_instance = models.get_dependency_node_instance(storage_dependency_node, + deployment) + model_storage.node.put(dependency_node_instance) + storage_dependency_node_instance = model_storage.node.get(dependency_node_instance.id) - dependency_node_instance = models.get_dependency_node_instance(storage_dependency_node) - model_storage.node_instance.put(dependency_node_instance) - storage_dependency_node_instance = 
model_storage.node_instance.get(dependency_node_instance.id) + req_template, cap_template = models.get_relationship(storage_dependency_node) + model_storage.requirement_template.put(req_template) + model_storage.capability_template.put(cap_template) - dependent_node = models.get_dependent_node(deployment) - model_storage.node.put(dependent_node) - storage_dependent_node = model_storage.node.get(dependent_node.id) + dependent_node = models.get_dependent_node(deployment, req_template, cap_template) + model_storage.node_template.put(dependent_node) + storage_dependent_node = model_storage.node_template.get(dependent_node.id) - dependent_node_instance = models.get_dependent_node_instance(storage_dependent_node) - model_storage.node_instance.put(dependent_node_instance) - storage_dependent_node_instance = model_storage.node_instance.get(dependent_node_instance.id) + dependent_node_instance = models.get_dependent_node_instance(storage_dependent_node, deployment) + model_storage.node.put(dependent_node_instance) + storage_dependent_node_instance = model_storage.node.get(dependent_node_instance.id) - relationship = models.get_relationship(storage_dependent_node, storage_dependency_node) - model_storage.relationship.put(relationship) - storage_relationship = model_storage.relationship.get(relationship.id) relationship_instance = models.get_relationship_instance( - relationship=storage_relationship, target_instance=storage_dependency_node_instance, source_instance=storage_dependent_node_instance ) - model_storage.relationship_instance.put(relationship_instance) + model_storage.relationship.put(relationship_instance) return deployment.id http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/orchestrator/context/test_operation.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py index b0918d1..35db048 100644 --- a/tests/orchestrator/context/test_operation.py +++ b/tests/orchestrator/context/test_operation.py @@ -55,14 +55,13 @@ def executor(): def test_node_operation_task_execution(ctx, executor): operation_name = 'aria.interfaces.lifecycle.create' - node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) - node.operations[operation_name] = { - 'operation': op_path(my_operation, module_path=__name__) - - } + node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) + interface = mock.models.get_interface( + operation_name, + operation_kwargs=dict(implementation=op_path(my_operation, module_path=__name__)) + ) + node.interfaces = [interface] ctx.model.node.update(node) - node_instance = ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) - inputs = {'putput': True} @workflow @@ -70,77 +69,80 @@ def test_node_operation_task_execution(ctx, executor): graph.add_tasks( api.task.OperationTask.node_instance( name=operation_name, - instance=node_instance, + instance=node, inputs=inputs ) ) execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor) - operation_context = global_test_holder[op_name(node_instance, operation_name)] + operation_context = global_test_holder[op_name(node, operation_name)] assert isinstance(operation_context, context.operation.NodeOperationContext) # Task bases assertions - assert operation_context.task.actor == node_instance - assert operation_context.task.name == op_name(node_instance, operation_name) - assert operation_context.task.operation_mapping == 
node.operations[operation_name]['operation'] + assert operation_context.task.actor == node + assert operation_context.task.name == op_name(node, operation_name) + operations = interface.operations.filter_by(name=operation_name) # pylint: disable=no-member + assert operations.count() == 1 + assert operation_context.task.implementation == operations[0].implementation assert operation_context.task.inputs == inputs # Context based attributes (sugaring) - assert operation_context.node == node_instance.node - assert operation_context.node_instance == node_instance + assert operation_context.node_template == node.node_template + assert operation_context.node == node def test_relationship_operation_task_execution(ctx, executor): - operation_name = 'aria.interfaces.relationship_lifecycle.postconfigure' + operation_name = 'aria.interfaces.relationship_lifecycle.post_configure' relationship = ctx.model.relationship.list()[0] - relationship.source_operations[operation_name] = { - 'operation': op_path(my_operation, module_path=__name__) - } - ctx.model.relationship.update(relationship) - relationship_instance = ctx.model.relationship_instance.list()[0] - dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) - dependency_node_instance = \ - ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) - dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME) - dependent_node_instance = \ - ctx.model.node_instance.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME) + interface = mock.models.get_interface( + operation_name=operation_name, + operation_kwargs=dict(implementation=op_path(my_operation, module_path=__name__)), + edge='source' + ) + relationship.interfaces = [interface] + ctx.model.relationship.update(relationship) inputs = {'putput': True} @workflow def basic_workflow(graph, **_): graph.add_tasks( api.task.OperationTask.relationship_instance( - instance=relationship_instance, + instance=relationship, name=operation_name, - operation_end=api.task.OperationTask.SOURCE_OPERATION, - inputs=inputs + inputs=inputs, + edge='source' ) ) execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor) - operation_context = global_test_holder[op_name(relationship_instance, operation_name)] + operation_context = global_test_holder[op_name(relationship, + operation_name)] assert isinstance(operation_context, context.operation.RelationshipOperationContext) # Task bases assertions - assert operation_context.task.actor == relationship_instance - assert operation_context.task.name == op_name(relationship_instance, operation_name) - assert operation_context.task.operation_mapping == \ - relationship.source_operations[operation_name]['operation'] + assert operation_context.task.actor == relationship + assert operation_context.task.name.startswith(operation_name) + operation = interface.operations.filter_by(name=operation_name) # pylint: disable=no-member + assert operation_context.task.implementation == operation.all()[0].implementation assert operation_context.task.inputs == inputs # Context based attributes (sugaring) + dependency_node_template = ctx.model.node_template.get_by_name(mock.models.DEPENDENCY_NODE_NAME) + dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) + dependent_node_template = ctx.model.node_template.get_by_name(mock.models.DEPENDENT_NODE_NAME) + dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME) + + assert 
operation_context.target_node_template == dependency_node_template assert operation_context.target_node == dependency_node - assert operation_context.target_node_instance == dependency_node_instance assert operation_context.relationship == relationship - assert operation_context.relationship_instance == relationship_instance + assert operation_context.source_node_template == dependent_node_template assert operation_context.source_node == dependent_node - assert operation_context.source_node_instance == dependent_node_instance def test_invalid_task_operation_id(ctx, executor): @@ -152,39 +154,42 @@ def test_invalid_task_operation_id(ctx, executor): :return: """ operation_name = 'aria.interfaces.lifecycle.create' - other_node_instance, node_instance = ctx.model.node_instance.list() - assert other_node_instance.id == 1 - assert node_instance.id == 2 - - node = node_instance.node - node.operations[operation_name] = { - 'operation': op_path(get_node_instance_id, module_path=__name__) - - } + other_node, node = ctx.model.node.list() + assert other_node.id == 1 + assert node.id == 2 + + interface = mock.models.get_interface( + operation_name=operation_name, + operation_kwargs=dict(implementation=op_path(get_node_instance_id, module_path=__name__)) + ) + node.interfaces = [interface] ctx.model.node.update(node) @workflow def basic_workflow(graph, **_): graph.add_tasks( - api.task.OperationTask.node_instance(name=operation_name, instance=node_instance) + api.task.OperationTask.node_instance(name=operation_name, instance=node) ) execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor) - op_node_instance_id = global_test_holder[op_name(node_instance, operation_name)] - assert op_node_instance_id == node_instance.id - assert op_node_instance_id != other_node_instance.id + op_node_instance_id = global_test_holder[op_name(node, operation_name)] + assert op_node_instance_id == node.id + assert op_node_instance_id != other_node.id def test_plugin_workdir(ctx, executor, tmpdir): op = 'test.op' plugin_name = 'mock_plugin' - node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) - node.operations[op] = {'operation': '{0}.{1}'.format(__name__, _test_plugin_workdir.__name__), - 'plugin': plugin_name} + node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) + node.interfaces = [mock.models.get_interface( + op, + operation_kwargs=dict( + implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__), + plugin=plugin_name) + )] node.plugins = [{'name': plugin_name}] ctx.model.node.update(node) - node_instance = ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME) filename = 'test_file' content = 'file content' @@ -193,10 +198,12 @@ def test_plugin_workdir(ctx, executor, tmpdir): @workflow def basic_workflow(graph, **_): graph.add_tasks(api.task.OperationTask.node_instance( - name=op, instance=node_instance, inputs=inputs)) + name=op, instance=node, inputs=inputs)) execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor) - expected_file = tmpdir.join('workdir', 'plugins', str(ctx.deployment.id), plugin_name, filename) + expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service_instance.id), + plugin_name, + filename) assert expected_file.read() == content @@ -207,7 +214,7 @@ def my_operation(ctx, **_): @operation def get_node_instance_id(ctx, **_): - global_test_holder[ctx.name] = ctx.node_instance.id + global_test_holder[ctx.name] = ctx.node.id @operation 
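The recurring pattern in the test changes above is the migration from the flat per-node ``operations`` dict to modeled ``Interface``/``Operation`` objects, with the task attribute renamed from ``operation_mapping`` to ``implementation``. Reduced to its essentials, it looks like the sketch below; the operation name and implementation path are placeholders, and ``mock.models.get_interface`` is the helper added in tests/mock/models.py above.

    operation_name = 'some.interface.op'        # placeholder
    implementation = 'package.module.my_task'   # placeholder

    # Before: a plain dict keyed by operation name.
    node.operations[operation_name] = {'operation': implementation}

    # After: Interface/Operation model objects attached to the node.
    interface = mock.models.get_interface(
        operation_name,
        operation_kwargs=dict(implementation=implementation),
    )
    node.interfaces = [interface]
    ctx.model.node.update(node)

The same renaming runs through the storage APIs in this commit: ``blueprint`` becomes ``service_template``, ``deployment`` becomes ``service_instance``, ``node`` becomes ``node_template``, and ``node_instance`` becomes ``node``.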
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/orchestrator/context/test_resource_render.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py
index ca2ef42..3ba0873 100644
--- a/tests/orchestrator/context/test_resource_render.py
+++ b/tests/orchestrator/context/test_resource_render.py
@@ -17,7 +17,7 @@ import pytest

 from tests import mock, storage

-_IMPLICIT_CTX_TEMPLATE = '{{ctx.deployment.name}}'
+_IMPLICIT_CTX_TEMPLATE = '{{ctx.service_instance.name}}'
 _IMPLICIT_CTX_TEMPLATE_PATH = 'implicit-ctx.template'
 _VARIABLES_TEMPLATE = '{{variable}}'
 _VARIABLES_TEMPLATE_PATH = 'variables.template'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 76930b1..eed98a4 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -49,13 +49,14 @@ def test_illegal_serialize_of_memory_model_storage(memory_model_storage):

 @workflow
 def _mock_workflow(ctx, graph):
-    op = 'test.op'
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
     plugin_name = 'mock_plugin'
-    node_instance = ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    node = node_instance.node
-    node.operations[op] = {'operation': _operation_mapping(), 'plugin': plugin_name}
+    node.interfaces = [mock.models.get_interface(
+        'test.op',
+        operation_kwargs=dict(implementation=_operation_mapping(), plugin=plugin_name)
+    )]
     node.plugins = [{'name': plugin_name}]
-    task = api.task.OperationTask.node_instance(instance=node_instance, name=op)
+    task = api.task.OperationTask.node_instance(instance=node, name='test.op')
     graph.add_tasks(task)
     return graph

@@ -65,14 +66,14 @@ def _mock_operation(ctx):
     # We test several things in this operation
     # ctx.task, ctx.node, etc... tell us that the model storage was properly re-created
     # a correct ctx.task.operation_mapping tells us we kept the correct task_id
-    assert ctx.task.operation_mapping == _operation_mapping()
+    assert ctx.task.implementation == _operation_mapping()
     # a correct ctx.node.name tells us we kept the correct actor_id
-    assert ctx.node.name == mock.models.DEPENDENCY_NODE_NAME
+    assert ctx.node.name == mock.models.DEPENDENCY_NODE_INSTANCE_NAME
     # a correct ctx.name tells us we kept the correct name
     assert ctx.name is not None
     assert ctx.name == ctx.task.name
     # a correct ctx.deployment.name tells us we kept the correct deployment_id
-    assert ctx.deployment.name == mock.models.DEPLOYMENT_NAME
+    assert ctx.service_instance.name == mock.models.DEPLOYMENT_NAME
     # Here we test that the resource storage was properly re-created
     test_file_content = ctx.resource.blueprint.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
     assert test_file_content == TEST_FILE_CONTENT

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index b63811b..5805293 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -48,45 +48,46 @@ def executor():

 def _get_elements(workflow_context):
-    dependency_node = workflow_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    dependency_node.host = dependency_node
-    workflow_context.model.node.update(dependency_node)
+    dependency_node_template = workflow_context.model.node_template.get_by_name(
+        mock.models.DEPENDENCY_NODE_NAME)
+    dependency_node_template.host = dependency_node_template
+    workflow_context.model.node.update(dependency_node_template)

-    dependency_node_instance = workflow_context.model.node_instance.get_by_name(
+    dependency_node = workflow_context.model.node.get_by_name(
         mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    dependency_node_instance.host_fk = dependency_node_instance.id
-    workflow_context.model.node_instance.update(dependency_node_instance)
+    dependency_node.host_fk = dependency_node.id
+    workflow_context.model.node.update(dependency_node)

-    dependent_node = workflow_context.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
-    dependent_node.host_fk = dependency_node.id
-    workflow_context.model.node.update(dependent_node)
+    dependent_node_template = workflow_context.model.node_template.get_by_name(
+        mock.models.DEPENDENT_NODE_NAME)
+    dependent_node_template.host = dependency_node_template
+    workflow_context.model.node_template.update(dependent_node_template)

-    dependent_node_instance = workflow_context.model.node_instance.get_by_name(
+    dependent_node = workflow_context.model.node.get_by_name(
         mock.models.DEPENDENT_NODE_INSTANCE_NAME)
-    dependent_node_instance.host_fk = dependent_node_instance.id
-    workflow_context.model.node_instance.update(dependent_node_instance)
+    dependent_node.host = dependent_node
+    workflow_context.model.node.update(dependent_node)

     relationship = workflow_context.model.relationship.list()[0]
-    relationship_instance = workflow_context.model.relationship_instance.list()[0]
-    return dependency_node, dependency_node_instance, dependent_node, dependent_node_instance, \
-        relationship, relationship_instance
+    return dependency_node_template, dependency_node, dependent_node_template, dependent_node, \
+        relationship


 def test_host_ip(workflow_context, executor):
     operation_name = 'aria.interfaces.lifecycle.create'
-    dependency_node, dependency_node_instance, _, _, _, _ = _get_elements(workflow_context)
-    dependency_node.operations[operation_name] = {
-        'operation': op_path(host_ip, module_path=__name__)
-
-    }
-    workflow_context.model.node.put(dependency_node)
+    _, dependency_node, _, _, _ = _get_elements(workflow_context)
+    dependency_node.interfaces = [mock.models.get_interface(
+        operation_name,
+        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__))
+    )]
+    workflow_context.model.node.update(dependency_node)
     inputs = {'putput': True}

     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
             api.task.OperationTask.node_instance(
-                instance=dependency_node_instance,
+                instance=dependency_node,
                 name=operation_name,
                 inputs=inputs
             )
@@ -94,18 +95,20 @@ def test_host_ip(workflow_context, executor):

     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)

-    assert global_test_holder.get('host_ip') == \
-        dependency_node_instance.runtime_properties.get('ip')
+    assert global_test_holder.get('host_ip') == dependency_node.runtime_properties.get('ip')


 def test_relationship_tool_belt(workflow_context, executor):
-    operation_name = 'aria.interfaces.relationship_lifecycle.postconfigure'
-    _, _, _, _, relationship, relationship_instance = \
-        _get_elements(workflow_context)
-    relationship.source_operations[operation_name] = {
-        'operation': op_path(relationship_operation, module_path=__name__)
-    }
-    workflow_context.model.relationship.put(relationship)
+    operation_name = 'aria.interfaces.relationship_lifecycle.post_configure'
+    _, _, _, _, relationship = _get_elements(workflow_context)
+    relationship.interfaces = [
+        mock.models.get_interface(
+            operation_name,
+            operation_kwargs=dict(
+                implementation=op_path(relationship_operation, module_path=__name__)),
+            edge='source')
+    ]
+    workflow_context.model.relationship.update(relationship)

     inputs = {'putput': True}

@@ -113,16 +116,16 @@ def test_relationship_tool_belt(workflow_context, executor):
     def basic_workflow(graph, **_):
         graph.add_tasks(
             api.task.OperationTask.relationship_instance(
-                instance=relationship_instance,
+                instance=relationship,
                 name=operation_name,
-                operation_end=api.task.OperationTask.SOURCE_OPERATION,
+                edge='source',
                 inputs=inputs
             )
         )

     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
-    assert isinstance(global_test_holder.get(op_name(relationship_instance, operation_name)),
+    assert isinstance(global_test_holder.get(op_name(relationship, operation_name)),
                       RelationshipToolBelt)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index 496c1ff..89afb39 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -29,9 +29,11 @@ class TestWorkflowContext(object):
     def test_execution_creation_on_workflow_context_creation(self, storage):
         ctx = self._create_ctx(storage)
         execution = storage.execution.get(ctx.execution.id)  # pylint: disable=no-member
-        assert execution.deployment == storage.deployment.get_by_name(models.DEPLOYMENT_NAME)
+        assert execution.service_instance == storage.service_instance.get_by_name(
+            models.DEPLOYMENT_NAME)
         assert execution.workflow_name == models.WORKFLOW_NAME
-        assert execution.blueprint == storage.blueprint.get_by_name(models.BLUEPRINT_NAME)
+        assert execution.service_template == storage.service_template.get_by_name(
+            models.BLUEPRINT_NAME)
         assert execution.status == storage.execution.model_cls.PENDING
         assert execution.parameters == {}
         assert execution.created_at <= datetime.utcnow()
@@ -51,7 +53,7 @@ class TestWorkflowContext(object):
             name='simple_context',
             model_storage=storage,
             resource_storage=None,
-            deployment_id=storage.deployment.get_by_name(models.DEPLOYMENT_NAME).id,
+            service_instance_id=storage.service_instance.get_by_name(models.DEPLOYMENT_NAME).id,
             workflow_name=models.WORKFLOW_NAME,
             task_max_attempts=models.TASK_MAX_ATTEMPTS,
             task_retry_interval=models.TASK_RETRY_INTERVAL
@@ -62,8 +64,8 @@ class TestWorkflowContext(object):
 def storage():
     api_kwargs = test_storage.get_sqlite_api_kwargs()
     workflow_storage = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
-    workflow_storage.blueprint.put(models.get_blueprint())
-    blueprint = workflow_storage.blueprint.get_by_name(models.BLUEPRINT_NAME)
-    workflow_storage.deployment.put(models.get_deployment(blueprint))
+    workflow_storage.service_template.put(models.get_blueprint())
+    blueprint = workflow_storage.service_template.get_by_name(models.BLUEPRINT_NAME)
+    workflow_storage.service_instance.put(models.get_deployment(blueprint))
     yield workflow_storage
     test_storage.release_sqlite_storage(workflow_storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fb3ac003/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index 497da48..f9d4485 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -42,10 +42,10 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties map.key value
+            ctx node runtime-properties map.key value
             ''',
             windows_script='''
-            ctx node-instance runtime-properties map.key value
+            ctx node runtime-properties map.key value
             ''')
         props = self._run(
             executor, workflow_context,
@@ -56,12 +56,12 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties map.key1 $key1
-            ctx node-instance runtime-properties map.key2 $key2
+            ctx node runtime-properties map.key1 $key1
+            ctx node runtime-properties map.key2 $key2
             ''',
             windows_script='''
-            ctx node-instance runtime-properties map.key1 %key1%
-            ctx node-instance runtime-properties map.key2 %key2%
+            ctx node runtime-properties map.key1 %key1%
+            ctx node runtime-properties map.key2 %key2%
             ''')
         props = self._run(
             executor, workflow_context,
@@ -80,10 +80,10 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties map.cwd $PWD
+            ctx node runtime-properties map.cwd $PWD
             ''',
             windows_script='''
-            ctx node-instance runtime-properties map.cwd %CD%
+            ctx node runtime-properties map.cwd %CD%
             ''')
         tmpdir = str(tmpdir)
         props = self._run(
@@ -96,7 +96,7 @@ class TestLocalRunScript(object):
         assert p_map['cwd'] == tmpdir

     def test_process_command_prefix(self, executor, workflow_context, tmpdir):
-        use_ctx = 'ctx node-instance runtime-properties map.key value'
+        use_ctx = 'ctx node runtime-properties map.key value'
         python_script = ['import subprocess',
                          'subprocess.Popen("{0}".split(' ')).communicate()[0]'.format(use_ctx)]
         python_script = '\n'.join(python_script)
@@ -120,12 +120,12 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties map.arg1 "$1"
-            ctx node-instance runtime-properties map.arg2 $2
+            ctx node runtime-properties map.arg1 "$1"
+            ctx node runtime-properties map.arg2 $2
             ''',
             windows_script='''
-            ctx node-instance runtime-properties map.arg1 %1
-            ctx node-instance runtime-properties map.arg2 %2
+            ctx node runtime-properties map.arg1 %1
+            ctx node runtime-properties map.arg2 %2
             ''')
         props = self._run(
             executor, workflow_context,
@@ -186,7 +186,7 @@ class TestLocalRunScript(object):
         script = '''
from aria.orchestrator.execution_plugin import ctx, inputs
if __name__ == '__main__':
-    ctx.node_instance.runtime_properties['key'] = inputs['key']
+    ctx.node.runtime_properties['key'] = inputs['key']
'''
         suffix = '.py'
         script_path = self._create_script(
@@ -208,10 +208,10 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties key "${input_as_env_var}"
+            ctx node runtime-properties key "${input_as_env_var}"
             ''',
             windows_script='''
-            ctx node-instance runtime-properties key "%input_as_env_var%"
+            ctx node runtime-properties key "%input_as_env_var%"
             ''')
         props = self._run(
             executor, workflow_context,
@@ -226,10 +226,10 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties key "${input_as_env_var}"
+            ctx node runtime-properties key "${input_as_env_var}"
             ''',
             windows_script='''
-            ctx node-instance runtime-properties key "%input_as_env_var%"
+            ctx node runtime-properties key "%input_as_env_var%"
             ''')

         props = self._run(
@@ -248,10 +248,10 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties nonexistent
+            ctx node runtime-properties nonexistent
             ''',
             windows_script='''
-            ctx node-instance runtime-properties nonexistent
+            ctx node runtime-properties nonexistent
             ''')
         exception = self._run_and_get_task_exception(
             executor, workflow_context,
@@ -462,7 +462,7 @@ if __name__ == '__main__':
         script_path = os.path.basename(local_script_path) if local_script_path else None
         if script_path:
             workflow_context.resource.deployment.upload(
-                entry_id=str(workflow_context.deployment.id),
+                entry_id=str(workflow_context.service_instance.id),
                 source=local_script_path,
                 path=script_path)

@@ -476,13 +476,18 @@ if __name__ == '__main__':
         @workflow
         def mock_workflow(ctx, graph):
             op = 'test.op'
-            node_instance = ctx.model.node_instance.get_by_name(
-                mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-            node_instance.node.operations[op] = {
-                'operation': '{0}.{1}'.format(operations.__name__,
-                                              operations.run_script_locally.__name__)}
+            node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+            node.interfaces = [mock.models.get_interface(
+                op,
+                operation_kwargs=dict(implementation='{0}.{1}'.format(
+                    operations.__name__,
+                    operations.run_script_locally.__name__))
+            )]
+            # node.operations[op] = {
+            #     'operation': '{0}.{1}'.format(operations.__name__,
+            #                                   operations.run_script_locally.__name__)}
             graph.add_tasks(api.task.OperationTask.node_instance(
-                instance=node_instance,
+                instance=node,
                 name=op,
                 inputs=inputs))
             return graph
@@ -492,7 +497,7 @@ if __name__ == '__main__':
             workflow_context=workflow_context,
             tasks_graph=tasks_graph)
         eng.execute()
-        return workflow_context.model.node_instance.get_by_name(
+        return workflow_context.model.node.get_by_name(
             mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties

     @pytest.fixture
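The test_local.py changes show the same rename from the script side: the `ctx node-instance` proxy namespace becomes `ctx node`, and in Python operations `ctx.node_instance.runtime_properties` becomes `ctx.node.runtime_properties`. A sketch of an operation written against the renamed attribute, assuming the `operation` decorator is importable from `aria.orchestrator` as in these tests (the function name and value are illustrative):

    from aria.orchestrator import operation

    @operation
    def store_ip(ctx, **_):
        # Runtime properties now hang off ctx.node, mirroring the
        # 'ctx node runtime-properties ...' form used in the scripts above.
        ctx.node.runtime_properties['ip'] = '127.0.0.1'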

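For reviewers tracking the vocabulary shift, the renames applied across these test files can be summarized as follows; this quick reference is inferred from the hunks in this commit only and is not an exhaustive API map:

    # old name                                       -> new name (per this commit)
    RENAMES = {
        'blueprint':                                    'service_template',
        'deployment':                                   'service_instance',
        'node':                                         'node_template',
        'node_instance':                                'node',
        'relationship_instance':                        'relationship',
        'task.operation_mapping':                       'task.implementation',
        'operation_end=OperationTask.SOURCE_OPERATION': "edge='source'",
        'relationship_lifecycle.postconfigure':         'relationship_lifecycle.post_configure',
    }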