text
stringlengths
81
112k
def get_component_types(topic_id, remoteci_id, db_conn=None):
    """Returns either the topic component types or the remoteci
    configuration's component types.

    :param topic_id: id of the topic to read component types from
    :param remoteci_id: id of the remoteci whose configuration may override
        the topic's component types
    :param db_conn: optional database connection (defaults to the request's
        flask.g.db_conn)
    :return: a tuple (component_types, rconfiguration)
    """
    db_conn = db_conn or flask.g.db_conn
    rconfiguration = remotecis.get_remoteci_configuration(
        topic_id, remoteci_id, db_conn=db_conn)

    # Fall back to the topic's component types when the remoteci has no
    # configuration, or the configuration does not define any types.
    has_rconf_types = (rconfiguration is not None and
                       rconfiguration['component_types'] is not None)
    if has_rconf_types:
        component_types = rconfiguration['component_types']
    else:
        component_types = get_component_types_from_topic(topic_id,
                                                         db_conn=db_conn)
    return component_types, rconfiguration
def get_last_components_by_type(component_types, topic_id, db_conn=None):
    """For each component type of a topic, get the last one.

    :param component_types: iterable of component type names to look up
    :param topic_id: id of the topic the components must belong to
    :param db_conn: optional database connection (defaults to flask.g.db_conn)
    :return: list of component ids, one per requested type
    :raises dci_exc.DCIException: (412) when a type has no exported active
        component, or when a type resolves to an already-selected component
    """
    db_conn = db_conn or flask.g.db_conn
    schedule_components_ids = []
    for ct in component_types:
        where_clause = sql.and_(
            models.COMPONENTS.c.type == ct,
            models.COMPONENTS.c.topic_id == topic_id,
            models.COMPONENTS.c.export_control == True,  # noqa
            models.COMPONENTS.c.state == 'active')
        # most recent component first; fetchone() then picks the latest
        query = (sql.select([models.COMPONENTS.c.id])
                 .where(where_clause)
                 .order_by(sql.desc(models.COMPONENTS.c.created_at)))
        row = db_conn.execute(query).fetchone()
        if row is None:
            msg = 'Component of type "%s" not found or not exported.' % ct
            raise dci_exc.DCIException(msg, status_code=412)
        latest_id = row[0]
        if latest_id in schedule_components_ids:
            msg = ('Component types %s malformed: type %s duplicated.' %
                   (component_types, ct))
            raise dci_exc.DCIException(msg, status_code=412)
        schedule_components_ids.append(latest_id)
    return schedule_components_ids
def verify_and_get_components_ids(topic_id, components_ids, component_types,
                                  db_conn=None):
    """Process some verifications of the provided components ids.

    :param topic_id: id of the topic the components must belong to
    :param components_ids: list of component ids provided by the caller
    :param component_types: list of expected component types
    :param db_conn: optional database connection (defaults to flask.g.db_conn)
    :return: the validated components_ids list, unchanged
    :raises dci_exc.DCIException: (412) on count mismatch, unknown/unexported
        component, or duplicated component type
    """
    db_conn = db_conn or flask.g.db_conn
    if len(components_ids) != len(component_types):
        msg = ('The number of component ids does not match the number '
               'of component types %s' % component_types)
        raise dci_exc.DCIException(msg, status_code=412)

    # fetch each component and make sure no type appears twice
    seen_types = set()
    for c_id in components_ids:
        clause = sql.and_(models.COMPONENTS.c.id == c_id,
                          models.COMPONENTS.c.topic_id == topic_id,
                          models.COMPONENTS.c.export_control == True,  # noqa
                          models.COMPONENTS.c.state == 'active')
        row = db_conn.execute(
            sql.select([models.COMPONENTS]).where(clause)).fetchone()
        if row is None:
            msg = 'Component id %s not found or not exported' % c_id
            raise dci_exc.DCIException(msg, status_code=412)
        cmpt = dict(row)
        if cmpt['type'] in seen_types:
            msg = ('Component types malformed: type %s duplicated.' %
                   cmpt['type'])
            raise dci_exc.DCIException(msg, status_code=412)
        seen_types.add(cmpt['type'])
    return components_ids
def retrieve_tags_from_component(user, c_id):
    """Retrieve all tags attached to a component.

    :param user: the authenticated user (unused here)
    :param c_id: id of the component whose tags are listed
    :return: a flask JSON response with the tags and their count
    """
    join_table = models.JOIN_COMPONENTS_TAGS
    query = (sql.select([models.TAGS])
             .select_from(join_table.join(models.TAGS))
             .where(join_table.c.component_id == c_id))
    rows = flask.g.db_conn.execute(query)
    return flask.jsonify({'tags': rows,
                          '_meta': {'count': rows.rowcount}})
def add_tag_for_component(user, c_id):
    """Add a tag on a specific component.

    :param user: the authenticated user (unused here)
    :param c_id: id of the component to tag
    :return: a 201 JSON response carrying the created association
    """
    # make sure the component exists before creating the association
    v1_utils.verify_existence_and_get(c_id, _TABLE)
    component_tagged = tags.add_tag_to_resource(
        {'component_id': c_id}, models.JOIN_COMPONENTS_TAGS)
    return flask.Response(json.dumps(component_tagged), 201,
                          content_type='application/json')
def delete_tag_for_component(user, c_id, tag_id):
    """Delete a tag on a specific component.

    :param user: the authenticated user (unused here)
    :param c_id: id of the component the tag is attached to
    :param tag_id: id of the tag to detach
    :return: an empty 204 JSON response
    :raises dci_exc.DCICreationConflict: on integrity error during the delete
    """
    # TODO: check c_id and tag_id exist in db
    # BUG FIX: the previous code combined the two conditions with the Python
    # 'and' keyword instead of sql.and_(). Python 'and' does not build a SQL
    # AND: it evaluates the truthiness of the first clause and returns a
    # single operand, so one condition was silently dropped and the delete
    # could remove the tag association of other components.
    query = _TABLE_TAGS.delete().where(
        sql.and_(_TABLE_TAGS.c.tag_id == tag_id,
                 _TABLE_TAGS.c.component_id == c_id))
    try:
        flask.g.db_conn.execute(query)
    except sa_exc.IntegrityError:
        raise dci_exc.DCICreationConflict(_TABLE_TAGS.c.tag_id, 'tag_id')

    return flask.Response(None, 204, content_type='application/json')
def should_be_hidden_as_cause(exc):
    """Used everywhere to decide if some exception type should be displayed
    or hidden as the cause of an error."""
    # reduced traceback in case of HasWrongType (instance_of checks)
    from valid8.validation_lib.types import HasWrongType, IsWrongType
    hidden_types = (HasWrongType, IsWrongType)
    return isinstance(exc, hidden_types)
def is_error_of_type(exc, ref_type):
    """Helper function to determine if some exception is of some type, by
    also looking at its declared ``__cause__`` chain.

    :param exc: the exception instance to inspect
    :param ref_type: the exception type (or tuple of types) to look for
    :return: True if ``exc`` or any exception in its ``__cause__`` chain is
        an instance of ``ref_type``, False otherwise
    """
    if isinstance(exc, ref_type):
        return True

    # walk the explicit exception chain recursively
    cause = getattr(exc, '__cause__', None)
    if cause is not None:
        return is_error_of_type(cause, ref_type)

    # BUG FIX: the previous version fell through here and implicitly
    # returned None; return an explicit boolean instead.
    return False
def _failure_raiser(validation_callable,  # type: Callable
                    failure_type=None,    # type: Type[WrappingFailure]
                    help_msg=None,        # type: str
                    **kw_context_args):
    # type: (...) -> Callable
    """Wraps the provided validation function so that in case of failure it
    raises the given `failure_type` or a `WrappingFailure` with the given
    help message.

    :param validation_callable: the validation callable to wrap
    :param failure_type: an optional subclass of `WrappingFailure` that
        should be raised in case of failure, instead of `WrappingFailure`
    :param help_msg: an optional string help message for the raised
        `WrappingFailure` (if no failure_type is provided)
    :param kw_context_args: optional context arguments for the custom
        failure message
    :return: the wrapping callable
    """
    # the two customization means are mutually exclusive
    if failure_type is not None and help_msg is not None:
        raise ValueError('Only one of failure_type and help_msg can be set at the same time')

    # mini-lambda expressions are converted to plain functions if needed
    validation_callable = as_function(validation_callable)

    # a manual wrapper is used (rather than a signature-preserving decorator)
    # so that non-function callable objects are supported too
    def raiser(x):
        """ Wraps validation_callable to raise a failure_type_or_help_msg in case of failure """
        try:
            outcome = validation_callable(x)
        except Exception as e:
            # the exception becomes the validation outcome; its __cause__ is
            # set in the failure constructor so no 'raise ... from e' needed
            outcome = e

        if not result_is_success(outcome):
            exc_type = failure_type or WrappingFailure
            raise exc_type(wrapped_func=validation_callable,
                           wrong_value=x,
                           validation_outcome=outcome,
                           help_msg=help_msg,
                           **kw_context_args)

    # reuse the inner callable's name: this object's name is only ever used
    # in error messages, where the wrapping is already apparent
    raiser.__name__ = get_callable_name(validation_callable)

    # TODO consider transforming failure_raiser into a class
    return raiser
def _none_accepter(validation_callable  # type: Callable
                   ):
    # type: (...) -> Callable
    """Wraps the given validation callable to accept None values silently.

    When the wrapper receives None it returns True without calling
    `validation_callable`; any other value is validated as usual.

    Note: the created wrapper has the same name as the validation callable
    for more user-friendly error messages.

    :param validation_callable: the validation callable to wrap
    :return: the wrapping callable
    """
    # manual wrapper so that non-function callable objects are supported
    def accept_none(x):
        # None short-circuits to success; everything else is validated
        return True if x is None else validation_callable(x)

    # user-friendly name for error messages
    accept_none.__name__ = 'skip_on_none({})'.format(get_callable_name(validation_callable))
    return accept_none
def _none_rejecter(validation_callable  # type: Callable
                   ):
    # type: (...) -> Callable
    """Wraps the given validation callable to reject None values.

    When the wrapper receives None it raises a ValueIsNone failure without
    calling `validation_callable`; any other value is validated as usual.

    :param validation_callable: the validation callable to wrap
    :return: the wrapping callable
    """
    # manual wrapper so that non-function callable objects are supported
    def reject_none(x):
        if x is None:
            # None never reaches the inner callable
            raise ValueIsNone(wrong_value=x)
        return validation_callable(x)

    # keep the rejecting wrapper visible in error messages
    reject_none.__name__ = 'reject_none({})'.format(get_callable_name(validation_callable))
    return reject_none
def get_help_msg(self,
                 dotspace_ending=False,  # type: bool
                 **kwargs):
    # type: (...) -> str
    """The method used to get the formatted help message according to kwargs.

    By default it returns the 'help_msg' attribute, whether it is defined at
    the instance level or at the class level. The help message is formatted
    according to help_msg.format(**kwargs), and may be terminated with a dot
    and a space if dotspace_ending is set to True.

    :param dotspace_ending: True will append a dot and a space at the end of
        the message if it is not empty (default is False)
    :param kwargs: keyword arguments to format the help message
    :return: the formatted help message
    """
    context = self.get_context_for_help_msgs(kwargs)

    # nothing to format when no help message is defined
    if self.help_msg is None or len(self.help_msg) == 0:
        return ''

    # the context will be modified below, so work on a copy
    context = copy(context)
    try:
        help_msg = self.help_msg
        # replace values that are too long for display by a placeholder
        # before formatting the message
        for var in set(re.findall("{\S+}", help_msg)):
            name = var[1:-1]
            if name in context and len(str(context[name])) > self.__max_str_length_displayed__:
                alias = '@@@@' + name + '@@@@'
                help_msg = help_msg.replace('{' + name + '}', '{' + alias + '}')
                context[alias] = "(too big for display)"
        help_msg = help_msg.format(**context)
    except KeyError as e:
        # no need to raise from e, __cause__ is set in the constructor
        raise HelpMsgFormattingException(self.help_msg, e, context)

    # optionally terminate with '. '
    return end_with_dot_space(help_msg) if dotspace_ending else help_msg
def get_details(self):
    """The function called to get the details appended to the help message
    when self.append_details is True."""
    # avoid flooding the message with a huge value representation
    if len(str(self.wrong_value)) > self.__max_str_length_displayed__:
        return '(Actual value is too big to be printed in this message)'
    return 'Wrong value: [{}]'.format(self.wrong_value)
def get_details(self):
    """Overrides the method in Failure so as to add a few details about the
    wrapped function and outcome."""
    if isinstance(self.validation_outcome, Exception):
        # a Failure already mentions the wrong value in its own message,
        # so do not repeat it here
        if isinstance(self.validation_outcome, Failure):
            end_str = ''
        else:
            end_str = ' for value [{value}]'.format(value=self.wrong_value)
        return 'Function [{wrapped}] raised [{exception}: {details}]{end}.' \
               ''.format(wrapped=get_callable_name(self.wrapped_func),
                         exception=type(self.validation_outcome).__name__,
                         details=self.validation_outcome,
                         end=end_str)

    # the callable returned a non-success value instead of raising
    return 'Function [{wrapped}] returned [{result}] for value [{value}].' \
           ''.format(wrapped=get_callable_name(self.wrapped_func),
                     result=self.validation_outcome,
                     value=self.wrong_value)
def get_context_for_help_msgs(self, context_dict):
    """We override this method from HelpMsgMixIn to replace wrapped_func
    with its name."""
    # copy so the caller's dict is not mutated
    new_context = copy(context_dict)
    new_context['wrapped_func'] = get_callable_name(new_context['wrapped_func'])
    return new_context
def validate_field(cls,
                   field_name,
                   *validation_func,  # type: ValidationFuncs
                   **kwargs):
    # type: (...) -> Callable
    """A class decorator.

    It goes through all class variables and for all of those that are
    descriptors with a __set__, it wraps the descriptors' setter function
    with a `validate_arg` annotation.

    :param field_name: the name of the field to validate
    :param validation_func: the validation function(s) to apply
    :param help_msg: optional help message for validation errors
    :param error_type: optional ValidationError subclass to raise
    :param none_policy: describes how None values should be handled
    :param kw_context_args: optional context for the failure message
    :return: the decorated class
    """
    # all the work is delegated to the a-posteriori decoration helper
    return decorate_cls_with_validation(cls, field_name, *validation_func,
                                        **kwargs)
def validate_io(f=DECORATED,
                none_policy=None,       # type: int
                _out_=None,             # type: ValidationFuncs
                **kw_validation_funcs   # type: ValidationFuncs
                ):
    """A function decorator to add input validation prior to the function
    execution.

    It should be called with named arguments: for each function arg name,
    provide a single validation function or a list of validation functions
    to apply. If validation fails, an InputValidationError is raised with
    details about the function, the input name, and any further information
    available from the validation function(s).

    For example:

    ```
    def is_even(x):
        return x % 2 == 0

    def gt(a):
        def gt(x):
            return x >= a
        return gt

    @validate_io(a=[is_even, gt(1)], b=is_even)
    def myfunc(a, b):
        print('hello')
    ```

    will generate the equivalent of:

    ```
    def myfunc(a, b):
        gt1 = gt(1)
        if (is_even(a) and gt1(a)) and is_even(b):
            print('hello')
        else:
            raise InputValidationError(...)
    ```

    :param none_policy: describes how None values should be handled. See
        `NoneArgPolicy` for the various possibilities. Default is
        `NoneArgPolicy.ACCEPT_IF_OPTIONAl_ELSE_VALIDATE`.
    :param _out_: a validation function or list of validation functions to
        apply to the function output.
    :param kw_validation_funcs: keyword arguments: for each of the
        function's input names, the validation function or list of
        validation functions to use. A validation function may be a
        callable, a tuple(callable, help_msg_str), a
        tuple(callable, failure_type), or a list of several such elements.
        Nested lists indicate an implicit `and_`; tuples indicate an
        implicit `_failure_raiser`. mini_lambda expressions are converted
        to functions automatically.
    :return: the decorated function, performing input validation before
        executing the function's code every time it is executed.
    """
    # the heavy lifting is delegated to the multi-argument decoration helper
    return decorate_several_with_validation(f, none_policy=none_policy,
                                            _out_=_out_,
                                            **kw_validation_funcs)
def validate_arg(f,
                 arg_name,
                 *validation_func,  # type: ValidationFuncs
                 **kwargs
                 ):
    # type: (...) -> Callable
    """A decorator to apply function input validation for the given argument
    name, with the provided base validation function(s).

    You may use several such decorators on a given function as long as they
    are stacked on top of each other (no external decorator in the middle).

    :param arg_name: the name of the argument to validate
    :param validation_func: the base validation function or list of base
        validation functions to use. A callable, a
        tuple(callable, help_msg_str), a tuple(callable, failure_type), or a
        list of several such elements. Nested lists indicate an implicit
        `and_`; tuples indicate an implicit `_failure_raiser`. mini_lambda
        expressions are converted to functions automatically.
    :param error_type: a subclass of ValidationError to raise in case of
        validation failure. By default a ValidationError will be raised with
        the provided help_msg
    :param help_msg: an optional help message to be used in the raised error
        in case of validation failure
    :param none_policy: describes how None values should be handled. See
        `NoneArgPolicy` for the various possibilities. Default is
        `NoneArgPolicy.ACCEPT_IF_OPTIONAl_ELSE_VALIDATE`.
    :param kw_context_args: optional contextual information to store in the
        exception, and that may also be used to format the help message
    :return: a function decorator, able to transform a function into a
        function that will perform input validation before executing the
        function's code every time it is executed.
    """
    # delegate to the generic single-name decoration helper
    return decorate_with_validation(f, arg_name, *validation_func, **kwargs)
def validate_out(*validation_func,  # type: ValidationFuncs
                 **kwargs):
    # type: (...) -> Callable
    """A decorator to apply function output validation to this function's
    output, with the provided base validation function(s).

    You may use several such decorators on a given function as long as they
    are stacked on top of each other (no external decorator in the middle).

    :param validation_func: the base validation function or list of base
        validation functions to use. A callable, a
        tuple(callable, help_msg_str), a tuple(callable, failure_type), or a
        list of several such elements. Nested lists indicate an implicit
        `and_`; tuples indicate an implicit `_failure_raiser`. mini_lambda
        expressions are converted to functions automatically.
    :param none_policy: describes how None values should be handled. See
        `NoneArgPolicy` for the various possibilities. Default is
        `NoneArgPolicy.ACCEPT_IF_OPTIONAl_ELSE_VALIDATE`.
    :return: a function decorator, able to transform a function into a
        function that will perform output validation before returning, every
        time it is executed.
    """
    def decorate(f):
        # output validation is keyed under the special _OUT_KEY name
        return decorate_with_validation(f, _OUT_KEY, *validation_func,
                                        **kwargs)

    return decorate
def decorate_cls_with_validation(cls,
                                 field_name,        # type: str
                                 *validation_func,  # type: ValidationFuncs
                                 **kwargs):
    # type: (...) -> Type[Any]
    """
    This method is equivalent to decorating a class with the `@validate_field` decorator but can be used a posteriori.

    :param cls: the class to decorate
    :param field_name: the name of the argument to validate or _OUT_KEY for output validation
    :param validation_func: the validation function or list of validation functions to use. A validation function may
        be a callable, a tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such
        elements. Nested lists are supported and indicate an implicit `and_` (such as the main list). Tuples indicate
        an implicit `_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be
        used instead of callables, they will be transformed to functions automatically.
    :param error_type: a subclass of ValidationError to raise in case of validation failure. By default a
        ValidationError will be raised with the provided help_msg
    :param help_msg: an optional help message to be used in the raised error in case of validation failure.
    :param none_policy: describes how None values should be handled. See `NoneArgPolicy` for the various possibilities.
        Default is `NoneArgPolicy.ACCEPT_IF_OPTIONAL_ELSE_REJECT`.
    :param kw_context_args: optional contextual information to store in the exception, and that may be also used
        to format the help message
    :return: the decorated class, whose field `field_name` is now validated on every write (either through its
        descriptor's setter or through the constructor argument of the same name).
    """
    # pop the reserved keyword arguments; whatever remains in kwargs is used as exception context
    error_type, help_msg, none_policy = pop_kwargs(kwargs, [('error_type', None), ('help_msg', None),
                                                            ('none_policy', None)], allow_others=True)
    # the rest of keyword arguments is used as context.
    kw_context_args = kwargs

    if not isclass(cls):
        raise TypeError('decorated cls should be a class')

    if hasattr(cls, field_name):
        # ** A class field with that name exist. Is it a descriptor ?
        var = cls.__dict__[field_name]  # note: we cannot use getattr here
        if hasattr(var, '__set__') and callable(var.__set__):
            # data descriptor: validation is injected into the write path (the setter)
            if isinstance(var, property):
                # *** OLD WAY which was losing type hints and default values (see var.__set__ signature) ***
                # properties are special beasts: their methods are method-wrappers (CPython) and can not have
                # properties so we have to create a wrapper (sic) before sending it to the main wrapping function
                # def func(inst, value):
                #     var.__set__(inst, value)
                # *** NEW WAY : more elegant, use directly the setter provided by the user ***
                func = var.fset
                nb_args = 2  # a property setter has signature (self, value)
            elif ismethod(var.__set__):
                # bound method: normal. Let's access to the underlying function
                func = var.__set__.__func__
                nb_args = 3  # __set__ has signature (self, obj, val)
            else:
                # strange.. but lets try to continue
                func = var.__set__
                nb_args = 3

            # retrieve target function signature, check it and retrieve the 3d param
            # since signature is "def __set__(self, obj, val)"
            func_sig = signature(func)
            if len(func_sig.parameters) != nb_args:
                raise ValueError("Class field '{}' is a valid class descriptor for class '{}' but it does not implement"
                                 " __set__ with the correct number of parameters, so it is not possible to add "
                                 "validation to it. See https://docs.python.org/3.6/howto/descriptor.html".
                                 format(field_name, cls.__name__))

            # extract the correct name: the last parameter of the setter is the value being written
            descriptor_arg_name = list(func_sig.parameters.items())[-1][0]

            # do the same than in decorate_with_validation but with a class field validator
            # new_setter = decorate_with_validation(func, descriptor_arg_name, *validation_func, help_msg=help_msg,
            #                                       error_type=error_type, none_policy=none_policy,
            #                                       _clazz_field_name_=field_name, **kw_context_args)

            # --create the new validator
            none_policy = none_policy or NoneArgPolicy.SKIP_IF_NONABLE_ELSE_VALIDATE
            new_validator = _create_function_validator(func, func_sig, descriptor_arg_name, *validation_func,
                                                       none_policy=none_policy, error_type=error_type,
                                                       help_msg=help_msg, validated_class=cls,
                                                       validated_class_field_name=field_name, **kw_context_args)

            # -- create the new setter with validation
            new_setter = decorate_with_validators(func, func_signature=func_sig,
                                                  **{descriptor_arg_name: new_validator})

            # replace the old one
            if isinstance(var, property):
                # properties are special beasts 2: rebuild the property from the validated setter
                setattr(cls, field_name, var.setter(new_setter))
            else:
                # do not use type() for python 2 compat
                var.__class__.__set__ = new_setter

        elif (hasattr(var, '__get__') and callable(var.__get__)) \
                or (hasattr(var, '__delete__') and callable(var.__delete__)):
            # this is a descriptor but it does not have any setter method: impossible to validate
            raise ValueError("Class field '{}' is a valid class descriptor for class '{}' but it does not implement "
                             "__set__ so it is not possible to add validation to it. See "
                             "https://docs.python.org/3.6/howto/descriptor.html".format(field_name, cls.__name__))
        else:
            # this is not a descriptor: unsupported
            raise ValueError("Class field '{}.{}' is not a valid class descriptor, see "
                             "https://docs.python.org/3.6/howto/descriptor.html".format(cls.__name__, field_name))
    else:
        # ** No class field with that name exist
        # ? check for attrs ? > no specific need anymore, this is the same than annotating the constructor
        # if hasattr(cls, '__attrs_attrs__'): this was a proof of attrs-defined class

        # try to annotate the generated constructor
        try:
            init_func = cls.__init__
            if sys.version_info < (3, 0):
                try:
                    # python 2 - we have to access the inner `im_func`
                    init_func = cls.__init__.im_func
                except AttributeError:
                    pass
            cls.__init__ = decorate_with_validation(init_func, field_name, *validation_func, help_msg=help_msg,
                                                    _constructor_of_cls_=cls, error_type=error_type,
                                                    none_policy=none_policy, **kw_context_args)
        except InvalidNameError:
            # the field was not found
            # TODO should we also check if a __setattr__ is defined ?
            # (for __setattr__ see https://stackoverflow.com/questions/15750522/class-properties-and-setattr/15751159)
            # finally raise an error
            raise ValueError("@validate_field definition exception: field '{}' can not be found in class '{}', and it "
                             "is also not an input argument of the __init__ method.".format(field_name, cls.__name__))

    return cls
def decorate_several_with_validation(func,
                                     _out_=None,         # type: ValidationFuncs
                                     none_policy=None,   # type: int
                                     **validation_funcs  # type: ValidationFuncs
                                     ):
    # type: (...) -> Callable
    """
    Convenience shortcut applying `decorate_with_validation` once per entry: once for the output (if `_out_` is
    provided) and once per named input argument found in `validation_funcs`.

    Compared to calling `decorate_with_validation` directly, this form is less flexible:

    * no per-argument custom error message or error type can be given,
    * a single `none_policy` applies to every input and to the output.

    :param func: the function to decorate
    :param _out_: optional validation function(s) for the function output
    :param validation_funcs: one keyword per argument name, whose value is the validation function(s) for it
    :param none_policy: common None-handling policy for all inputs and output
    :return: a function decorated with validation for all of the listed arguments and output if provided.
    """
    # output first, if requested
    if _out_ is not None:
        func = decorate_with_validation(func, _OUT_KEY, _out_, none_policy=none_policy)

    # then one decoration pass per named input argument
    for name in validation_funcs:
        func = decorate_with_validation(func, name, validation_funcs[name], none_policy=none_policy)

    return func
def decorate_with_validation(func,
                             arg_name,          # type: str
                             *validation_func,  # type: ValidationFuncs
                             **kwargs):
    # type: (...) -> Callable
    """
    This method is the inner method used in `@validate_io`, `@validate_arg` and `@validate_out`.
    It can be used if you wish to perform decoration manually without a decorator.

    :param func: the function to decorate
    :param arg_name: the name of the argument to validate or _OUT_KEY for output validation
    :param validation_func: the validation function or list of validation functions to use. A validation function may
        be a callable, a tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such
        elements. Nested lists are supported and indicate an implicit `and_` (such as the main list). Tuples indicate
        an implicit `_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be
        used instead of callables, they will be transformed to functions automatically.
    :param error_type: a subclass of ValidationError to raise in case of validation failure. By default a
        ValidationError will be raised with the provided help_msg
    :param help_msg: an optional help message to be used in the raised error in case of validation failure.
    :param none_policy: describes how None values should be handled. See `NoneArgPolicy` for the various possibilities.
        Default is `NoneArgPolicy.ACCEPT_IF_OPTIONAL_ELSE_REJECT`.
    :param kw_context_args: optional contextual information to store in the exception, and that may be also used
        to format the help message
    :return: the decorated function, that will perform input validation (using `_assert_input_is_valid`) before
        executing the function's code every time it is executed.
    """
    # pop the reserved keyword arguments; `_constructor_of_cls_` is an internal flag set by
    # `decorate_cls_with_validation` when the decorated function is actually a class constructor
    error_type, help_msg, none_policy, _constructor_of_cls_ = pop_kwargs(kwargs,
                                                                         [('error_type', None),
                                                                          ('help_msg', None),
                                                                          ('none_policy', None),
                                                                          ('_constructor_of_cls_', None)],
                                                                         allow_others=True)
    # the rest of keyword arguments is used as context.
    kw_context_args = kwargs

    none_policy = none_policy or NoneArgPolicy.SKIP_IF_NONABLE_ELSE_VALIDATE

    # retrieve target function signature
    func_sig = signature(func)

    # create the new validator
    if _constructor_of_cls_ is None:
        # standard method: input validator
        new_validator = _create_function_validator(func, func_sig, arg_name, *validation_func,
                                                   none_policy=none_policy, error_type=error_type,
                                                   help_msg=help_msg, **kw_context_args)
    else:
        # class constructor: field validator (errors will mention the class field, not the argument)
        new_validator = _create_function_validator(func, func_sig, arg_name, *validation_func,
                                                   none_policy=none_policy, error_type=error_type,
                                                   help_msg=help_msg, validated_class=_constructor_of_cls_,
                                                   validated_class_field_name=arg_name, **kw_context_args)

    # decorate or update decorator with this new validator
    return decorate_with_validators(func, func_signature=func_sig, **{arg_name: new_validator})
def _get_final_none_policy_for_validator(is_nonable,  # type: bool
                                         none_policy  # type: NoneArgPolicy
                                         ):
    """
    Resolve the effective `NonePolicy` for one validator, from the requested policy and from whether the
    validated target is nonable (i.e. may legitimately hold None).

    :param is_nonable: True if the validated target accepts None
    :param none_policy: the requested policy, either an explicit `NonePolicy` or a conditional `NoneArgPolicy`
    :return: the resolved `NonePolicy`
    :raises ValueError: if `none_policy` is not a recognized policy
    """
    # explicit single-target policies pass through untouched
    if none_policy in {NonePolicy.VALIDATE, NonePolicy.SKIP, NonePolicy.FAIL}:
        return none_policy

    # conditional policies resolve according to the nonability of the target
    if none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_VALIDATE:
        return NonePolicy.SKIP if is_nonable else NonePolicy.VALIDATE
    if none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_FAIL:
        return NonePolicy.SKIP if is_nonable else NonePolicy.FAIL

    raise ValueError('Invalid none policy: ' + str(none_policy))
def decorate_with_validators(func,
                             func_signature=None,  # type: Signature
                             **validators          # type: Validator
                             ):
    """
    Utility method to decorate the provided function with the provided input and output Validator objects. Since this
    method takes Validator objects as argument, it is for advanced users.

    :param func: the function to decorate. It might already be decorated, this method will check it and wont create
        another wrapper in this case, simply adding the validators to the existing wrapper
    :param func_signature: the function's signature if it is already known (internal calls), otherwise it will be
        found again by inspection
    :param validators: a dictionary of arg_name (or _out_) => Validator or list of Validator
    :return: the wrapped function (or `func` itself if it was already wrapped)
    """
    # first turn the dictionary values into lists only
    for arg_name, validator in validators.items():
        if not isinstance(validator, list):
            validators[arg_name] = [validator]

    if hasattr(func, '__wrapped__') and hasattr(func.__wrapped__, '__validators__'):
        # ---- This function is already wrapped by our validation wrapper ----

        # Update the dictionary of validators with the new validator(s)
        for arg_name, validator in validators.items():
            for v in validator:
                if arg_name in func.__wrapped__.__validators__:
                    func.__wrapped__.__validators__[arg_name].append(v)
                else:
                    func.__wrapped__.__validators__[arg_name] = [v]

        # return the function, no need to wrap it further (it is already wrapped)
        return func

    else:
        # ---- This function is not yet wrapped by our validator. ----

        # Store the dictionary of validators as an attribute of the function
        if hasattr(func, '__validators__'):
            raise ValueError('Function ' + str(func) + ' already has a defined __validators__ attribute, valid8 '
                             'decorators can not be applied on it')
        else:
            try:
                func.__validators__ = validators
            except AttributeError:
                # e.g. builtin functions do not accept attribute assignment
                raise ValueError("Error - Could not add validators list to function '%s'" % func)

        # either reuse or recompute function signature
        func_signature = func_signature or signature(func)

        # create a wrapper with the same signature
        @wraps(func)
        def validating_wrapper(*args, **kwargs):
            """ This is the wrapper that will be called everytime the function is called """

            # (a) Perform input validation by applying `_assert_input_is_valid` on all received arguments
            apply_on_each_func_args_sig(func, args, kwargs, func_signature,
                                        func_to_apply=_assert_input_is_valid,
                                        func_to_apply_params_dict=func.__validators__)

            # (b) execute the function as usual
            res = func(*args, **kwargs)

            # (c) validate output if needed
            if _OUT_KEY in func.__validators__:
                for validator in func.__validators__[_OUT_KEY]:
                    validator.assert_valid(res)

            return res

        return validating_wrapper
Called by the `validating_wrapper` in the first step (a) `apply_on_each_func_args` for each function input before executing the function. It simply delegates to the validator. The signature of this function is hardcoded to correspond to `apply_on_each_func_args`'s behaviour and should therefore not be changed. :param input_value: the value to validate :param validator: the Validator object that will be applied on input_value_to_validate :param validated_func: the function for which this validation is performed. This is not used since the Validator knows it already, but we should not change the signature here. :param input_name: the name of the function input that is being validated :return: Nothing def _assert_input_is_valid(input_value, # type: Any validators, # type: List[InputValidator] validated_func, # type: Callable input_name # type: str ): """ Called by the `validating_wrapper` in the first step (a) `apply_on_each_func_args` for each function input before executing the function. It simply delegates to the validator. The signature of this function is hardcoded to correspond to `apply_on_each_func_args`'s behaviour and should therefore not be changed. :param input_value: the value to validate :param validator: the Validator object that will be applied on input_value_to_validate :param validated_func: the function for which this validation is performed. This is not used since the Validator knows it already, but we should not change the signature here. :param input_name: the name of the function input that is being validated :return: Nothing """ for validator in validators: validator.assert_valid(input_name, input_value)
def get_what_txt(self):
    """
    Overrides the base behaviour defined in ValidationError in order to add details about the validated
    function input.

    :return: a string of the form 'input [<var>] for function [<func>]'
    """
    func_name = self.validator.get_validated_func_display_name()
    return 'input [{var}] for function [{func}]'.format(var=self.get_variable_str(),
                                                        func=func_name)
def get_what_txt(self):
    """
    Overrides the base behaviour defined in ValidationError in order to add details about the validated
    class field.

    :return: a string of the form 'field [<field>] for class [<clazz>]'
    """
    cls_name = self.validator.get_validated_class_display_name()
    return 'field [{field}] for class [{clazz}]'.format(field=self.get_variable_str(),
                                                        clazz=cls_name)
def generate_nonce_timestamp():
    """ Generate unique nonce with counter, uuid and rng.

    The nonce is the concatenation of a monotonically increasing module-level
    counter (ASCII digits), a random UUID4 (16 bytes) and 30 bytes from the
    botan RNG, truncated to 41 bytes.

    NOTE(review): once the counter exceeds ~5 digits, truncation eats into the
    trailing RNG bytes; uniqueness still holds via counter+uuid — confirm this
    is intended.
    """
    global count  # module-level call counter, incremented on every invocation
    rng = botan.rng().get(30)  # 30 random bytes from the botan RNG
    uuid4 = uuid.uuid4().bytes  # 16 byte
    # counter digits + 16-byte uuid + 30-byte rng, then truncated below
    tmpnonce = (bytes(str(count).encode('utf-8'))) + uuid4 + rng
    nonce = tmpnonce[:41]  # 41 byte (328 bit)
    count += 1
    return nonce
def dict_merge(*dict_list):
    """Recursively merge the given dicts into a new dict.

    Unlike a plain ``a.update(b)``, when the same key maps to a dict in two
    inputs the two values are merged recursively; when both values are
    non-dict, non-string iterables they are concatenated; otherwise the
    later value wins.

    :param dict_list: dicts to merge, later ones taking precedence; None
        entries are treated as empty dicts
    :return: a new plain dict with the merged content
    """
    merged = collections.defaultdict(dict)
    pairs = itertools.chain(*[six.iteritems(d or {}) for d in dict_list])
    for key, value in pairs:
        current = merged[key]
        if isinstance(current, dict) and isinstance(value, dict):
            # both sides are dicts: merge them recursively
            merged[key] = dict_merge(current, value)
        elif isinstance(current, dict) or isinstance(current, six.text_type):
            # current is the empty placeholder (or a string): replace it
            merged[key] = value
        elif hasattr(current, "__iter__") and hasattr(value, "__iter__"):
            # two non-string iterables: concatenate
            merged[key] += value
        else:
            merged[key] = value
    return dict(merged)
def schedule_jobs(user):
    """Dispatch jobs to remotecis.

    The remoteci can use this method to request a new job.

    Before a job is dispatched, the server will flag as 'killed' all the
    running jobs that were associated with the remoteci. This is because they
    will never be finished.

    :param user: the authenticated remoteci identity
    :return: a 201 Flask response carrying the new job (or, for a dry run,
        only the scheduled components ids)
    :raises DCIException: (412) when the remoteci or a topic is not active
    """
    values = schemas.job_schedule.post(flask.request.json)
    values.update({
        'id': utils.gen_uuid(),
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'etag': utils.gen_etag(),
        'status': 'new',
        'remoteci_id': user.id,
        'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
        'client_version': flask.request.environ.get(
            'HTTP_CLIENT_VERSION'
        ),
    })
    topic_id = values.pop('topic_id')
    topic_id_secondary = values.pop('topic_id_secondary')
    components_ids = values.pop('components_ids')

    # check remoteci
    remoteci = v1_utils.verify_existence_and_get(user.id, models.REMOTECIS)
    if remoteci['state'] != 'active':
        message = 'RemoteCI "%s" is disabled.' % remoteci['id']
        raise dci_exc.DCIException(message, status_code=412)

    # check primary topic
    topic = v1_utils.verify_existence_and_get(topic_id, models.TOPICS)
    if topic['state'] != 'active':
        msg = 'Topic %s:%s not active.' % (topic_id, topic['name'])
        raise dci_exc.DCIException(msg, status_code=412)
    v1_utils.verify_team_in_topic(user, topic_id)

    # check secondary topic
    if topic_id_secondary:
        topic_secondary = v1_utils.verify_existence_and_get(
            topic_id_secondary, models.TOPICS)
        if topic_secondary['state'] != 'active':
            # BUGFIX: the error message previously displayed the primary
            # topic's name (topic['name']) instead of the secondary one's
            msg = 'Topic %s:%s not active.' % (topic_id_secondary,
                                               topic_secondary['name'])
            raise dci_exc.DCIException(msg, status_code=412)
        v1_utils.verify_team_in_topic(user, topic_id_secondary)

    dry_run = values.pop('dry_run')
    if dry_run:
        # a dry run only resolves the components, no job is created
        component_types = components.get_component_types_from_topic(topic_id)
        components_ids = components.get_last_components_by_type(
            component_types, topic_id
        )
        return flask.Response(
            json.dumps({'components_ids': components_ids, 'job': None}),
            201,
            content_type='application/json'
        )

    # jobs still running on this remoteci will never finish: kill them first
    remotecis.kill_existing_jobs(remoteci['id'])

    values = _build_job(topic_id, remoteci, components_ids, values,
                        topic_id_secondary=topic_id_secondary)

    return flask.Response(json.dumps({'job': values}), 201,
                          headers={'ETag': values['etag']},
                          content_type='application/json')
def create_new_update_job_from_an_existing_job(user, job_id):
    """Create a new job in the same topic as the job_id provided and
    associate the latest components of this topic.

    :param user: the authenticated identity; must belong to the original
        job's team
    :param job_id: id of the job to update
    :return: a 201 Flask response carrying the new job
    """
    new_job = {
        'id': utils.gen_uuid(),
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'etag': utils.gen_etag(),
        'status': 'new'
    }

    previous_job_id = job_id
    previous_job = v1_utils.verify_existence_and_get(previous_job_id,
                                                     models.JOBS)
    if not user.is_in_team(previous_job['team_id']):
        raise dci_exc.Unauthorized()

    # the new job runs on the same remoteci as the original one
    remoteci_id = str(previous_job['remoteci_id'])
    remoteci = v1_utils.verify_existence_and_get(remoteci_id,
                                                 models.REMOTECIS)
    new_job.update({'remoteci_id': remoteci_id})

    # ... and in the same topic
    topic_id = str(previous_job['topic_id'])
    v1_utils.verify_existence_and_get(topic_id, models.TOPICS)

    new_job.update({
        'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
        'client_version': flask.request.environ.get(
            'HTTP_CLIENT_VERSION'
        ),
    })

    new_job = _build_job(topic_id, remoteci, [], new_job,
                         update_previous_job_id=previous_job_id)

    return flask.Response(json.dumps({'job': new_job}), 201,
                          headers={'ETag': new_job['etag']},
                          content_type='application/json')
def create_new_upgrade_job_from_an_existing_job(user):
    """Create a new job in the 'next topic' of the topic of
    the provided job_id.

    The caller must belong to the original job's team; the original job's
    topic must declare a `next_topic_id`.
    """
    values = schemas.job_upgrade.post(flask.request.json)
    values.update({
        'id': utils.gen_uuid(),
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'etag': utils.gen_etag(),
        'status': 'new'
    })

    original_job_id = values.pop('job_id')
    original_job = v1_utils.verify_existence_and_get(original_job_id,
                                                     models.JOBS)
    if not user.is_in_team(original_job['team_id']):
        raise dci_exc.Unauthorized()

    # get the remoteci (the upgrade job runs on the same one)
    remoteci_id = str(original_job['remoteci_id'])
    remoteci = v1_utils.verify_existence_and_get(remoteci_id,
                                                 models.REMOTECIS)
    values.update({'remoteci_id': remoteci_id})

    # get the associated topic
    topic_id = str(original_job['topic_id'])
    topic = v1_utils.verify_existence_and_get(topic_id, models.TOPICS)

    values.update({
        'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
        'client_version': flask.request.environ.get(
            'HTTP_CLIENT_VERSION'
        ),
    })

    next_topic_id = topic['next_topic_id']
    if not next_topic_id:
        raise dci_exc.DCIException(
            "topic %s does not contains a next topic" % topic_id)
    # instantiate a new job in the next_topic_id
    # todo(yassine): make possible the upgrade to choose specific components
    values = _build_job(next_topic_id, remoteci, [], values,
                        previous_job_id=original_job_id)

    return flask.Response(json.dumps({'job': values}), 201,
                          headers={'ETag': values['etag']},
                          content_type='application/json')
def get_all_jobs(user, topic_id=None):
    """Get all jobs.

    If topic_id is not None, then return all the jobs with a topic
    pointed by topic_id.

    :param user: the authenticated identity; non-admin, non-read-only users
        only see their own teams' jobs
    :param topic_id: optional topic id to filter on
    :return: a JSON response with the jobs list and a count
    """
    # get the diverse parameters
    args = schemas.args(flask.request.args.to_dict())
    # build the query thanks to the QueryBuilder class
    query = v1_utils.QueryBuilder(_TABLE, args, _JOBS_COLUMNS)

    # if not admin nor rh employee then restrict the view to the user's teams
    if user.is_not_super_admin() and not user.is_read_only_user():
        team_filter = sql.or_(_TABLE.c.team_id.in_(user.teams_ids),
                              _TABLE.c.team_id.in_(user.child_teams_ids))
        query.add_extra_condition(team_filter)

    # optional topic filter
    if topic_id is not None:
        query.add_extra_condition(_TABLE.c.topic_id == topic_id)

    # archived jobs are never listed
    query.add_extra_condition(_TABLE.c.state != 'archived')

    nb_jobs = query.get_number_of_rows()
    jobs = query.execute(fetchall=True)
    jobs = v1_utils.format_result(jobs, _TABLE.name, args['embed'],
                                  _EMBED_MANY)

    return flask.jsonify({'jobs': jobs, '_meta': {'count': nb_jobs}})
def update_job_by_id(user, job_id):
    """Update a job.

    Uses optimistic concurrency: the If-Match etag must match the stored one
    or the update is rejected with a conflict. A status change also records a
    new jobstate, and final statuses trigger a job event.
    """
    # get If-Match header
    if_match_etag = utils.check_and_get_etag(flask.request.headers)

    # get the diverse parameters
    values = schemas.job.put(flask.request.json)

    job = v1_utils.verify_existence_and_get(job_id, _TABLE)
    job = dict(job)

    if not user.is_in_team(job['team_id']):
        raise dci_exc.Unauthorized()

    # Update jobstate if needed
    status = values.get('status')
    if status and job.get('status') != status:
        jobstates.insert_jobstate(user, {
            'status': status,
            'job_id': job_id
        })
        # final statuses also emit a job event
        if status in models.FINAL_STATUSES:
            jobs_events.create_event(job_id, status, job['topic_id'])

    # conditional update: only applies if the etag has not changed meanwhile
    where_clause = sql.and_(_TABLE.c.etag == if_match_etag,
                            _TABLE.c.id == job_id)

    values['etag'] = utils.gen_etag()
    query = _TABLE.update().returning(*_TABLE.columns).\
        where(where_clause).values(**values)

    result = flask.g.db_conn.execute(query)
    if not result.rowcount:
        # etag mismatch or job gone: concurrent modification
        raise dci_exc.DCIConflict('Job', job_id)

    return flask.Response(
        json.dumps({'job': result.fetchone()}), 200,
        headers={'ETag': values['etag']},
        content_type='application/json'
    )
def get_all_results_from_jobs(user, j_id):
    """Get all results from job.

    Returns one summary entry per tests_results row attached to the job.

    :param user: the authenticated identity; must be in the job's team or be
        a read-only user
    :param j_id: the job id
    :return: a JSON response with the results list and a count
    """
    job = v1_utils.verify_existence_and_get(j_id, _TABLE)

    if not user.is_in_team(job['team_id']) and not user.is_read_only_user():
        raise dci_exc.Unauthorized()

    # get testscases from tests_results
    query = sql.select([models.TESTS_RESULTS]). \
        where(models.TESTS_RESULTS.c.job_id == job['id'])
    all_tests_results = flask.g.db_conn.execute(query).fetchall()

    # fields copied verbatim from the row; 'filename' and 'name' both come
    # from the row's 'name' column
    copied_fields = ('total', 'failures', 'errors', 'skips', 'time',
                     'regressions', 'successfixes', 'success', 'file_id')

    results = []
    for row in all_tests_results:
        row = dict(row)
        entry = {field: row[field] for field in copied_fields}
        entry['filename'] = row['name']
        entry['name'] = row['name']
        results.append(entry)

    return flask.jsonify({'results': results,
                          '_meta': {'count': len(results)}})
def get_tags_from_job(user, job_id):
    """Retrieve all tags attached to a job.

    :param user: the authenticated identity; must be in the job's team or be
        a read-only user
    :param job_id: the job id
    :return: a JSON response with the tags list and a count
    """
    job = v1_utils.verify_existence_and_get(job_id, _TABLE)

    if not user.is_in_team(job['team_id']) and not user.is_read_only_user():
        raise dci_exc.Unauthorized()

    JTT = models.JOIN_JOBS_TAGS
    query = (sql.select([models.TAGS])
             .select_from(JTT.join(models.TAGS))
             .where(JTT.c.job_id == job_id))
    rows = flask.g.db_conn.execute(query).fetchall()

    # BUGFIX: the raw SQLAlchemy result object is not JSON serializable;
    # convert each row to a plain dict before jsonify-ing, and count the
    # materialized list instead of relying on the cursor's rowcount.
    tags = [dict(row) for row in rows]

    return flask.jsonify({'tags': tags, '_meta': {'count': len(tags)}})
def add_tag_to_job(user, job_id):
    """Add a tag to a job.

    :param user: the authenticated identity; must be in the job's team
    :param job_id: the job id
    :return: a 201 Flask response carrying the created association
    """
    job = v1_utils.verify_existence_and_get(job_id, _TABLE)
    if not user.is_in_team(job['team_id']):
        raise dci_exc.Unauthorized()

    tagged = tags.add_tag_to_resource({'job_id': job_id},
                                      models.JOIN_JOBS_TAGS)
    return flask.Response(json.dumps(tagged), 201,
                          content_type='application/json')
def delete_tag_from_job(user, job_id, tag_id):
    """Delete a tag from a job.

    :param user: the authenticated identity; must be in the job's team
    :param job_id: the job id
    :param tag_id: the tag id to detach
    :return: an empty 204 Flask response
    """
    join_table = models.JOIN_JOBS_TAGS

    job = v1_utils.verify_existence_and_get(job_id, _TABLE)
    if not user.is_in_team(job['team_id']):
        raise dci_exc.Unauthorized()

    v1_utils.verify_existence_and_get(tag_id, models.TAGS)

    delete_query = join_table.delete().where(
        sql.and_(join_table.c.tag_id == tag_id,
                 join_table.c.job_id == job_id))
    try:
        flask.g.db_conn.execute(delete_query)
    except sa_exc.IntegrityError:
        raise dci_exc.DCICreationConflict('tag', 'tag_id')

    return flask.Response(None, 204, content_type='application/json')
Return the parent of the given node, based on an internal dictionary mapping of child nodes to the child's parent required since ElementTree doesn't make info about node ancestry/parentage available. def _lookup_node_parent(self, node): """ Return the parent of the given node, based on an internal dictionary mapping of child nodes to the child's parent required since ElementTree doesn't make info about node ancestry/parentage available. """ # Basic caching of our internal ancestry dict to help performance if not node in self.CACHED_ANCESTRY_DICT: # Given node isn't in cached ancestry dictionary, rebuild this now ancestry_dict = dict( (c, p) for p in self._impl_document.getiterator() for c in p) self.CACHED_ANCESTRY_DICT = ancestry_dict return self.CACHED_ANCESTRY_DICT[node]
def _is_node_an_element(self, node):
    """
    Return True if the given node is an ElementTree Element, a fact that
    can be tricky to determine if the cElementTree implementation is used.

    :param node: candidate object to test
    :return: True if ``node`` is an Element, False otherwise
    """
    # Try the simplest approach first, works for plain old ElementTree
    if isinstance(node, BaseET.Element):
        return True
    # For cElementTree we need to be more cunning (or find a better way):
    # duck-type on the Element API instead of the class.
    # NOTE(review): `basestring` is Python 2 only — assumes a py3 shim is
    # defined elsewhere in this module; confirm.
    if hasattr(node, 'makeelement') and isinstance(node.tag, basestring):
        return True
    # Explicit fall-through: previously this returned None implicitly,
    # which is falsy but surprising for a predicate.
    return False
def xpath_on_node(self, node, xpath, **kwargs):
    """
    Return result of performing the given XPath query on the given node.

    All known namespace prefix-to-URI mappings in the document are
    automatically included in the XPath invocation.

    If an empty/default namespace (i.e. None) is defined, this is
    converted to the prefix name '_' so it can be used despite empty
    namespace prefixes being unsupported by XPath.
    """
    namespaces_dict = {}
    # caller-supplied mappings take precedence over the derived ones below
    if 'namespaces' in kwargs:
        namespaces_dict.update(kwargs['namespaces'])
    # Empty namespace prefix is not supported, convert to '_' prefix
    if None in namespaces_dict:
        default_ns_uri = namespaces_dict.pop(None)
        namespaces_dict['_'] = default_ns_uri
    # If no default namespace URI defined, use root's namespace (if any)
    if not '_' in namespaces_dict:
        root = self.get_impl_root(node)
        qname, ns_uri, prefix, local_name = self._unpack_name(
            root.tag, root)
        if ns_uri:
            namespaces_dict['_'] = ns_uri
    # Include XMLNS namespace if it's not already defined
    if not 'xmlns' in namespaces_dict:
        namespaces_dict['xmlns'] = nodes.Node.XMLNS_URI
    # delegate to ElementTree's findall with the assembled prefix map
    return node.findall(xpath, namespaces_dict)
def get_to_purge_archived_resources(user, table):
    """List the entries to be purged from the database.

    :param user: the authenticated identity; must be a super admin
    :param table: the SQLAlchemy table to inspect
    :return: a JSON response with the archived rows and a count
    """
    if user.is_not_super_admin():
        raise dci_exc.Unauthorized()

    rows = get_archived_resources(table)
    return flask.jsonify({table.name: rows,
                          '_meta': {'count': len(rows)}})
def purge_archived_resources(user, table):
    """Remove the entries to be purged from the database.

    :param user: the authenticated identity; must be a super admin
    :param table: the SQLAlchemy table to purge
    :return: an empty 204 Flask response
    """
    if user.is_not_super_admin():
        raise dci_exc.Unauthorized()

    # only rows explicitly flagged as archived are deleted
    delete_query = table.delete().where(
        sql.and_(table.c.state == 'archived'))
    flask.g.db_conn.execute(delete_query)

    return flask.Response(None, 204, content_type='application/json')
def refresh_api_secret(user, resource, table):
    """Refresh the resource API Secret.

    Generates a new secret for the resource using a conditional (etag-based)
    update, so a concurrent modification results in a conflict.

    :param user: the authenticated identity
    :param resource: the resource row (must carry 'id' and 'etag')
    :param table: the SQLAlchemy table the resource lives in
    :return: a JSON response with the resource id, old etag and new secret
    """
    # singular form of the table name, used in the conflict message
    resource_name = table.name[0:-1]

    new_values = {
        'api_secret': signature.gen_secret(),
        'etag': utils.gen_etag()
    }
    update_query = table.update().where(
        sql.and_(table.c.etag == resource['etag'],
                 table.c.id == resource['id'])).values(**new_values)

    result = flask.g.db_conn.execute(update_query)
    if not result.rowcount:
        raise dci_exc.DCIConflict(resource_name, resource['id'])

    res = flask.jsonify(({'id': resource['id'],
                          'etag': resource['etag'],
                          'api_secret': new_values['api_secret']}))
    res.headers.add_header('ETag', new_values['etag'])
    return res
def npm(package_json, output_file, pinned_file):
    """Generate a package.json file."""
    amd_build_deprecation_warning()
    try:
        version = get_distribution(current_app.name).version
    except DistributionNotFound:
        version = ''

    output = {
        'name': current_app.name,
        'version': make_semver(version) if version else version,
        'dependencies': {},
    }

    # Merge in the user-provided base file, if any (base file wins).
    if package_json:
        output = dict(output, **json.load(package_json))

    # Collect npm dependencies declared by the registered bundles.
    env = current_app.extensions['invenio-assets'].env
    output['dependencies'].update(extract_deps(env, click.echo))

    # Pinned dependencies always override resolved ones.
    if pinned_file:
        pinned = json.load(pinned_file).get('dependencies', {})
        output['dependencies'].update(pinned)

    # Default to <static folder>/package.json when no output file given.
    if output_file is None:
        if not os.path.exists(current_app.static_folder):
            os.makedirs(current_app.static_folder)
        output_file = open(
            os.path.join(current_app.static_folder, 'package.json'), 'w')

    click.echo('Writing {0}'.format(output_file.name))
    json.dump(output, output_file, indent=4)
    output_file.close()
def getAnalyses(self, **kwargs):
    """Returns a list of the latest root cause analysis results for a
        specified check.

    Optional Parameters:

        * limit -- Limits the number of returned results to the
            specified quantity.
                Type: Integer
                Default: 100

        * offset -- Offset for listing. (Requires limit.)
                Type: Integer
                Default: 0

        * time_from -- Return only results with timestamp of first test
            greater or equal to this value. Format is UNIX timestamp.
                Type: Integer
                Default: 0

        * time_to -- Return only results with timestamp of first test
            less or equal to this value. Format is UNIX timestamp.
                Type: Integer
                Default: Current Time

    Returned structure:
    [
        {
            'id'             : <Integer> Analysis id
            'timefirsttest'  : <Integer> Time of test that initiated the
                                  confirmation test
            'timeconfrimtest': <Integer> Time of the confirmation test
                                  that perfromed the error analysis
        },
        ...
    ]
    """
    # 'from'/'to' are reserved words in Python, so this method exposes
    # time_from/time_to and renames them here.  Membership tests (not
    # truthiness) are used so an explicit 0 timestamp is still renamed
    # instead of being forwarded under its invalid name.
    if 'time_from' in kwargs:
        kwargs['from'] = kwargs.pop('time_from')
    if 'time_to' in kwargs:
        kwargs['to'] = kwargs.pop('time_to')

    # Warn user about unhandled kwargs
    for key in kwargs:
        if key not in ['limit', 'offset', 'from', 'to']:
            sys.stderr.write('%s not a valid argument for analysis()\n'
                             % key)

    response = self.pingdom.request('GET', 'analysis/%s' % self.id,
                                    kwargs)

    return [PingdomAnalysis(self, x) for x in response.json()['analysis']]
def getDetails(self):
    """Fetch fresh check details, update this object, and return them."""
    response = self.pingdom.request('GET', 'checks/%s' % self.id)
    self.__addDetails__(response.json()['check'])
    # Return a fresh copy rather than the dict handed to __addDetails__
    # above, matching the long-standing behavior of this method.
    return response.json()['check']
def modify(self, **kwargs):
    """Modify settings for a check.

    Provided settings overwrite previous values; settings not provided
    stay the same as before the update.  To clear an existing value,
    provide an empty value.  Note that the type of a check cannot be
    changed once it has been created.

    General parameters:
        name, host, paused, resolution, contactids, sendtoemail,
        sendtosms, sendtotwitter, sendtoiphone, sendtoandroid,
        sendnotificationwhendown, notifyagainevery, notifywhenbackup,
        use_legacy_notifications, probe_filters

    Check-type specific parameters:
        HTTP       -- url, encryption, port, auth, shouldcontain,
                      shouldnotcontain, postdata, requestheader<NAME>
                      (shouldcontain and shouldnotcontain cannot be
                      combined; auth is 'user:password'; custom headers
                      are 'Header:Value')
        HTTPCustom -- url, encryption, port, auth, additionalurls
                      (colon-separated list of additional URLs with
                      hostname included)
        TCP        -- port, stringtosend, stringtoexpect
        DNS        -- expectedip, nameserver
        UDP        -- port, stringtosend, stringtoexpect
        SMTP       -- port, auth, stringtoexpect, encryption
        POP3       -- port, stringtoexpect, encryption
        IMAP       -- port, stringtoexpect, encryption
    """
    known_keys = ('paused', 'resolution', 'contactids', 'sendtoemail',
                  'sendtosms', 'sendtotwitter', 'sendtoiphone',
                  'sendnotificationwhendown', 'notifyagainevery',
                  'notifywhenbackup', 'created', 'type', 'hostname',
                  'status', 'lasterrortime', 'lasttesttime', 'url',
                  'encryption', 'port', 'auth', 'shouldcontain',
                  'shouldnotcontain', 'postdata', 'additionalurls',
                  'stringtosend', 'stringtoexpect', 'expectedip',
                  'nameserver', 'use_legacy_notifications', 'host',
                  'alert_policy', 'autoresolve', 'probe_filters')
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in known_keys:
            sys.stderr.write("'%s'" % key + ' is not a valid argument of' +
                             '<PingdomCheck>.modify()\n')

    # Using any legacy notification parameter requires the legacy flag
    # to be set as well.
    # https://github.com/KennethWilke/PingdomLib/issues/12
    if any(k in legacy_notification_parameters for k in kwargs):
        if ("use_legacy_notifications" in kwargs
                and kwargs["use_legacy_notifications"] != True):  # noqa: E712
            raise Exception("Cannot set legacy parameter when "
                            "use_legacy_notifications is not True")
        kwargs["use_legacy_notifications"] = True

    response = self.pingdom.request("PUT", 'checks/%s' % self.id, kwargs)

    return response.json()['message']
def averages(self, **kwargs):
    """Get the average time / uptime value for a specified check and
        time period.

    Optional parameters:

        * time_from -- Start time of period. Format is UNIX timestamp.
                Type: Integer
                Default: 0

        * time_to -- End time of period. Format is UNIX timestamp.
                Type: Integer
                Default: Current time

        * probes -- Filter to only use results from a list of probes.
            Format is a comma separated list of probe identifiers.
                Type: String
                Default: All probes

        * includeuptime -- Include uptime information.
                Type: Boolean
                Default: False

        * bycountry -- Split response times into country groups.
                Type: Boolean
                Default: False

        * byprobe -- Split response times into probe groups.
                Type: Boolean
                Default: False

    Returned structure:
    {
        'responsetime' :
        {
            'to'          : <Integer> Start time of period
            'from'        : <Integer> End time of period
            'avgresponse' : <Integer> Total average response time in
                               milliseconds
        },
        < More can be included with optional parameters >
    }
    """
    # 'from'/'to' are reserved words in Python, so this method exposes
    # time_from/time_to and renames them here.  Membership tests (not
    # truthiness) are used so an explicit 0 timestamp is still renamed
    # instead of being forwarded under its invalid name.
    if 'time_from' in kwargs:
        kwargs['from'] = kwargs.pop('time_from')
    if 'time_to' in kwargs:
        kwargs['to'] = kwargs.pop('time_to')

    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['from', 'to', 'probes', 'includeuptime',
                       'bycountry', 'byprobe']:
            sys.stderr.write("'%s'" % key + ' is not a valid argument of' +
                             '<PingdomCheck.averages()\n')

    response = self.pingdom.request('GET', 'summary.average/%s' % self.id,
                                    kwargs)

    return response.json()['summary']
def probes(self, fromtime, totime=None):
    """Return the probes that performed tests for this check during the
    period starting at ``fromtime`` (and ending at ``totime``, if given)."""
    params = {'from': fromtime}
    if totime:
        params['to'] = totime
    response = self.pingdom.request('GET', 'summary.probes/%s' % self.id,
                                    params)
    return response.json()['probes']
def publishPublicReport(self):
    """Activate the public report for this check; returns the API's
    status message."""
    response = self.pingdom.request('PUT', 'reports.public/%s' % self.id)
    return response.json()['message']
def removePublicReport(self):
    """Deactivate the public report for this check; returns the API's
    status message."""
    response = self.pingdom.request('DELETE',
                                    'reports.public/%s' % self.id)
    return response.json()['message']
def extract_deps(bundles, log=None):
    """Extract the npm dependencies from the bundles and their sub-bundles."""
    def _walk(bundle):
        # Depth-first traversal yielding every npm-dependency dict.
        if hasattr(bundle, 'npm'):
            yield bundle.npm
        for content in bundle.contents:
            if isinstance(content, BundleBase):
                for dep in _walk(content):
                    yield dep

    # Group every requested version range by package name.
    packages = defaultdict(list)
    for bundle in bundles:
        for dep in _walk(bundle):
            for pkg, version in dep.items():
                packages[pkg].append(version)

    # Resolve each package to the highest version satisfying any range.
    deps = {}
    for package, versions in packages.items():
        deps[package] = semver.max_satisfying(versions, '*', True)
        if log and len(versions) > 1:
            log('Warn: {0} version {1} resolved to: {2}'.format(
                repr(package), versions, repr(deps[package])
            ))
    return deps
def make_semver(version_str):
    """Make a semantic version from Python PEP440 version.

    Semantic versions does not handle post-releases.
    """
    v = parse_version(version_str)
    # NOTE(review): relies on the private `_version` attribute of the
    # parsed version object — confirm against the pinned packaging /
    # setuptools version before upgrading.
    release = v._version.release
    major = release[0]
    minor = release[1] if len(release) > 1 else 0
    patch = release[2] if len(release) > 2 else 0

    # Pre-release and dev markers become the semver "-prerelease" part.
    prerelease_parts = []
    if v._version.pre:
        prerelease_parts.append(''.join(str(x) for x in v._version.pre))
    if v._version.dev:
        prerelease_parts.append(''.join(str(x) for x in v._version.dev))
    prerelease = '.'.join(prerelease_parts)

    version = '{0}.{1}.{2}'.format(major, minor, patch)
    if prerelease:
        version += '-{0}'.format(prerelease)
    if v.local:
        version += '+{0}'.format(v.local)
    return version
def get_max_size(pool, num_option, item_length):
    """
    Calculate the max number of items that an option can store in the pool
    at a given time. This is to limit the pool size to POOL_SIZE.

    Args:
        pool (dict): answer pool
        num_option (int): total number of options available for the question
        item_length (int): the length of the item

    Returns:
        int: the max number of items that an option can have
    """
    max_items = POOL_SIZE / item_length
    # Reserve POOL_OPTION_MIN_SIZE slots per option, plus the slots already
    # consumed by options holding more than the reserved minimum.
    # NOTE(review): the literal 5 presumably equals POOL_OPTION_MIN_SIZE —
    # confirm and replace with the constant if so.
    # range() instead of Python-2-only xrange(), and a generator instead of
    # building a throwaway list for sum().
    existing = POOL_OPTION_MIN_SIZE * num_option + sum(
        max(0, len(pool.get(i, {})) - 5) for i in range(num_option))
    return int(max_items - existing)
def offer_answer(pool, answer, rationale, student_id, algo, options):
    """
    Submit a student answer to the answer pool.

    The answer may be kept in the pool depending on the selection
    algorithm.

    Args:
        pool (dict): answer pool, keyed by option index; each value maps
            student_id to algorithm-specific info
        answer (int): the option the student selected
        rationale (str): the rationale text
        student_id (str): student identifier
        algo (dict): the selection algorithm descriptor
        options (dict): the options available in the question

    Raises:
        UnknownChooseAnswerAlgorithm: when the algorithm is not recognized
    """
    name = algo['name']
    handlers = {'simple': offer_simple, 'random': offer_random}
    if name not in handlers:
        raise UnknownChooseAnswerAlgorithm()
    handlers[name](pool, answer, rationale, student_id, options)
def offer_simple(pool, answer, rationale, student_id, options):
    """
    The simple selection algorithm.

    When the pool for the chosen option is at capacity, a randomly
    selected existing entry is evicted to make room for the new
    student's entry.
    """
    existing = pool.setdefault(answer, {})
    if len(existing) >= get_max_size(pool, len(options), POOL_ITEM_LENGTH_SIMPLE):
        # list() so random.choice works on Python 3 as well, where
        # dict.keys() returns a non-indexable view.
        student_id_to_remove = random.choice(list(existing))
        del existing[student_id_to_remove]
    existing[student_id] = {}
    # No pool[answer] reassignment needed: setdefault already inserted
    # `existing` into the pool, and it is mutated in place.
def offer_random(pool, answer, rationale, student_id, options):
    """
    The random selection algorithm.

    Identical to the simple algorithm, so it simply delegates.
    """
    offer_simple(pool, answer, rationale, student_id, options)
def validate_seeded_answers_simple(answers, options, algo):
    """
    Check that the seeded answers cover every possible option.

    Args:
        answers (str): the answers to be checked
        options (dict): all options that should exist in the answers
        algo (str): selection algorithm

    Returns:
        None if everything is good. Otherwise, the missing option error
        message.
    """
    def _key(option):
        # Options are identified by their text, plus image URL if present.
        text = option.get('text')
        image = option.get('image_url')
        return text + image if image else text

    seen_options = {}
    for answer in answers:
        if answer:
            key = _key(options[answer['answer']])
            seen_options[key] = seen_options.get(key, 0) + 1

    missing_options = []
    index = 1
    for option in options:
        # 'n/a' options are placeholders and need no seed; they are also
        # skipped by the user-visible numbering.
        if option.get('text') != 'n/a':
            if not seen_options.get(_key(option), 0):
                missing_options.append(_('Option ') + str(index))
            index += 1

    if missing_options:
        return {'seed_error': _('Missing option seed(s): ')
                + ', '.join(missing_options)}

    return None
def validate_seeded_answers(answers, options, algo):
    """
    Validate seeded answers against the selection algorithm.

    Called when an instructor sets up the tool and provides seeded
    answers.  Verifies that enough seeds were provided for the chosen
    algorithm (one per option for 'simple', at least one for 'random'),
    since otherwise the first student would see no answers to compare
    against in the review step.

    Args:
        answers (list): list of dict that contain seeded answers
        options (dict): all options that should exist in the answers
        algo (str): selection algorithm

    Returns:
        None if successful, otherwise error message

    Raises:
        UnknownChooseAnswerAlgorithm: when the algorithm is not recognized
    """
    name = algo['name']
    if name == 'simple':
        return validate_seeded_answers_simple(answers, options, algo)
    if name == 'random':
        return validate_seeded_answers_random(answers)
    raise UnknownChooseAnswerAlgorithm()
def get_other_answers(pool, seeded_answers, get_student_item_dict, algo, options):
    """
    Select other students' answers from the answer pool or the seeded
    answers, based on the selection algorithm.

    Args:
        pool (dict): answer pool keyed by option index; each value maps
            student_id to algorithm-specific info
        seeded_answers (list): instructor-provided seeds, e.g.
            [{'answer': 0, 'rationale': 'rationale A'}, ...]
        get_student_item_dict (callable): returns the student item dict
        algo (dict): selection algorithm descriptor
        options (dict): answer options for the question

    Returns:
        dict: answers based on the selection algorithm

    Raises:
        UnknownChooseAnswerAlgorithm: when the algorithm is not recognized
    """
    # "#" (or an absent key) means: return as many responses as there
    # are options.
    if 'num_responses' not in algo or algo['num_responses'] == "#":
        num_responses = len(options)
    else:
        num_responses = int(algo['num_responses'])

    name = algo['name']
    if name == 'simple':
        return get_other_answers_simple(
            pool, seeded_answers, get_student_item_dict, num_responses)
    if name == 'random':
        return get_other_answers_random(
            pool, seeded_answers, get_student_item_dict, num_responses)
    raise UnknownChooseAnswerAlgorithm()
def get_other_answers_simple(pool, seeded_answers, get_student_item_dict, num_responses):
    """
    Get answers from others with simple algorithm, which picks one answer
    for each option.

    Args:
        see `get_other_answers`
        num_responses (int): the number of responses to be returned. This
            value may not be respected if there is not enough answers to
            return

    Returns:
        dict: answers based on the selection algorithm
    """
    ret = []
    # clean up answers so that all keys are int
    pool = {int(k): v for k, v in pool.items()}
    total_in_pool = len(seeded_answers)
    merged_pool = convert_seeded_answers(seeded_answers)
    student_id = get_student_item_dict()['student_id']
    # merge the dictionaries in the answer dictionary
    for key in pool:
        total_in_pool += len(pool[key])
        # if student_id has value, we assume the student just submitted an
        # answer. So removing it from total number in the pool
        if student_id in pool[key].keys():
            total_in_pool -= 1
        if key in merged_pool:
            merged_pool[key].update(pool[key].items())
        else:
            merged_pool[key] = pool[key]

    # remember which option+student_id is selected, so that we don't have
    # duplicates in the result
    selected = []

    # loop until we have enough answers to return
    while len(ret) < min(num_responses, total_in_pool):
        for option, students in merged_pool.items():
            student = student_id
            i = 0
            # Retry until we draw an answer that is neither the current
            # student's own nor one we already selected, giving up after
            # 100 attempts so the loop cannot spin forever.  There is at
            # least one seeded answer per option, so this normally
            # succeeds within a few tries.  (The previous condition used
            # `or i > 100`, which *kept* looping past 100 iterations and
            # stopped retrying on already-selected candidates.)
            while (student == student_id
                   or (str(option) + student) in selected) and i < 100:
                # list() so random.choice works on Python 3 dict views too
                student = random.choice(list(students))
                i += 1
            selected.append(str(option) + student)
            if student.startswith('seeded'):
                # seeded answer, get the rationale from local
                rationale = students[student]
            else:
                student_item = get_student_item_dict(student)
                submission = sas_api.get_answers_for_student(student_item)
                rationale = submission.get_rationale(0)
            ret.append({'option': option, 'rationale': rationale})

            # check if we have enough answers
            if len(ret) >= min(num_responses, total_in_pool):
                break

    return {"answers": ret}
def get_other_answers_random(pool, seeded_answers, get_student_item_dict, num_responses):
    """
    Get answers from others with random algorithm, which randomly selects
    answers from the pool.  A student may get three answers for option 1,
    or one answer for option 1 and two answers for option 2.

    Args:
        see `get_other_answers`
        num_responses (int): the number of responses to be returned. This
            value may not be respected if there is not enough answers to
            return

    Returns:
        dict: answers based on the selection algorithm
    """
    ret = []
    # clean up answers so that all keys are int
    pool = {int(k): v for k, v in pool.items()}
    seeded = {'seeded' + str(index): answer
              for index, answer in enumerate(seeded_answers)}

    # Merge seeded answer ids with student ids from the pool.  list() is
    # required for Python 3, where dict.keys() returns a view that does
    # not support concatenation (the previous code crashed there).
    merged_pool = list(seeded.keys())
    for key in pool:
        merged_pool += list(pool[key].keys())

    # shuffle
    random.shuffle(merged_pool)

    # get student identifier
    student_id = get_student_item_dict()['student_id']

    for student in merged_pool:
        if len(ret) >= num_responses:
            # have enough answers
            break
        elif student == student_id:
            # this is the student's answer so don't return it
            continue
        if student.startswith('seeded'):
            option = seeded[student]['answer']
            rationale = seeded[student]['rationale']
        else:
            student_item = get_student_item_dict(student)
            submission = sas_api.get_answers_for_student(student_item)
            rationale = submission.get_rationale(0)
            option = submission.get_vote(0)
        ret.append({'option': option, 'rationale': rationale})

    return {"answers": ret}
def convert_seeded_answers(answers):
    """
    Convert seeded answers into the dict format used for student answers.

    Args:
        answers (list): seeded answers

    Returns:
        dict: mapping of option index to {seed_id: rationale}, e.g.::

            {
                0: {'seeded0': 'rationaleA'},
                1: {'seeded1': 'rationaleB'},
            }
    """
    converted = {}
    for index, answer in enumerate(answers):
        bucket = converted.setdefault(answer['answer'], {})
        bucket['seeded' + str(index)] = answer['rationale']
    return converted
def upgradeProcessor1to2(oldProcessor):
    """
    Upgrade a version 1 batch processor to version 2.

    Version 2 processors no longer poll, so the idleInterval attribute
    is dropped and a 'scheduled' attribute is added to track interaction
    with the scheduler.  To keep upgraded processors from silently
    disappearing, each one is immediately scheduled to run so it can
    re-establish whatever state it needs to continue.  Because this
    introduces a dependency of all batch processors on an IScheduler
    powerup, a Scheduler (site store) or SubScheduler (substore) is
    installed if none is present.
    """
    newProcessor = oldProcessor.upgradeVersion(
        oldProcessor.typeName, 1, 2,
        busyInterval=oldProcessor.busyInterval)
    newProcessor.scheduled = extime.Time()

    store = newProcessor.store
    scheduler = iaxiom.IScheduler(store, None)
    if scheduler is None:
        if store.parent is None:
            # Only site stores have no parents.
            scheduler = Scheduler(store=store)
        else:
            # Substores get subschedulers.
            scheduler = SubScheduler(store=store)
        installOn(scheduler, store)

    # And set it up to run.
    scheduler.schedule(newProcessor, newProcessor.scheduled)

    return newProcessor
def processor(forType):
    """
    Create an Axiom Item type which is suitable to use as a batch
    processor for the given Axiom Item type.

    Processors created this way depend on a L{iaxiom.IScheduler} powerup
    on the on which store they are installed.

    @type forType: L{item.MetaItem}
    @param forType: The Axiom Item type for which to create a batch
        processor type.

    @rtype: L{item.MetaItem}
    @return: An Axiom Item type suitable for use as a batch processor.
        If such a type previously existed, it will be returned.
        Otherwise, a new type is created.
    """
    MILLI = 1000
    try:
        # One processor type per work-unit type; reuse a cached one.
        processor = _processors[forType]
    except KeyError:
        # Cache miss: build the processor type dynamically.
        def __init__(self, *a, **kw):
            # New processor instances power themselves up so the store
            # can find them through IBatchProcessor.
            item.Item.__init__(self, *a, **kw)
            self.store.powerUp(self, iaxiom.IBatchProcessor)

        attrs = {
            '__name__': 'Batch_' + forType.__name__,
            '__module__': forType.__module__,
            '__init__': __init__,
            '__repr__': lambda self: '<Batch of %s #%d>' % (reflect.qual(self.workUnitType), self.storeID),
            'schemaVersion': 2,
            'workUnitType': forType,
            'scheduled': attributes.timestamp(doc="""
            The next time at which this processor is scheduled to run.
            """, default=None),

            # MAGIC NUMBERS AREN'T THEY WONDERFUL?
            'busyInterval': attributes.integer(doc="", default=MILLI / 10),
            }
        # item.MetaItem is the metaclass: calling it creates the new
        # Item subclass, which is cached before returning.
        _processors[forType] = processor = item.MetaItem(
            attrs['__name__'],
            (item.Item, _BatchProcessorMixin),
            attrs)

        # Register the 1->2 schema upgrader for the freshly created type.
        registerUpgrader(
            upgradeProcessor1to2,
            _processors[forType].typeName,
            1, 2)
    return processor
def storeBatchServiceSpecialCase(st, pups):
    """
    Adapt a L{Store} to L{IBatchService}.

    For a substore, a simple wrapper delegating to the site store's
    L{IBatchService} powerup is returned.  C{None} is returned if C{st}
    has no L{BatchProcessingControllerService}.
    """
    if st.parent is not None:
        # Substore: delegate through to the site store's batch service.
        try:
            return _SubStoreBatchChannel(st)
        except TypeError:
            return None
    try:
        return service.IService(st).getServiceNamed(
            "Batch Processing Controller")
    except KeyError:
        return None
def mark(self):
    """
    Record this unit of work as failed: stamp the listener's lastRun and
    create a L{BatchProcessingError} so that the listener skips this item
    next time.
    """
    tracker = self.reliableListener
    tracker.lastRun = extime.Time()
    BatchProcessingError(
        store=tracker.store,
        processor=tracker.processor,
        listener=tracker.listener,
        item=self.workUnit,
        error=self.failure.getErrorMessage())
def run(self):
    """
    Process one unit of work through one listener.

    If there are more listeners or more work, reschedule this item to run
    again in C{self.busyInterval} milliseconds; otherwise unschedule it.

    @rtype: L{extime.Time} or C{None}
    @return: The next time at which to run this item (used by the
        scheduler for automatic rescheduling), or C{None} when there is
        no more work to do.
    """
    now = extime.Time()
    if not self.step():
        # Nothing left to do; returning None unschedules us.
        self.scheduled = None
    else:
        delta = datetime.timedelta(milliseconds=self.busyInterval)
        self.scheduled = now + delta
    return self.scheduled
def addReliableListener(self, listener, style=iaxiom.LOCAL):
    """
    Add the given Item to the set which will be notified of Items
    available for processing.

    Note: Each Item is processed synchronously.  Adding too many
    listeners to a single batch processor will cause the L{step} method
    to block while it sends notification to each listener.

    @param listener: An Item instance which provides a C{processItem}
        method.
    @param style: C{iaxiom.LOCAL} or C{iaxiom.REMOTE} — where the
        listener's work should be performed.

    @return: An Item representing L{listener}'s persistent tracking state.
    """
    # Idempotent: if this listener is already tracked, return its
    # existing tracking state.
    existing = self.store.findUnique(_ReliableListener,
                                     attributes.AND(_ReliableListener.processor == self,
                                                    _ReliableListener.listener == listener),
                                     default=None)
    if existing is not None:
        return existing

    # Initialize the new listener's marks from the newest existing work
    # unit (highest storeID); presumably forwardMark/backwardMark bracket
    # the already-seen range — TODO confirm against _ReliableListener.
    for work in self.store.query(self.workUnitType,
                                 sort=self.workUnitType.storeID.descending,
                                 limit=1):
        forwardMark = work.storeID
        backwardMark = work.storeID + 1
        break
    else:
        # No work units exist yet.
        forwardMark = 0
        backwardMark = 0

    # Make sure this processor is scheduled so the new listener gets run.
    if self.scheduled is None:
        self.scheduled = extime.Time()
        iaxiom.IScheduler(self.store).schedule(self, self.scheduled)

    return _ReliableListener(store=self.store,
                             processor=self,
                             listener=listener,
                             forwardMark=forwardMark,
                             backwardMark=backwardMark,
                             style=style)
def removeReliableListener(self, listener):
    """
    Remove a previously added listener, along with any failure records
    accumulated on its behalf.
    """
    trackerQuery = self.store.query(
        _ReliableListener,
        attributes.AND(_ReliableListener.processor == self,
                       _ReliableListener.listener == listener))
    trackerQuery.deleteFromStore()

    errorQuery = self.store.query(
        BatchProcessingError,
        attributes.AND(BatchProcessingError.processor == self,
                       BatchProcessingError.listener == listener))
    errorQuery.deleteFromStore()
def getReliableListeners(self):
    """
    Yield each listener which has been added to this batch processor.
    """
    trackers = self.store.query(_ReliableListener,
                                _ReliableListener.processor == self)
    for tracker in trackers:
        yield tracker.listener
def getFailedItems(self):
    """
    Yield a two-tuple (listener, item) for each work unit whose
    C{processItem} call raised an exception.
    """
    failures = self.store.query(BatchProcessingError,
                                BatchProcessingError.processor == self)
    for failure in failures:
        yield (failure.listener, failure.item)
def itemAdded(self):
    """
    Note that a new item of the type monitored by this batch processor is
    being added to the database.

    If any local listeners are registered and this processor is not
    already scheduled, schedule it.  If any remote listeners are
    registered, make sure the batch process is started.
    """
    def anyListeners(style):
        # limit=1: we only care whether at least one listener exists.
        return self.store.query(
            _ReliableListener,
            attributes.AND(_ReliableListener.processor == self,
                           _ReliableListener.style == style),
            limit=1).count()

    if anyListeners(iaxiom.LOCAL) and self.scheduled is None:
        self.scheduled = extime.Time()
        iaxiom.IScheduler(self.store).schedule(self, self.scheduled)

    if anyListeners(iaxiom.REMOTE):
        batchService = iaxiom.IBatchService(self.store, None)
        if batchService is not None:
            batchService.start()
def call(self, itemMethod):
    """
    Invoke the given bound item method in the batch process.

    Return a Deferred which fires when the method has been invoked.
    """
    # im_self / im_func.func_name are Python 2 bound-method attributes.
    # The method is identified by store path, store ID and name so the
    # batch process can re-resolve it in its own process.
    item = itemMethod.im_self
    method = itemMethod.im_func.func_name
    return self.batchController.getProcess().addCallback(
        CallItemMethod(storepath=item.store.dbdir,
                       storeid=item.storeID,
                       method=method).do)
def processWhileRunning(self):
    """
    Run tasks until stopService is called.

    Yields each step's result, then sleeps briefly between steps: a
    short delay while more work remains, a long one when idle.
    """
    for result, more in self.step():
        yield result
        if not self.running:
            break
        # Short nap when busy, long nap when idle.
        delay = 0.1 if more else 10.0
        yield task.deferLater(reactor, delay, lambda: None)
def getcols(sheetMatch=None, colMatch="Decay"):
    """
    Find, in every sheet (or every sheet whose name contains
    C{sheetMatch}), the first column whose description contains
    C{colMatch}, and report the matches.

    @param sheetMatch: substring sheet names must contain, or None to
        select all sheets.
    @param colMatch: substring to look for in each sheet's column
        descriptions.

    NOTE(review): despite the original docstring ("put it in a new sheet
    or book"), this function only locates and prints matches; the copy
    step is not implemented.
    """
    book = BOOK()
    if sheetMatch is None:
        matchingSheets = book.sheetNames
        print('all %d sheets selected '%(len(matchingSheets)))
    else:
        matchingSheets = [x for x in book.sheetNames if sheetMatch in x]
        print('%d of %d sheets selected matching "%s"'%(len(matchingSheets),len(book.sheetNames),sheetMatch))
    matchingSheetsWithCol = []
    for sheetName in matchingSheets:
        i = book.sheetNames.index(sheetName)  # position of this sheet in the book
        # record the first column whose description matches, if any
        for j, colName in enumerate(book.sheets[i].colDesc):
            if colMatch in colName:
                matchingSheetsWithCol.append((sheetName, j))
                break
        else:
            print(" no match in [%s]%s"%(book.bookName,sheetName))
    print("%d of %d of those have your column"%(len(matchingSheetsWithCol),len(matchingSheets)))
    # renamed loop variable from 'item' to avoid shadowing the axiom
    # 'item' module name used elsewhere in this file
    for match in matchingSheetsWithCol:
        print(match, match[0], match[1])
def upgradeStore(self, store):
    """
    Recursively upgrade C{store}.

    Upgrades every item in C{store} itself, then descends into each
    L{SubStore} it contains and upgrades that too.
    """
    self.upgradeEverything(store)
    # Presumably migrates the store to explicit oids — TODO confirm
    # against upgradeExplicitOid's definition.
    upgradeExplicitOid(store)
    for substore in store.query(SubStore):
        print 'Upgrading: {!r}'.format(substore)
        self.upgradeStore(substore.open())
def perform(self, store, count):
    """
    Upgrade C{store} performing C{count} upgrades per transaction.

    Any L{errors.ItemUpgradeError} is caught and a useful summary is
    printed instead of letting the failure propagate.

    @param store: the store to upgrade (recursively, via
        C{upgradeStore}).
    @param count: number of upgrades to perform per transaction.
    """
    self.count = count
    try:
        self.upgradeStore(store)
        print 'Upgrade complete'
    except errors.ItemUpgradeError as e:
        # Print the upgrader's own traceback, then a summary of the
        # item/type/versions involved.
        print 'Upgrader error:'
        e.originalFailure.printTraceback(file=sys.stdout)
        print self.errorMessageFormat % (
            e.oldType.typeName, e.storeID, e.oldType.schemaVersion,
            e.newType.schemaVersion)
def runcode(self, code):
    """
    Override L{code.InteractiveConsole.runcode} to run the code in a
    transaction unless the local C{autocommit} is currently set to a true
    value.

    @param code: the compiled code object to execute.
    """
    # BUGFIX: the 'code' parameter shadows the stdlib 'code' module, so
    # the original 'code.InteractiveConsole' was an attribute lookup on
    # the code *object* and raised AttributeError.  Import the class
    # explicitly to reach the real base implementation.
    from code import InteractiveConsole
    if not self.locals.get('autocommit', None):
        return self.locals['db'].transact(
            InteractiveConsole.runcode, self, code)
    return InteractiveConsole.runcode(self, code)
def namespace(self):
    """
    Return a dictionary representing the namespace which should be
    available to the user.

    Keys:
      - C{'db'}: this object's store.
      - C{'store'}: the module-level name C{store} — presumably the
        C{axiom.store} module imported at file top; NOTE(review):
        confirm this is intentional and not a typo for C{self.store}.
      - C{'autocommit'}: initially false; the user may set it truthy to
        bypass transactional execution (see C{runcode}).

    Note: the namespace is rebuilt (and C{autocommit} reset to False)
    each time this is called.
    """
    self._ns = {
        'db': self.store,
        'store': store,
        'autocommit': False,
        }
    return self._ns
def addAccount(self, siteStore, username, domain, password):
    """
    Create a new account in the given store.

    @param siteStore: A site Store to which login credentials will be
        added.
    @param username: Local part of the username for the credentials to
        add.
    @param domain: Domain part of the username for the credentials to
        add.
    @param password: Password for the credentials to add.

    @rtype: L{LoginAccount}
    @return: The added account.

    @raise usage.UsageError: if an account with that name already exists.
    """
    # Reuse the store's existing LoginSystem, installing one only if
    # none is present.
    loginSystem = None
    for loginSystem in siteStore.query(userbase.LoginSystem):
        break
    if loginSystem is None:
        loginSystem = self.installOn(siteStore)
    try:
        return loginSystem.addAccount(username, domain, password)
    except userbase.DuplicateUser:
        raise usage.UsageError("An account by that name already exists.")
def itemTypeWithSomeAttributes(attributeTypes):
    """
    Create a new L{Item} subclass whose schema has one attribute per entry
    in C{attributeTypes}, named C{attr_0} ... C{attr_N-1}.

    @param attributeTypes: a sequence of attribute-type factories (each is
        instantiated once with no arguments).

    @return: the new Item subclass, with a unique C{typeName}.
    """
    class SomeItem(Item):
        typeName = 'someitem_' + str(typeNameCounter())
        # Assigning through locals() works here because a class body's
        # namespace is a real dict in CPython.
        for i, attributeType in enumerate(attributeTypes):
            locals()['attr_' + str(i)] = attributeType()

    return SomeItem
def createSomeItems(store, itemType, values, counter):
    """
    Instantiate C{itemType} once per element of C{counter}, always in
    C{store} and always with the keyword arguments in C{values}.
    """
    for _ in counter:
        itemType(store=store, **values)
def save(self, commit=True):
    """
    Copy the cleaned form data onto the wrapped document instance and,
    unless C{commit} is false, persist it.

    @return: the (possibly saved) instance.
    """
    # Transfer each valid field's cleaned value onto the instance.
    for name, _field in iter_valid_fields(self._meta):
        setattr(self.instance, name, self.cleaned_data.get(name))
    if commit:
        self.instance.save()
    return self.instance
def transacted(func):
    """
    Decorator: wrap C{func} so that it runs in a transaction on the
    C{store} of its first argument (typically an Item method's C{self}).

    The returned callable's attributes mirror C{func}'s as closely as
    L{twisted.python.util.mergeFunctionMetadata} can make them.
    """
    def inTransaction(item, *args, **kwargs):
        return item.store.transact(func, item, *args, **kwargs)
    return mergeFunctionMetadata(func, inTransaction)
def dependentItems(store, tableClass, comparisonFactory):
    """
    Collect all the items that should be deleted when an item or items of
    a particular item type are deleted.

    @param tableClass: An L{Item} subclass.

    @param comparisonFactory: A one-argument callable taking an attribute
        and returning an L{iaxiom.IComparison} describing the items to
        collect.

    @return: An iterable of items to delete.
    """
    # Attributes registered for this exact type plus wildcard (None)
    # registrations.
    cascades = (_cascadingDeletes.get(tableClass, [])
                + _cascadingDeletes.get(None, []))
    for attr in cascades:
        for dependent in store.query(attr.type, comparisonFactory(attr)):
            yield dependent
def allowDeletion(store, tableClass, comparisonFactory):
    """
    Decide whether deletion of an item or items of a particular item type
    should be allowed to proceed.

    @param tableClass: An L{Item} subclass.

    @param comparisonFactory: A one-argument callable taking an attribute
        and returning an L{iaxiom.IComparison} describing the items to
        check.

    @return: C{False} if any registered disallowing attribute matches an
        existing item, otherwise C{True}.
    """
    blockers = _disallows.get(tableClass, []) + _disallows.get(None, [])
    for attr in blockers:
        # limit=1: one match is enough to veto the deletion.
        for _ in store.query(attr.type, comparisonFactory(attr), limit=1):
            return False
    return True
def declareLegacyItem(typeName, schemaVersion, attributes, dummyBases=()):
    """
    Generate a dummy subclass of Item that will have the given attributes,
    and the base Item methods, but no methods of its own.  This is for use
    with upgrading.

    @param typeName: a string, the Axiom TypeName to have attributes for.
    @param schemaVersion: an int, the (old) version of the schema this is a
        proxy for.
    @param attributes: a dict mapping {columnName: attr instance} describing
        the schema of C{typeName} at C{schemaVersion}.
    @param dummyBases: a sequence of 4-tuples of (baseTypeName,
        baseSchemaVersion, baseAttributes, baseBases) representing the dummy
        bases of this legacy class.

    @return: the generated (and memoized) dummy Item subclass.
    """
    # Memoize: one dummy class per (typeName, schemaVersion).
    if (typeName, schemaVersion) in _legacyTypes:
        return _legacyTypes[typeName, schemaVersion]
    if dummyBases:
        # BUGFIX: type() requires its bases argument to be a tuple; the
        # previous list comprehension made any call with dummyBases raise
        # TypeError.
        realBases = tuple(declareLegacyItem(*A) for A in dummyBases)
    else:
        realBases = (Item,)
    # Copy before mutating so the caller's dict is left untouched.
    attributes = attributes.copy()
    attributes['__module__'] = 'item_dummy'
    attributes['__legacy__'] = True
    attributes['typeName'] = typeName
    attributes['schemaVersion'] = schemaVersion
    result = type(str('DummyItem<%s,%d>' % (typeName, schemaVersion)),
                  realBases, attributes)
    assert result is not None, 'wtf, %r' % (type,)
    _legacyTypes[(typeName, schemaVersion)] = result
    return result
def empowerment(iface, priority=0):
    """
    Class decorator for indicating a powerup's powerup interfaces.

    The decorated class is also declared as implementing C{iface}.

    @type iface: L{zope.interface.Interface}
    @param iface: The powerup interface.

    @type priority: int
    @param priority: The priority the powerup will be installed at.
    """
    def decorate(cls):
        # Append (iface, priority) to any interfaces the class already
        # declares, preserving their order.
        declared = tuple(getattr(cls, 'powerupInterfaces', ()))
        cls.powerupInterfaces = declared + ((iface, priority),)
        implementer(iface)(cls)
        return cls
    return decorate
def powerUp(self, powerup, interface=None, priority=0):
    """
    Installs a powerup (e.g. plugin) on an item or store.

    Powerups will be returned in an iterator when queried for using the
    'powerupsFor' method, higher priorities first.  Pass POWERUP_BEFORE
    for something that should run before "normal" powerups and
    POWERUP_AFTER for something that should run after; unspecified
    priority is zero.

    Powerups will only be installed once on a given item.  Installing the
    same powerup for the same interface again merely adjusts its
    priority.

    If no interface is specified, the powerup's own declarations are
    used: the class's C{powerupInterfaces} attribute (a sequence of
    interfaces or (interface, priority) tuples), optionally transformed
    by a C{__getPowerupInterfaces__} method.

    @param powerup: an Item that implements C{interface} (if specified)
    @param interface: a zope interface, or None
    @param priority: An int; preferably either POWERUP_BEFORE,
        POWERUP_AFTER, or unspecified.

    @raise TypeError: if C{interface} is IPowerupIndirector — installing
        a powerup for IPowerupIndirector would be nonsensical.
    """
    if interface is None:
        # Install for everything the powerup itself declares.
        for iface, priority in powerup._getPowerupInterfaces():
            self.powerUp(powerup, iface, priority)
    elif interface is IPowerupIndirector:
        # NOTE(review): concatenating the powerup (an Item) onto a str
        # will itself raise TypeError before this message is built —
        # confirm whether the message is ever seen.
        raise TypeError(
            "You cannot install a powerup for IPowerupIndirector: "
            + powerup)
    else:
        # findOrCreate makes installation idempotent; re-powering up
        # only adjusts the connector's priority.
        forc = self.store.findOrCreate(_PowerupConnector,
                                       item=self,
                                       interface=unicode(qual(interface)),
                                       powerup=powerup)
        forc.priority = priority
def powerDown(self, powerup, interface=None):
    """
    Remove a powerup.

    If no interface is specified, the powerup's own declarations (its
    class's C{powerupInterfaces} attribute, optionally transformed by
    C{__getPowerupInterfaces__}) determine which interfaces it is
    removed for.  Note that if those declarations changed between
    powerUp and powerDown, more or fewer powerups may be removed than
    were installed.

    @raise ValueError: if the powerup was not installed for the given
        interface.
    """
    if interface is None:
        for interface, priority in powerup._getPowerupInterfaces():
            self.powerDown(powerup, interface)
    else:
        for cable in self.store.query(_PowerupConnector,
                                      AND(_PowerupConnector.item == self,
                                          _PowerupConnector.interface == unicode(qual(interface)),
                                          _PowerupConnector.powerup == powerup)):
            # At most one connector matches (powerUp uses findOrCreate);
            # delete it and we are done.
            cable.deleteFromStore()
            return
        # The query produced nothing: this powerup was never installed
        # for this interface.
        raise ValueError("Not powered up for %r with %r" % (interface, powerup))
def powerupsFor(self, interface):
    """
    Returns powerups installed using C{powerUp}, in order of descending
    priority.

    An in-memory powerup for the interface, if any, is yielded first.
    Powerups found to have been deleted — during this iteration, during
    an upgrader, or previously — are not returned, and their stale
    connectors are cleaned up.
    """
    inMemoryPowerup = self._inMemoryPowerups.get(interface, None)
    if inMemoryPowerup is not None:
        yield inMemoryPowerup
    if self.store is None:
        # Not stored; only in-memory powerups are possible.
        return
    name = unicode(qual(interface), 'ascii')
    for cable in self.store.query(
        _PowerupConnector,
        AND(_PowerupConnector.interface == name,
            _PowerupConnector.item == self),
        sort=_PowerupConnector.priority.descending):
        pup = cable.powerup
        if pup is None:
            # this powerup was probably deleted during an upgrader;
            # remove the dangling connector.
            cable.deleteFromStore()
        else:
            # Let an IPowerupIndirector substitute the object actually
            # yielded for this interface.
            indirector = IPowerupIndirector(pup, None)
            if indirector is not None:
                yield indirector.indirect(interface)
            else:
                yield pup
def interfacesFor(self, powerup):
    """
    Yield each interface for which the given powerup is installed on
    this object.

    This is not implemented for in-memory powerups; behavior with one
    is implementation-dependent.
    """
    connector = _PowerupConnector
    matches = self.store.query(
        connector,
        AND(connector.item == self, connector.powerup == powerup))
    for interfaceName in matches.getColumn('interface'):
        yield namedAny(interfaceName)
def _getPowerupInterfaces(self):
    """
    Collect powerup interfaces this object declares that it can be
    installed on.

    Entries in the class's C{powerupInterfaces} attribute may be either
    a bare interface (implying priority 0) or an (interface, priority)
    pair.  If the instance defines C{__getPowerupInterfaces__}, it may
    transform the collected list before it is returned.
    """
    powerupInterfaces = getattr(self.__class__, "powerupInterfaces", ())
    pifs = []
    for x in powerupInterfaces:
        if isinstance(x, type(Interface)):
            # just an interface; default priority 0
            pifs.append((x, 0))
        else:
            # an interface and a priority
            pifs.append(x)
    m = getattr(self, "__getPowerupInterfaces__", None)
    if m is not None:
        pifs = m(pifs)
        try:
            # Validate the hook's return value: each entry must unpack
            # as a 2-tuple.  NOTE(review): wrong-length tuples raise
            # ValueError (caught), but non-iterable entries raise
            # TypeError, which propagates — confirm intentional.
            pifs = [(i, p) for (i, p) in pifs]
        except ValueError:
            raise ValueError("return value from %r.__getPowerupInterfaces__"
                             " not an iterable of 2-tuples" % (self,))
    return pifs
Is this object currently valid as a reference? Objects which will be deleted in this transaction, or objects which are not in the same store are not valid. See attributes.reference.__get__. def _currentlyValidAsReferentFor(self, store): """ Is this object currently valid as a reference? Objects which will be deleted in this transaction, or objects which are not in the same store are not valid. See attributes.reference.__get__. """ if store is None: # If your store is None, you can refer to whoever you want. I'm in # a store but it doesn't matter that you're not. return True if self.store is not store: return False if self.__deletingObject: return False return True
Prepare each attribute in my schema for insertion into a given store, either by upgrade or by creation. This makes sure all references point to this store and all relative paths point to this store's files directory. def _schemaPrepareInsert(self, store): """ Prepare each attribute in my schema for insertion into a given store, either by upgrade or by creation. This makes sure all references point to this store and all relative paths point to this store's files directory. """ for name, atr in self.getSchema(): atr.prepareInsert(self, store)
def existingInStore(cls, store, storeID, attrs):
    """
    Create and return a new instance from a row from the store.

    @param store: the Store the row was loaded from.
    @param storeID: the row's store ID.
    @param attrs: a sequence of raw column values, in schema order.
    """
    # Bypass __init__: this item already exists in the database, so no
    # creation-time behavior should run.
    self = cls.__new__(cls)
    self.__justCreated = False
    self.__subinit__(__store=store,
                     storeID=storeID,
                     __everInserted=True)
    schema = self.getSchema()
    assert len(schema) == len(attrs), "invalid number of attributes"
    # Hand each raw value to its attribute for deserialization.
    for data, (name, attr) in zip(attrs, schema):
        attr.loaded(self, data)
    self.activate()
    return self
def getSchema(cls):
    """
    Return all persistent class attributes as a list of
    (name, SQLAttribute) pairs.

    The computed list is memoized by replacing C{cls.getSchema} with a
    staticmethod closed over the result, so later calls skip the scan.
    NOTE(review): the override lands on whichever class first calls
    this — confirm the interaction with subclasses is intended.
    """
    schema = []
    for name, atr in cls.__attributes__:
        # Resolve the descriptor against the class to get the actual
        # attribute object.
        atr = atr.__get__(None, cls)
        if isinstance(atr, SQLAttribute):
            schema.append((name, atr))
    cls.getSchema = staticmethod(lambda schema=schema: schema)
    return schema
def persistentValues(self):
    """
    Return a dictionary mapping each schema attribute name to its current
    value on this item — the values which will be/have been/are being
    stored in the database.
    """
    values = {}
    for attributeName, _attribute in self.getSchema():
        values[attributeName] = getattr(self, attributeName)
    return values