code
stringlengths
59
4.4k
docstring
stringlengths
5
7.69k
def SETB(cpu, dest):
    """Set byte if below.

    Writes 1 to *dest* when the carry flag (CF) is set, else 0.

    :param cpu: current CPU.
    :param dest: destination operand.
    """
    flag_value = Operators.ITEBV(dest.size, cpu.CF, 1, 0)
    dest.write(flag_value)
Sets byte if below. :param cpu: current CPU. :param dest: destination operand.
def get_topology_info(*args):
    """Synced API call to get topology information.

    Runs the async API call to completion on the singleton IOLoop and
    returns its result; logs the traceback and re-raises on failure.
    """
    loop = tornado.ioloop.IOLoop.instance()
    try:
        return loop.run_sync(lambda: API.get_topology_info(*args))
    except Exception:
        Log.debug(traceback.format_exc())
        raise
Synced API call to get topology information
def get_user(self, user_id, depth=1):
    """Retrieve a single user by ID.

    :param user_id: The unique ID of the user.
    :type user_id: ``str``
    :param depth: The depth of the response data.
    :type depth: ``int``
    """
    path = '/um/users/%s?depth=%s' % (user_id, str(depth))
    return self._perform_request(path)
Retrieves a single user by ID. :param user_id: The unique ID of the user. :type user_id: ``str`` :param depth: The depth of the response data. :type depth: ``int``
def _parse_documentclass(self): command = LatexCommand( 'documentclass', {'name': 'options', 'required': False, 'bracket': '['}, {'name': 'class_name', 'required': True, 'bracket': '{'}) try: parsed = next(command.parse(self._tex)) except StopItera...
Parse documentclass options. Sets the ``_document_options`` attribute.
def create_item(self, token, name, parent_id, **kwargs): parameters = dict() parameters['token'] = token parameters['name'] = name parameters['parentid'] = parent_id optional_keys = ['description', 'uuid', 'privacy'] for key in optional_keys: if key in kwargs:...
Create an item to the server. :param token: A valid token for the user in question. :type token: string :param name: The name of the item to be created. :type name: string :param parent_id: The id of the destination folder. :type parent_id: int | long :param desc...
def indexesOptional(f):
    """Decorate test methods with this to skip strict index checking.

    Records ``module.testclass.function`` in the module-level
    ``_NO_INDEX_CHECK_NEEDED`` set and returns *f* unchanged.
    """
    caller_name = inspect.stack()[1][3]
    full_name = '%s.%s.%s' % (f.__module__, caller_name, f.__name__)
    _NO_INDEX_CHECK_NEEDED.add(full_name)
    return f
Decorate test methods with this if you don't require strict index checking
def add_annotation_type(self, doc, annotation_type): if len(doc.annotations) != 0: if not self.annotation_type_set: self.annotation_type_set = True if validations.validate_annotation_type(annotation_type): doc.annotations[-1].annotation_type = anno...
Sets the annotation type. Raises CardinalityError if already set. OrderError if no annotator defined before. Raises SPDXValueError if invalid value.
def select(self, sql_string, cols, *args, **kwargs): working_columns = None if kwargs.get('columns') is not None: working_columns = kwargs.pop('columns') query = self._assemble_select(sql_string, cols, *args, *kwargs) return self._execute(query, working_columns=working_column...
Execute a SELECT statement :sql_string: An SQL string template :columns: A list of columns to be returned by the query :*args: Arguments to be passed for query parameters. :returns: Psycopg2 result
def monitor_wrapper(f, task_id, monitoring_hub_url, run_id, sleep_dur): def wrapped(*args, **kwargs): p = Process(target=monitor, args=(os.getpid(), task_id, monitoring_hub_url, run_id, sleep_dur)) p.start() try: return f(*args, **kwargs) finally: ...
Internal Wrap the Parsl app with a function that will call the monitor function and point it at the correct pid when the task begins.
def get_clan_image(self, obj: BaseAttrDict): try: badge_id = obj.clan.badge_id except AttributeError: try: badge_id = obj.badge_id except AttributeError: return 'https://i.imgur.com/Y3uXsgj.png' if badge_id is None: ...
Get the clan badge image URL Parameters --------- obj: official_api.models.BaseAttrDict An object that has the clan badge ID either in ``.clan.badge_id`` or ``.badge_id`` Can be a clan or a profile for example. Returns str
def init(self, hosts=None, cacert=None, client_cert=None, client_key=None): try: import etcd self.module = etcd except ImportError: pass if not self.module: return self._parse_jetconfig() hosts = env('PYCONFIG_ETCD_HOSTS', hosts) ...
Handle creating the new etcd client instance and other business. :param hosts: Host string or list of hosts (default: `'127.0.0.1:2379'`) :param cacert: CA cert filename (optional) :param client_cert: Client cert filename (optional) :param client_key: Client key filename (optional) ...
def _bufcountlines(filename, gzipped): if gzipped: fin = gzip.open(filename) else: fin = open(filename) nlines = 0 buf_size = 1024 * 1024 read_f = fin.read buf = read_f(buf_size) while buf: nlines += buf.count('\n') buf = read_f(buf_size) fin.close() r...
fast line counter. Used to quickly sum number of input reads when running link_fastqs to append files.
def route(bp, *args, **kwargs): kwargs['strict_slashes'] = kwargs.pop('strict_slashes', False) body = _validate_schema(kwargs.pop('_body', None)) query = _validate_schema(kwargs.pop('_query', None)) output = _validate_schema(kwargs.pop('marshal_with', None)) validate = kwargs.pop('validate', True) ...
Journey route decorator Enables simple serialization, deserialization and validation of Flask routes with the help of Marshmallow. :param bp: :class:`flask.Blueprint` object :param args: args to pass along to `Blueprint.route` :param kwargs: - :strict_slashes: Enable / disable strict slashes (...
def _handle_variant(self): def the_func(a_tuple, variant=0): (signature, an_obj) = a_tuple (func, sig) = self.COMPLETE.parseString(signature)[0] assert sig == signature (xformed, _) = func(an_obj, variant=variant + 1) return (xformed, xformed.variant_l...
Generate the correct function for a variant signature. :returns: function that returns an appropriate value :rtype: ((str * object) or list)-> object
def property_derivative_T(self, T, P, zs, ws, order=1): r sorted_valid_methods = self.select_valid_methods(T, P, zs, ws) for method in sorted_valid_methods: try: return self.calculate_derivative_T(T, P, zs, ws, method, order) except: pass ...
r'''Method to calculate a derivative of a mixture property with respect to temperature at constant pressure and composition, of a given order. Methods found valid by `select_valid_methods` are attempted until a method succeeds. If no methods are valid and succeed, None is returned. ...
def _forbidden_attributes(obj): for key in list(obj.data.keys()): if key in list(obj.reserved_keys.keys()): obj.data.pop(key) return obj
Return the object without the forbidden attributes.
def _sorted_copy(self, comparison, reversed=False):
    """Return a sorted copy with the colors arranged per *comparison*.

    :param comparison: comparison function passed to ``_list.sort``.
    :param reversed: when true, reverse the sorted order.
    """
    # Use a name that does not shadow the builtin ``sorted``.
    duplicate = self.copy()
    _list.sort(duplicate, comparison)
    if reversed:
        _list.reverse(duplicate)
    return duplicate
Returns a sorted copy with the colors arranged according to the given comparison.
def create_template(material, path, show=False): file_name = 'dataset-%s.csv' % material.lower() file_path = os.path.join(path, file_name) with open(file_path, 'w', newline='') as csvfile: writer = csv.writer(csvfile, delimiter=',', quotechar='"', quot...
Create a template csv file for a data set. :param material: the name of the material :param path: the path of the directory where the file must be written :param show: a boolean indicating whether the created file should be \ displayed after creation
def detect(self): self.log.info("initializing AP detection on all sweeps...") t1=cm.timeit() for sweep in range(self.abf.sweeps): self.detectSweep(sweep) self.log.info("AP analysis of %d sweeps found %d APs (completed in %s)", self.abf.sweeps,len(self.AP...
runs AP detection on every sweep.
def create(self): dtype = NP_COMPONENT_DTYPE[self.component_type.value] data = numpy.frombuffer( self.buffer.read(byte_length=self.byte_length, byte_offset=self.byte_offset), count=self.count * self.components, dtype=dtype, ) return dtype, data
Create the VBO
def hide_variables_window(self):
    """Hide the variables window, destroying it if it is currently open."""
    if self.var_window is None:
        return
    self.var_window.window.destroy()
    self.var_window = None
Hide the variables window
def unregister_fetcher(self, object_class):
    """Unregister a fetcher class for an object class.

    :Parameters:
        - `object_class`: class retrieved by the fetcher.
    """
    # The lock is acquired/released via the context manager instead of
    # an explicit acquire/try/finally/release.
    with self._lock:
        cache = self._caches.get(object_class)
        if cache:
            cache.set_fetcher(None)
Unregister a fetcher class for an object class. :Parameters: - `object_class`: class retrieved by the fetcher. :Types: - `object_class`: `classobj`
def to_uint8(self):
    """Convert this heatmaps object to a 0-to-255 array.

    Returns
    -------
    arr_uint8 : ndarray
        Heatmap scaled to 0..255 (dtype is uint8).
    """
    scaled = np.round(self.arr_0to1 * 255)
    clipped = np.clip(scaled, 0, 255)
    return clipped.astype(np.uint8)
Convert this heatmaps object to a 0-to-255 array. Returns ------- arr_uint8 : (H,W,C) ndarray Heatmap as a 0-to-255 array (dtype is uint8).
def export_as_csv_action(description="Export selected objects as CSV file", fields=None, header=True): def export_as_csv(modeladmin, request, queryset): opts = modeladmin.model._meta if not fields: field_names = [field.name for field in opts.fields] else: field_names ...
Return an export csv action. Arguments: description (string): action description fields ([string]): list of model fields to include header (bool): whether or not to output the column names as the first row
def _get_param(self, param, allowed_values=None, optional=False): request_params = self._request_params() if param in request_params: value = request_params[param].lower() if allowed_values is not None: if value in allowed_values: self.params[p...
Get parameter in GET request.
def check_password(self, username, password, properties): logger.debug("check_password{0!r}".format( (username, password, properties))) pwd, pwd_format = self.get_password(username, (u"plain", u"md5:user:realm:password"), properties) ...
Check the password validity. Used by plain-text authentication mechanisms. Default implementation: retrieve a "plain" password for the `username` and `realm` using `self.get_password` and compare it with the password provided. May be overridden e.g. to check the password again...
def shuffle_node_data(graph: BELGraph, key: str, percentage: Optional[float] = None) -> BELGraph: percentage = percentage or 0.3 assert 0 < percentage <= 1 n = graph.number_of_nodes() swaps = int(percentage * n * (n - 1) / 2) result: BELGraph = graph.copy() for _ in range(swaps): s, t = ...
Shuffle the node's data. Useful for permutation testing. :param graph: A BEL graph :param key: The node data dictionary key :param percentage: What percentage of possible swaps to make
def set_device_id(self, dev, id):
    """Set device ID to new value.

    :param str dev: Serial device address/path
    :param id: Device ID to set (0..255)
    :raises ValueError: if *id* does not fit in an unsigned byte
    """
    if not (0 <= id <= 255):
        raise ValueError("ID must be an unsigned byte!")
    com, code, ok = io.send_packet(
        CMDTYPE.SETID, 1, dev, self.baudrate, 5, id)
    if not ok:
        raise_error(code)
Set device ID to new value. :param str dev: Serial device address/path :param id: Device ID to set
def power_btn(self, interval=200): if self.__power_btn_port is None: cij.err("cij.usb.relay: Invalid USB_RELAY_POWER_BTN") return 1 return self.__press(self.__power_btn_port, interval=interval)
TARGET power button
def manifest(): prune = options.paved.dist.manifest.prune graft = set() if options.paved.dist.manifest.include_sphinx_docroot: docroot = options.get('docroot', 'docs') graft.update([docroot]) if options.paved.dist.manifest.exclude_sphinx_builddir: builddir = docroot + '/'...
Guarantee the existence of a basic MANIFEST.in. manifest doc: http://docs.python.org/distutils/sourcedist.html#manifest `options.paved.dist.manifest.include`: set of files (or globs) to include with the `include` directive. `options.paved.dist.manifest.recursive_include`: set of files (or globs) to inclu...
def serialize_rules(self, rules): serialized = [] for rule in rules: direction = rule["direction"] source = '' destination = '' if rule.get("remote_ip_prefix"): prefix = rule["remote_ip_prefix"] if direction == "ingress": ...
Creates a payload for the redis server.
def has_degradation_increases_activity(data: Dict) -> bool:
    """Check if the degradation of the source causes activity of the target."""
    # Short-circuit: the object check only runs if the subject check passes.
    return (part_has_modifier(data, SUBJECT, DEGRADATION)
            and part_has_modifier(data, OBJECT, ACTIVITY))
Check if the degradation of source causes activity of target.
def find_service(self, uuid):
    """Return the first child service with the specified UUID.

    Returns None if no matching service is found.
    """
    matches = (svc for svc in self.list_services() if svc.uuid == uuid)
    return next(matches, None)
Return the first child service found that has the specified UUID. Will return None if no service that matches is found.
def load(self): self.create_effect_classes() self._add_resource_descriptions_to_pools(self.create_external_resources()) self._add_resource_descriptions_to_pools(self.create_resources()) for meta, resource in resources.textures.load_pool(): self._textures[meta.label] = re...
Loads this project instance
def process_incoming_tuples(self):
    """Process tuples buffered into in_stream and emit results.

    Equivalent to ``addBoltTasks()`` but designed for an event-driven,
    single-threaded bolt.
    """
    if not self.output_helper.is_out_queue_available():
        # Out queue is full: record the stall instead of executing.
        self.bolt_metrics.update_out_queue_full_count()
        return
    self._read_tuples_and_execute()
    self.output_helper.send_out_tuples()
Should be called when tuple was buffered into in_stream This method is equivalent to ``addBoltTasks()`` but is designed for event-driven single-thread bolt.
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, cls=None, indent=None, separators=None, encoding='utf-8', default=None, **kw): if (skipkeys is False and ensure_ascii is True and check_circular is True and allow_nan is True and cls is None and in...
Serialize ``obj`` to a JSON formatted ``str``. If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) will be skipped instead of raising a ``TypeError``. If ``ensure_ascii`` is ``False``, then the return value w...
def pin_type(self, pin):
    """Get the `ahio.PortType` this pin was set to.

    If *pin* is a list, a list with the type of each pin is returned.

    @arg pin the pin (or list of pins) you want to see the mode of
    @returns the `ahio.PortType` the pin is set to
    @throw KeyError if pin isn't mapped.
    """
    if isinstance(pin, list):
        return [self.pin_type(p) for p in pin]
    pin_id = self._pin_mapping.get(pin, None)
    # BUG FIX: the original tested truthiness (`if pin_id:`), which wrongly
    # raised KeyError for a mapped pin whose id is falsy (e.g. 0).
    if pin_id is not None:
        return self._pin_type(pin_id)
    raise KeyError('Requested pin is not mapped: %s' % pin)
Gets the `ahio.PortType` this pin was set to. If you're developing a driver, implement _pin_type(self, pin) @arg pin the pin you want to see the mode @returns the `ahio.PortType` the pin is set to @throw KeyError if pin isn't mapped.
def mkclick(freq, sr=22050, duration=0.1):
    """Generate a click sample.

    Replicates mir_eval.sonify.clicks but exposes the target frequency
    and duration.

    :param freq: click tone frequency in Hz
    :param sr: sample rate
    :param duration: click length in seconds
    :returns: 1-D sample array of the exponentially decayed sinusoid
    """
    n_samples = int(sr * duration)
    t = np.arange(n_samples)
    tone = np.sin(2 * np.pi * t * freq / float(sr))
    envelope = np.exp(-t / (1e-2 * sr))
    return tone * envelope
Generate a click sample. This replicates functionality from mir_eval.sonify.clicks, but exposes the target frequency and duration.
def _fourier_func(fourierparams, phase, mags): order = int(len(fourierparams)/2) f_amp = fourierparams[:order] f_pha = fourierparams[order:] f_orders = [f_amp[x]*npcos(2.0*pi_value*x*phase + f_pha[x]) for x in range(order)] total_f = npmedian(mags) for fo in f_orders: tot...
This returns a summed Fourier cosine series. Parameters ---------- fourierparams : list This MUST be a list of the following form like so:: [period, epoch, [amplitude_1, amplitude_2, amplitude_3, ..., amplitude_X], [phase_1, phase_2, phase_3, ......
def reverse_dummies(self, X, mapping): out_cols = X.columns.values.tolist() mapped_columns = [] for switch in mapping: col = switch.get('col') mod = switch.get('mapping') insert_at = out_cols.index(mod.columns[0]) X.insert(insert_at, col, 0) ...
Convert dummy variable into numerical variables Parameters ---------- X : DataFrame mapping: list-like Contains mappings of column to be transformed to it's new columns and value represented Returns ------- numerical: DataFrame
def doStuff(ABFfolder, analyze=False, convert=False, index=True, overwrite=True,
            launch=True):
    """Inelegant for now, but lets you manually analyze every ABF in a folder.

    NOTE(review): ``index``, ``overwrite`` and ``launch`` are accepted but
    currently unused — confirm intended behavior.
    """
    folder_index = INDEX(ABFfolder)
    if analyze:
        folder_index.analyzeAll()
    if convert:
        folder_index.convertImages()
Inelegant for now, but lets you manually analyze every ABF in a folder.
def cache_func(prefix, method=False): def decorator(func): @wraps(func) def wrapper(*args, **kwargs): cache_args = args if method: cache_args = args[1:] cache_key = get_cache_key(prefix, *cache_args, **kwargs) cached_value = cache.get(c...
Cache result of function execution into the django cache backend. Calculate cache key based on `prefix`, `args` and `kwargs` of the function. For using like object method set `method=True`.
def clean_channel_worker_username(self): channel_worker_username = self.cleaned_data['channel_worker_username'].strip() try: User.objects.get(username=channel_worker_username) except User.DoesNotExist: raise ValidationError( ValidationMessages.INVALID_CHAN...
Clean enterprise channel worker user form field Returns: str: the cleaned value of channel user username for transmitting courses metadata.
def save_photon_hdf5(self, identity=None, overwrite=True, path=None): filepath = self.filepath if path is not None: filepath = Path(path, filepath.name) self.merge_da() data = self._make_photon_hdf5(identity=identity) phc.hdf5.save_photon_hdf5(data, h5_fname=str(filep...
Create a smFRET Photon-HDF5 file with current timestamps.
def channel_axis(self, batch):
    """Interface to model.channel_axis for attacks.

    Parameters
    ----------
    batch : bool
        Controls whether the axis index for a batch of images
        (4 dimensions) or a single image (3 dimensions) is returned.
    """
    axis = self.__model.channel_axis()
    return axis if batch else axis - 1
Interface to model.channel_axis for attacks. Parameters ---------- batch : bool Controls whether the index of the axis for a batch of images (4 dimensions) or a single image (3 dimensions) should be returned.
def calculate_integral_over_T(self, T1, T2, method):
    r'''Calculate the integral of (property / T) dT between `T1` and `T2`
    using SciPy's `quad` with no options, evaluating the property with the
    specified method.

    :param T1: lower temperature bound
    :param T2: upper temperature bound
    :param method: property-calculation method passed to ``self.calculate``
    :returns: value of the integral as a float
    '''
    def integrand(T):
        return self.calculate(T, method) / T
    return float(quad(integrand, T1, T2)[0])
r'''Method to calculate the integral of a property over temperature with respect to temperature, using a specified method. Uses SciPy's `quad` function to perform the integral, with no options. This method can be overwritten by subclasses who may perfer to add analytical method...
def node_get_args(node):
    """Return an ordered mapping from params to args for *node*."""
    formula_obj = node[OBJ]
    call_key = node[KEY]
    bound = formula_obj.formula.signature.bind(*call_key)
    bound.apply_defaults()
    return bound.arguments
Return an ordered mapping from params to args
def memoize(func):
    """Memoization decorator for a function taking one or more arguments.

    Results are cached in a dict keyed by the positional-argument tuple.
    """
    class _CacheDict(dict):
        def __getitem__(self, *key):
            return dict.__getitem__(self, key)

        def __missing__(self, key):
            value = self[key] = func(*key)
            return value

    return _CacheDict().__getitem__
Memoization decorator for a function taking one or more arguments.
def amount(self):
    """Determine the sum of mole amounts of all the compounds.

    :returns: Amount. [kmol]
    """
    total = 0
    for compound in self.material.compounds:
        total += self.get_compound_amount(compound)
    return total
Determine the sum of mole amounts of all the compounds. :returns: Amount. [kmol]
def start(self): start = time.time() self._kill_event = threading.Event() self.procs = {} for worker_id in range(self.worker_count): p = multiprocessing.Process(target=worker, args=(worker_id, self.uid, ...
Start the worker processes. TODO: Move task receiving to a thread
def load(self): self._open_image() components, data = image_data(self.image) texture = self.ctx.texture( self.image.size, components, data, ) texture.extra = {'meta': self.meta} if self.meta.mipmap: texture.build_m...
Load a 2d texture
def from_der_data(cls, data): logger.debug("Decoding DER certificate: {0!r}".format(data)) if cls._cert_asn1_type is None: cls._cert_asn1_type = Certificate() cert = der_decoder.decode(data, asn1Spec = cls._cert_asn1_type)[0] result = cls() tbs_cert = cert.getComponen...
Decode DER-encoded certificate. :Parameters: - `data`: the encoded certificate :Types: - `data`: `bytes` :Return: decoded certificate data :Returntype: ASN1CertificateData
async def add_user(self, add_user_request):
    """Invite users to join an existing group conversation.

    :param add_user_request: AddUserRequest protobuf message describing
        the conversation and the users to invite.
    :returns: AddUserResponse protobuf message populated by the API call.
    """
    response = hangouts_pb2.AddUserResponse()
    # The pb request fills ``response`` in place.
    await self._pb_request('conversations/adduser', add_user_request, response)
    return response
Invite users to join an existing group conversation.
def deploy_code(self): assert self.genv.SITE, 'Site unspecified.' assert self.genv.ROLE, 'Role unspecified.' r = self.local_renderer if self.env.exclusions: r.env.exclusions_str = ' '.join( "--exclude='%s'" % _ for _ in self.env.exclusions) r.local(r.e...
Generates a rsync of all deployable code.
def _slotnames(cls): names = cls.__dict__.get("__slotnames__") if names is not None: return names names = [] if not hasattr(cls, "__slots__"): pass else: for c in cls.__mro__: if "__slots__" in c.__dict__: slots = c.__dict__['__slots__'] ...
Return a list of slot names for a given class. This needs to find slots defined by the class and its bases, so we can't simply return the __slots__ attribute. We must walk down the Method Resolution Order and concatenate the __slots__ of each class found there. (This assumes classes don't modify thei...
def transitions(self, return_matrix=True): if return_matrix: mat = np.zeros((self.nV, self.nV)) for v in self.g.nodes(): ind = [e[1] for e in sorted(self.g.out_edges(v))] mat[v, ind] = self._route_probs[v] else: mat = { ...
Returns the routing probabilities for each vertex in the graph. Parameters ---------- return_matrix : bool (optional, the default is ``True``) Specifies whether an :class:`~numpy.ndarray` is returned. If ``False``, a dict is returned instead. Returns ...
def shellcmd(repo, args):
    """Run a shell command within the repo's context.

    Parameters
    ----------
    repo: Repository object
    args: Shell command
    """
    with cd(repo.rootdir):
        return run(args)
Run a shell command within the repo's context Parameters ---------- repo: Repository object args: Shell command
def get_self(session, user_details=None): if user_details: user_details['compact'] = True response = make_get_request(session, 'self', params_data=user_details) json_data = response.json() if response.status_code == 200: return json_data['result'] else: raise SelfNotRetrieved...
Get details about the currently authenticated user
def copy(self):
    """Copy the instance, duplicating the asset so no reference is shared."""
    cls = self.__class__
    return cls(
        amount=self["amount"],
        asset=self["asset"].copy(),
        blockchain_instance=self.blockchain,
    )
Copy the instance and make sure not to use a reference
def node_theta(self, node):
    """Convenience function to find the node's theta angle."""
    return self.group_theta(self.find_node_group_membership(node))
Convenience function to find the node's theta angle.
def validate_key(self, key):
    """Validate the provided reset key.

    Returns:
        The validated key.

    Raises:
        serializers.ValidationError: If the provided key does not exist.
    """
    token_exists = models.PasswordResetToken.valid_tokens.filter(key=key).exists()
    if not token_exists:
        raise serializers.ValidationError(
            _("The provided reset token does not exist, or is expired.")
        )
    return key
Validate the provided reset key. Returns: The validated key. Raises: serializers.ValidationError: If the provided key does not exist.
def to_message(self):
    """Create an error Acknowledgement message.

    The message's code and message text are taken from this exception.

    :return: the message representing this exception
    """
    from .messages import ack
    detail = self.args[0] if self.args else ''
    return ack.Acknowledgement(self.code, detail)
Creates an error Acknowledgement message. The message's code and message are taken from this exception. :return: the message representing this exception
def add_scalar_data(self, token, community_id, producer_display_name, metric_name, producer_revision, submit_time, value, **kwargs): parameters = dict() parameters['token'] = token parameters['communityId'] = community_id parameters['produc...
Create a new scalar data point. :param token: A valid token for the user in question. :type token: string :param community_id: The id of the community that owns the producer. :type community_id: int | long :param producer_display_name: The display name of the producer. :...
def requireAnomalyModel(func): @wraps(func) def _decorator(self, *args, **kwargs): if not self.getInferenceType() == InferenceType.TemporalAnomaly: raise RuntimeError("Method required a TemporalAnomaly model.") if self._getAnomalyClassifier() is None: raise RuntimeError("Model does not support t...
Decorator for functions that require anomaly models.
def paginate_link_tag(item):
    """Create an A-HREF tag that points to another page, usable in paginate."""
    a_tag = Page.default_link_tag(item)
    extra = {'class': 'blue white-text'} if item['type'] == 'current_page' else {}
    return make_html_tag('li', a_tag, **extra)
Create an A-HREF tag that points to another page usable in paginate.
def read(cls, proto):
    """Intercept TemporalMemory deserialization to initialize
    `TemporalMemoryMonitorMixin` state.

    @param proto (DynamicStructBuilder) Proto object
    @return (TemporalMemory) TemporalMemory shim instance
    """
    tm = super(TemporalMemoryMonitorMixin, cls).read(proto)
    # Reset all monitoring state on the freshly deserialized instance.
    tm.mmName = None
    tm._mmTraces = None
    tm._mmData = None
    tm.mmClearHistory()
    tm._mmResetActive = True
    return tm
Intercepts TemporalMemory deserialization request in order to initialize `TemporalMemoryMonitorMixin` state @param proto (DynamicStructBuilder) Proto object @return (TemporalMemory) TemporalMemory shim instance
def _rindex(mylist: Sequence[T], x: T) -> int: return len(mylist) - mylist[::-1].index(x) - 1
Index of the last occurrence of x in the sequence.
def get_form_field_class(model_field): FIELD_MAPPING = { IntField: forms.IntegerField, StringField: forms.CharField, FloatField: forms.FloatField, BooleanField: forms.BooleanField, DateTimeField: forms.DateTimeField, DecimalField: forms.DecimalField, URLField:...
Gets the default form field for a mongoengine field.
def load(self):
    """Load the FloatingIP object from DigitalOcean.

    Requires ``self.ip`` to be set.
    """
    data = self.get_data('floating_ips/%s' % self.ip, type=GET)
    for attr, value in data['floating_ip'].items():
        setattr(self, attr, value)
    return self
Load the FloatingIP object from DigitalOcean. Requires self.ip to be set.
def disableTap(self):
    """Disable writing of output tap files, closing any open handles."""
    # Close input tap first, then output tap (same order as before).
    for attr in ('_tapFileIn', '_tapFileOut'):
        handle = getattr(self, attr)
        if handle is not None:
            handle.close()
            setattr(self, attr, None)
Disable writing of output tap files.
async def await_event(self, event=None, timeout=30):
    """Wait for an event from QTM.

    :param event: A :class:`qtm.QRTEvent` to wait for a specific event.
        Otherwise wait for any event.
    :param timeout: Max time to wait for event.
    :rtype: A :class:`qtm.QRTEvent`
    """
    result = await self._protocol.await_event(event, timeout=timeout)
    return result
Wait for an event from QTM. :param event: A :class:`qtm.QRTEvent` to wait for a specific event. Otherwise wait for any event. :param timeout: Max time to wait for event. :rtype: A :class:`qtm.QRTEvent`
def _make_celery_app(config): config.registry.celery_app.conf['pyramid_config'] = config return config.registry.celery_app
This exposes the celery app. The app is actually created as part of the configuration. However, this does make the celery app functional as a stand-alone celery application. This puts the pyramid configuration object on the celery app to be used for making the registry available to tasks running inside...
def print_state(self): def tile_string(value): if value > 0: return '% 5d' % (2 ** value,) return " " separator_line = '-' * 25 print(separator_line) for row in range(4): print("|" + "|".join([tile_string(v) for v in self._state[row...
Prints the current state.
def remove_extension(module, name, code): key = (module, name) if (_extension_registry.get(key) != code or _inverted_registry.get(code) != key): raise ValueError("key %s is not registered with code %s" % (key, code)) del _extension_registry[key] del _inverted_reg...
Unregister an extension code. For testing only.
def parse_uri(self, raw_uri, recursive): if recursive: raw_uri = directory_fmt(raw_uri) file_provider = self.parse_file_provider(raw_uri) self._validate_paths_or_fail(raw_uri, recursive) uri, docker_uri = self.rewrite_uris(raw_uri, file_provider) uri_parts = job_model.UriParts( directo...
Return a valid docker_path, uri, and file provider from a flag value.
def _MergeMessageField(self, tokenizer, message, field): is_map_entry = _IsMapEntry(field) if tokenizer.TryConsume('<'): end_token = '>' else: tokenizer.Consume('{') end_token = '}' if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: if field.is_extension: sub_me...
Merges a single scalar field into a message. Args: tokenizer: A tokenizer to parse the field value. message: The message of which field is a member. field: The descriptor of the field to be merged. Raises: ParseError: In case of text parsing problems.
def _c2x(self, c): return 0.5 * (self.window[0] + self.window[1] + c * (self.window[1] - self.window[0]))
Convert Chebyshev coordinates to window coordinates.
def indexOf(a, b):
    "Return the first index of b in a."
    position = 0
    for item in a:
        if item == b:
            return position
        position += 1
    raise ValueError('sequence.index(x): x not in sequence')
Return the first index of b in a.
def pixel_wise_softmax(x, name='pixel_wise_softmax'):
    """Return the softmax outputs of images; every pixel can have multiple
    labels and the per-pixel values sum to 1. Usually used for image
    segmentation.

    Parameters
    ----------
    x : Tensor
        Input tensor; for a 2d image a 4D tensor
        (batch_size, height, width, channel) with channel >= 2.
    name : str
        TensorFlow name scope for the op.
    """
    with tf.name_scope(name):
        return tf.nn.softmax(x)
Return the softmax outputs of images, every pixels have multiple label, the sum of a pixel is 1. Usually be used for image segmentation. Parameters ---------- x : Tensor input. - For 2d image, 4D tensor (batch_size, height, weight, channel), where channel >= 2. - For 3d...
def rename(self, new_name):
    """Rename an image.

    :param new_name: the new name to assign to the image.
    """
    payload = {"name": new_name}
    return self.get_data("images/%s" % self.id, type=PUT, params=payload)
Rename an image
def extract_scheduler_location(self, topology): schedulerLocation = { "name": None, "http_endpoint": None, "job_page_link": None, } if topology.scheduler_location: schedulerLocation["name"] = topology.scheduler_location.topology_name schedulerLocation["http_endpoint"] = t...
Returns the representation of scheduler location that will be returned from Tracker.
def value_from_datadict(self, *args, **kwargs):
    """Pass the submitted value through the sanitizer before returning it."""
    value = super(RichTextWidget, self).value_from_datadict(*args, **kwargs)
    if value is None:
        return value
    return self.get_sanitizer()(value)
Pass the submitted value through the sanitizer before returning it.
def prune_unused_metabolites(cobra_model):
    """Remove metabolites not involved in any reactions.

    Parameters
    ----------
    cobra_model: the model to remove unused metabolites from

    Returns
    -------
    (pruned_model, removed_metabolites) tuple
    """
    pruned = cobra_model.copy()
    unused = [met for met in pruned.metabolites if len(met.reactions) == 0]
    pruned.remove_metabolites(unused)
    return pruned, unused
Remove metabolites that are not involved in any reactions and returns pruned model Parameters ---------- cobra_model: class:`~cobra.core.Model.Model` object the model to remove unused metabolites from Returns ------- output_model: class:`~cobra.core.Model.Model` object inpu...
def auth_properties(self):
    """Authentication properties of the stream.

    Derived from the transport properties with 'local-jid' and
    'service-type' added.
    """
    properties = {}
    properties.update(self.settings["extra_auth_properties"])
    transport = self.transport
    if transport:
        properties.update(transport.auth_properties)
    properties["local-jid"] = self.me
    properties["service-type"] = "xmpp"
    return properties
Authentication properties of the stream. Derived from the transport with 'local-jid' and 'service-type' added.
def is_partly_within_image(self, image):
    """Estimate whether the polygon is at least partially inside the image.

    Parameters
    ----------
    image : ndarray or tuple of int
        Image dimensions to use (shape of an ndarray, or a shape tuple).
    """
    fully_outside = self.is_out_of_image(image, fully=True, partly=False)
    return not fully_outside
Estimate whether the polygon is at least partially inside the image area. Parameters ---------- image : (H,W,...) ndarray or tuple of int Image dimensions to use. If an ndarray, its shape will be used. If a tuple, it is assumed to represent the image shape an...
def send(self, str, end='\n'):
    """Send a line to the child process's stdin.

    NOTE(review): the parameter name shadows the builtin ``str``; kept
    unchanged for keyword-call compatibility.
    """
    line = str + end
    return self._process.stdin.write(line)
Sends a line to std_in.
def get(self): if self.is_shutdown(): return None while len(self._states) == 0: if self.running == 0: return None if self.is_shutdown(): return None logger.debug("Waiting for available states") self._lock.wait() ...
Dequeue a state with the max priority
def setup_ipython(self): if self.is_ipysetup: return from ipykernel.kernelapp import IPKernelApp self.shell = IPKernelApp.instance().shell if not self.shell and is_ipython(): self.shell = get_ipython() if self.shell: shell_class = type(self.she...
Monkey patch shell's error handler. This method is to monkey-patch the showtraceback method of IPython's InteractiveShell to __IPYTHON__ is not detected when starting an IPython kernel, so this method is called from start_kernel in spyder-modelx.
def install(self): if not self.is_valid: raise PolyaxonDeploymentConfigError( 'Deployment type `{}` not supported'.format(self.deployment_type)) if self.is_kubernetes: self.install_on_kubernetes() elif self.is_docker_compose: self.install_on_do...
Install polyaxon using the current config to the correct platform.
def do_run_1(self):
    """LM run, evaluating 1 step at a time until termination is signalled."""
    while True:
        if self.check_terminate():
            break
        self._has_run = True
        self._run1()
        self._num_iter += 1
        self._inner_run_counter += 1
LM run, evaluating 1 step at a time. Broyden or eigendirection updates replace full-J updates until a full-J update occurs. Does not run with the calculated J (no internal run).
def unmap_memory_callback(self, start, size): logger.info(f"Unmapping memory from {hex(start)} to {hex(start + size)}") mask = (1 << 12) - 1 if (start & mask) != 0: logger.error("Memory to be unmapped is not aligned to a page") if (size & mask) != 0: size = ((size...
Unmap Unicorn maps when Manticore unmaps them
def curl(self, url, post):
    """Interface for sending web requests to the AmbientSMS API server.

    NOTE: Python 2 only (``urllib2`` and comma-style except clause).

    :param url: API endpoint URL.
    :param post: XML request body; UTF-8 encoded before sending.
    :returns: parsed response via ``dictFromXml``.
    :raises AmbientSMSError: on any transport (URLError) failure.
    """
    try:
        req = urllib2.Request(url)
        req.add_header("Content-type", "application/xml")
        data = urllib2.urlopen(req, post.encode('utf-8')).read()
    except urllib2.URLError, v:
        # Wrap transport errors in the library's own exception type.
        raise AmbientSMSError(v)
    return dictFromXml(data)
Interface for sending web requests to the AmbientSMS API Server
def get_role_name(region, account_id, role):
    """Shortcut to insert the `account_id` and `role` into the IAM ARN string."""
    partition = ARN_PREFIXES.get(region, 'aws')
    return 'arn:{0}:iam::{1}:role/{2}'.format(partition, account_id, role)
Shortcut to insert the `account_id` and `role` into the iam string.
def set_pkg_desc(self, doc, text):
    """Set the package's description.

    Raises CardinalityError if the description was already set.
    Raises OrderError (via ``assert_package_exists``) if no package was
    previously defined.
    """
    self.assert_package_exists()
    if self.package_desc_set:
        raise CardinalityError('Package::Description')
    self.package_desc_set = True
    doc.package.description = text
Sets the package's description. Raises CardinalityError if description already set. Raises OrderError if no package previously defined.
def push(self, message): if self._ignore_event(message): return None, None args = self._parse_message(message) self.log.debug("Searching for command using chunks: %s", args) cmd, msg_args = self._find_longest_prefix_command(args) if cmd is not None: if mes...
Takes a SlackEvent, parses it for a command, and runs against registered plugin
def show_image(setter, width, height, image_path='', image_obj=None, offset=(0, 0), bgcolor=COLORS.Off, brightness=255): bgcolor = color_scale(bgcolor, brightness) img = image_obj if image_path and not img: from PIL import Image img = Image.open(image_path) ...
Display an image on a matrix.
def __validateExperimentControl(self, control): taskList = control.get('tasks', None) if taskList is not None: taskLabelsList = [] for task in taskList: validateOpfJsonValue(task, "opfTaskSchema.json") validateOpfJsonValue(task['taskControl'], "opfTaskControlSchema.json") tas...
Validates control dictionary for the experiment context
def list_move_to_front(l, value='other'):
    """If *value* is in the list, move it to the front and return the list."""
    items = list(l)
    if value in items:
        items.remove(value)
        items.insert(0, value)
    return items
if the value is in the list, move it to the front and return it.
def lookup_stdout(self, pk=None, start_line=None, end_line=None, full=True): uj_res = get_resource('unified_job') query_params = (('unified_job_node__workflow_job', pk), ('order_by', 'finished'), ('status__in', 'successful,failed,erro...
Internal method that lies to our `monitor` method by returning a scorecard for the workflow job where the standard out would have been expected.
def getbalance(self, url='http://services.ambientmobile.co.za/credits'): postXMLList = [] postXMLList.append("<api-key>%s</api-key>" % self.api_key) postXMLList.append("<password>%s</password>" % self.password) postXML = '<sms>%s</sms>' % "".join(postXMLList) result = self.curl(u...
Get the number of credits remaining at AmbientSMS