code
stringlengths
59
4.4k
docstring
stringlengths
5
7.69k
def parse_filter(self, filters): for filter_type in filters: if filter_type == 'or' or filter_type == 'and': conditions = [] for field in filters[filter_type]: if self.is_field_allowed(field): conditions.append(self.create_q...
This method processes the filters
def handle(self, *args, **kwargs): cutoff = timezone.now() cutoff -= app_settings.CONFIRMATION_EXPIRATION cutoff -= app_settings.CONFIRMATION_SAVE_PERIOD queryset = models.EmailConfirmation.objects.filter( created_at__lte=cutoff ) count = queryset.count() ...
Handle execution of the command.
def set_time(self, value: float):
    """Jump to a new point on the timeline.

    Negative values are clamped to zero before the jump is applied.

    Args:
        value (float): The new time.
    """
    target = max(value, 0)
    # Shift the offset so that get_time() will now report `target`.
    self.offset += self.get_time() - target
Set the current time. This can be used to jump in the timeline. Args: value (float): The new time
def amounts(masses):
    """Calculate the amounts from the specified compound masses.

    :param masses: [kg] dictionary, e.g. {'SiO2': 3.0, 'FeO': 1.5}
    :returns: [kmol] dictionary mapping each compound to its amount
    """
    # Iterate the dict's items directly instead of calling .keys() and
    # re-indexing; `amount` is the module-level mass->kmol helper.
    return {compound: amount(compound, mass)
            for compound, mass in masses.items()}
Calculate the amounts from the specified compound masses. :param masses: [kg] dictionary, e.g. {'SiO2': 3.0, 'FeO': 1.5} :returns: [kmol] dictionary
def create_or_update(self, store_id, product_id, variant_id, data): self.store_id = store_id self.product_id = product_id self.variant_id = variant_id if 'id' not in data: raise KeyError('The product variant must have an id') if 'title' not in data: raise...
Add or update a product variant. :param store_id: The store id. :type store_id: :py:class:`str` :param product_id: The id for the product of a store. :type product_id: :py:class:`str` :param variant_id: The id for the product variant. :type variant_id: :py:class:`str` ...
def get_group_tabs(self): if self.tab_group is None: raise ImproperlyConfigured( "%s requires a definition of 'tab_group'" % self.__class__.__name__) group_members = [t for t in self._registry if t.tab_group == self.tab_group] return [t() for t in grou...
Return instances of all other tabs that are members of the tab's tab group.
def pdw_worker(task): frequency = task[0] times, modmags = task[1], task[2] fold_time = task[3] j_range = range(task[4]) keep_threshold_1 = task[5] keep_threshold_2 = task[6] phasebinsize = task[7] try: period = 1.0/frequency phased = phase_magseries(times, ...
This is the parallel worker for the function below. task[0] = frequency for this worker task[1] = times array task[2] = mags array task[3] = fold_time task[4] = j_range task[5] = keep_threshold_1 task[6] = keep_threshold_2 task[7] = phasebinsize we don't need errs for the worker.
def _get_repo_info(self, environ, rev, reload=False): caches = environ.setdefault("wsgidav.hg.cache", {}) if caches.get(compat.to_native(rev)) is not None: _logger.debug("_get_repo_info(%s): cache hit." % rev) return caches[compat.to_native(rev)] start_time = time.time() ...
Return a dictionary containing all files under source control. dirinfos: Dictionary containing direct members for every collection. {folderpath: (collectionlist, filelist), ...} files: Sorted list of all file paths in the manifest. filedict: Dicti...
def list(self, root=False, **kwargs): if kwargs.get('parent', None): self.set_child_endpoint(parent=kwargs['parent'], inventory=kwargs.get('inventory', None)) kwargs.pop('parent') if root and not kwargs.get('inventory', None): raise exc.UsageError('The --root option r...
Return a list of groups. =====API DOCS===== Retrieve a list of groups. :param root: Flag that if set, only root groups of a specific inventory will be listed. :type root: bool :param parent: Primary key or name of the group whose child groups will be listed. :type paren...
def nmap_scan(): hs = HostSearch() config = Config() nmap_types = ['top10', 'top100', 'custom', 'top1000', 'all'] options = {'top10':'--top-ports 10', 'top100':'--top-ports 100', 'custom': config.get('nmap', 'options'), 'top1000': '--top-ports 1000', 'all': '-p-'} hs_parser = hs.argparser argpar...
Scans the given hosts with nmap.
def STRD(cpu, src1, src2, dest, offset=None): assert src1.type == 'register' assert src2.type == 'register' assert dest.type == 'memory' val1 = src1.read() val2 = src2.read() writeback = cpu._compute_writeback(dest, offset) cpu.write_int(dest.address(), val1, 32) ...
Writes the contents of two registers to memory.
def emph(txt, rval=None):
    """Print *txt* with emphasis chosen from *rval*.

    No rval -> informational; rval == 0 -> success; otherwise -> error.
    """
    if rval is None:
        info(txt)
        return
    if rval == 0:
        good(txt)
    else:
        err(txt)
Print, emphasized based on rval
def get(self, *args, **kwargs): try: req_func = self.session.get if self.session else requests.get req = req_func(*args, **kwargs) req.raise_for_status() self.failed_last = False return req except requests.exceptions.RequestException as ...
An interface for get requests that handles errors more gracefully to prevent data loss
def _get_job_resources(args): logging = param_util.build_logging_param( args.logging) if args.logging else None timeout = param_util.timeout_in_seconds(args.timeout) log_interval = param_util.log_interval_in_seconds(args.log_interval) return job_model.Resources( min_cores=args.min_cores, min_r...
Extract job-global resources requirements from input args. Args: args: parsed command-line arguments Returns: Resources object containing the requested resources for the job
def brightness(self, value=1.0):
    """Scale the layer's brightness.

    *value* is a multiplier: 1.0 leaves the image unchanged, 0.8 means
    brightness at 80%.
    """
    enhancer = ImageEnhance.Brightness(self.img)
    self.img = enhancer.enhance(value)
Increases or decreases the brightness in the layer. The given value is a percentage to increase or decrease the image brightness, for example 0.8 means brightness at 80%.
def insert(self, index, object):
    """Insert *object* before *index*, keeping the id->position map in sync."""
    self._check(object.id)
    list.insert(self, index, object)
    # Every existing entry at or after the insertion point moved one slot
    # to the right, so bump its recorded position.
    _dict = self._dict
    for key, position in iteritems(_dict):
        if position >= index:
            _dict[key] = position + 1
    _dict[object.id] = index
insert object before index
def quit(self): if self._process is None: logger.debug('Quit was called after self._process had already been released') return try: logger.debug('Quitting OMXPlayer') process_group_id = os.getpgid(self._process.pid) os.killpg(process_group_id, ...
Quit the player, blocking until the process has died
def clean_all(G, settings): quiet = settings["quiet"] recon = settings["recon"] sprint = settings["sprint"] error = settings["error"] all_outputs = [] for node in G.nodes(data=True): if "output" in node[1]: for item in get_all_outputs(node[1]): all_outputs.app...
Removes all the output files from all targets. Takes the graph as the only argument Args: The networkx graph object The settings dictionary Returns: 0 if successful 1 if removing even one file failed
def CheckPosixThreading(filename, clean_lines, linenum, error): line = clean_lines.elided[linenum] for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST: if Search(pattern, line): error(filename, linenum, 'runtime/threadsafe_fn', 2, 'Consider using ' + multithread_safe_func...
Checks for calls to thread-unsafe functions. Much code has been originally written without consideration of multi-threading. Also, engineers are relying on their old experience; they have learned posix before threading extensions were added. These tests guide the engineers to use thread-safe functions (when us...
def _apply_assert_methods(self, i, r, summarize=False, report_unexpected_exceptions=True, context=None): for a in dir(self): if a.startswith('assert'): rdict = self._as_dict(r) f...
Apply 'assert' methods on `r`.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just a database URL (no Engine),
    so no DBAPI needs to be available; context.execute() emits the given
    SQL to the script output.
    """
    db_url = neutron_config.database.connection
    context.configure(url=db_url)
    with context.begin_transaction():
        context.run_migrations()
Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output.
def weighted_sample(bn, e):
    """Sample an event from *bn* consistent with the evidence *e*.

    Returns ``(event, weight)`` where weight is the likelihood that the
    sampled event accords with the evidence.
    """
    weight = 1
    event = dict(e)
    for node in bn.nodes:
        variable = node.variable
        if variable in e:
            # Evidence variable: keep its value, fold its likelihood in.
            weight *= node.p(e[variable], event)
        else:
            # Free variable: sample it given what is already fixed.
            event[variable] = node.sample(event)
    return event, weight
Sample an event from bn that's consistent with the evidence e; return the event and its weight, the likelihood that the event accords to the evidence.
def _format_range_dt(self, d):
    """Format a range-filter datetime, rounded to the aggregation interval."""
    value = d if isinstance(d, six.string_types) else d.isoformat()
    rounding = self.dt_rounding_map[self.aggregation_interval]
    return '{0}||/{1}'.format(value, rounding)
Format range filter datetime to the closest aggregation interval.
def sort_pem_objects(pem_objects): keys, certs, ca_certs = [], [], [] for pem_object in pem_objects: if isinstance(pem_object, pem.Key): keys.append(pem_object) else: if _is_ca(pem_object): ca_certs.append(pem_object) else: cert...
Given a list of pem objects, sort the objects into the private key, leaf certificate, and list of CA certificates in the trust chain. This function assumes that the list of pem objects will contain exactly one private key and exactly one leaf certificate and that only key and certificate type objects ar...
async def set_typing(self, set_typing_request):
    """Set the typing status of a conversation.

    Sends the request to the conversations/settyping endpoint and
    returns the populated response message.
    """
    response = hangouts_pb2.SetTypingResponse()
    await self._pb_request(
        'conversations/settyping', set_typing_request, response)
    return response
Set the typing status of a conversation.
def one(func, n=0):
    """Create a callable that applies ``func`` to the *n*-th element.

    The returned callable yields ``None`` when its argument is not a
    (non-text) sequence or is too short to have index *n*.

    :param func: Callable applied to the selected element.
    :param n: Index of the element to apply ``func`` to.
    """
    def pick(result):
        if not _isSequenceTypeNotText(result):
            return None
        if len(result) <= n:
            return None
        return func(result[n])
    return maybe(pick)
Create a callable that applies ``func`` to a value in a sequence. If the value is not a sequence or is an empty sequence then ``None`` is returned. :type func: `callable` :param func: Callable to be applied to each result. :type n: `int` :param n: Index of the value to apply ``func`` to.
def write_int(self, where, expression, size=None, force=False): if size is None: size = self.address_bit_size assert size in SANE_SIZES self._publish('will_write_memory', where, expression, size) data = [Operators.CHR(Operators.EXTRACT(expression, offset, 8)) for offset in ra...
Writes int to memory :param int where: address to write to :param expr: value to write :type expr: int or BitVec :param size: bit size of `expr` :param force: whether to ignore memory permissions
def adopt(self, old_parent, new_parent): try: old_id = old_parent['attributes']['ID'] except TypeError: try: old_id = self.lines[old_parent]['attributes']['ID'] except TypeError: old_id = old_parent old_feature = self.features[o...
Transfer children from old_parent to new_parent :param old_parent: feature_id(str) or line_index(int) or line_data(dict) or feature :param new_parent: feature_id(str) or line_index(int) or line_data(dict) :return: List of children transferred
def play(self, call_params):
    """REST helper: POST a Play request for a call."""
    path = '/{0}/Play/'.format(self.api_version)
    return self.request(path, 'POST', call_params)
REST helper to play something on a call.
def parse_response(gdb_mi_text): stream = StringStream(gdb_mi_text, debug=_DEBUG) if _GDB_MI_NOTIFY_RE.match(gdb_mi_text): token, message, payload = _get_notify_msg_and_payload(gdb_mi_text, stream) return { "type": "notify", "message": message, "payload": payl...
Parse gdb mi text and turn it into a dictionary. See https://sourceware.org/gdb/onlinedocs/gdb/GDB_002fMI-Stream-Records.html#GDB_002fMI-Stream-Records for details on types of gdb mi output. Args: gdb_mi_text (str): String output from gdb Returns: dict with the following keys: ...
def dict_union(*args):
    """Combine dictionaries; later arguments win on intersecting keys.

    Returns an OrderedDict when the first argument is an OrderedDict,
    otherwise a plain dict. No arguments yields an empty dict.
    """
    if not args:
        return {}
    make = OrderedDict if isinstance(args[0], OrderedDict) else dict
    merged = make()
    for mapping in args:
        merged.update(mapping)
    return merged
Combines the disjoint keys in multiple dictionaries. For intersecting keys, dictionaries towards the end of the sequence are given precedence. Args: *args : a sequence of dictionaries Returns: Dict | OrderedDict : OrderedDict if the first argument is an OrderedDict, otherwise d...
def get_top_assets(self):
    """Return up to 15 assets (images first, then videos) for top display.

    The map asset is built separately.
    """
    images = list(self.get_all_images()[0:14])
    videos = list(self.eventvideo_set.all()[0:10]) if supports_video else []
    return (images + videos)[0:15]
Gets images and videos to populate top assets. Map is built separately.
def update_subject_categories(self, primary, secondary, kb): category_fields = record_get_field_instances(self.record, tag='650', ind1='1', ind2='7') ...
650 Translate Categories.
def _dissociate_gene(self, cobra_gene):
    """Remove the two-way link between this reaction and *cobra_gene*.

    Parameters
    ----------
    cobra_gene : cobra.core.Gene.Gene
    """
    # Drop the gene from this reaction, then this reaction from the gene;
    # discard() makes both steps no-ops if the link is already absent.
    self._genes.discard(cobra_gene)
    cobra_gene._reaction.discard(self)
Dissociates a cobra.Gene object with a cobra.Reaction. Parameters ---------- cobra_gene : cobra.core.Gene.Gene
def sim(src, tar, method=sim_levenshtein):
    """Return a similarity of two strings via the given metric.

    Parameters
    ----------
    src : str
        Source string for comparison.
    tar : str
        Target string for comparison.
    method : function
        The similarity metric to apply.

    Raises
    ------
    AttributeError
        If *method* is not callable.
    """
    if not callable(method):
        raise AttributeError('Unknown similarity function: ' + str(method))
    return method(src, tar)
Return a similarity of two strings. This is a generalized function for calling other similarity functions. Parameters ---------- src : str Source string for comparison tar : str Target string for comparison method : function Specifies the similarity metric (:py:func:`si...
def nodes_to_check(self, docs):
    """Collect the nodes worth searching (paragraphs, preformatted blocks
    and table cells) from every document in *docs*."""
    candidates = []
    for doc in docs:
        for tag_name in ('p', 'pre', 'td'):
            candidates.extend(self.parser.getElementsByTag(doc, tag=tag_name))
    return candidates
\ returns a list of nodes we want to search on like paragraphs and tables
def get_provider(args, resources): provider = getattr(args, 'provider', 'google') if provider == 'google': return google.GoogleJobProvider( getattr(args, 'verbose', False), getattr(args, 'dry_run', False), args.project) elif provider == 'google-v2': return google_v2.GoogleV2JobProvider( ...
Returns a provider for job submission requests.
def on_tool_finish(self, tool):
    """Record that *tool* finished: move it from current to completed.

    :param tool: the name of the tool that completed
    :type tool: str
    """
    with self._lock:
        if tool not in self.current_tools:
            return
        self.current_tools.remove(tool)
        self.completed_tools.append(tool)
Called when an individual tool completes execution. :param tool: the name of the tool that completed :type tool: str
def geometry_from_grid(self, grid, pixel_centres, pixel_neighbors, pixel_neighbors_size, buffer=1e-8): y_min = np.min(grid[:, 0]) - buffer y_max = np.max(grid[:, 0]) + buffer x_min = np.min(grid[:, 1]) - buffer x_max = np.max(grid[:, 1]) + buffer shape_arcsec = (y_max - y_min, x_...
Determine the geometry of the Voronoi pixelization, by alligning it with the outer-most coordinates on a \ grid plus a small buffer. Parameters ----------- grid : ndarray The (y,x) grid of coordinates which determine the Voronoi pixelization's geometry. pixel_centres...
def handle_enterprise_logistration(backend, user, **kwargs): request = backend.strategy.request enterprise_customer = get_enterprise_customer_for_running_pipeline( request, { 'backend': backend.name, 'kwargs': kwargs } ) if enterprise_customer is None: ...
Perform the linking of user in the process of logging to the Enterprise Customer. Args: backend: The class handling the SSO interaction (SAML, OAuth, etc) user: The user object in the process of being logged in with **kwargs: Any remaining pipeline variables
def get_project_by_id(session, project_id, project_details=None, user_details=None): query = {} if project_details: query.update(project_details) if user_details: query.update(user_details) response = make_get_request( session, 'projects/{}'.format(project_id), params_data=query)...
Get a single project by ID
def run(exercise, command):
    """Spawn a detached `command <path-of-exercise>` process via nohup,
    discarding its output."""
    argv = ['nohup', command, exercise.path()]
    Popen(argv, stdout=DEVNULL, stderr=DEVNULL)
Spawns a process with `command path-of-exercise`
def flatten_dtype(dtype, _next=None): types = [] if _next is None: _next = [0, ''] primary = True else: primary = False prefix = _next[1] if dtype.names is None: for i in numpy.ndindex(dtype.shape): if dtype.base == dtype: types.append(('%...
Unpack a structured data-type.
def handle_upload(self, request): if request.method != 'POST': raise Http404 if request.is_ajax(): try: filename = request.GET['quillUploadFile'] data = request is_raw = True except KeyError: return HttpR...
Handle file uploads from WYSIWYG.
def pin_direction(self, pin):
    """Gets the `ahio.Direction` this pin was set to.

    Lists are handled element-wise. Drivers implement
    ``_pin_direction(self, pin)``.

    @arg pin the pin (or list of pins) you want to see the mode of
    @returns the `ahio.Direction` the pin is set to
    @throw KeyError if pin isn't mapped.
    """
    if type(pin) is list:
        return [self.pin_direction(p) for p in pin]
    # Compare against None explicitly: a mapped id of 0 (or any falsy id)
    # is valid, so a bare `if pin_id:` would wrongly report it unmapped.
    pin_id = self._pin_mapping.get(pin, None)
    if pin_id is None:
        raise KeyError('Requested pin is not mapped: %s' % pin)
    return self._pin_direction(pin_id)
Gets the `ahio.Direction` this pin was set to. If you're developing a driver, implement _pin_direction(self, pin) @arg pin the pin you want to see the mode @returns the `ahio.Direction` the pin is set to @throw KeyError if pin isn't mapped.
def build_pipeline_args(cls, project, script, job_params, task_params, reserved_labels, preemptible, logging_uri, scopes, keep_alive): inputs = {} inputs.update({SCRIPT_VARNAME: script}) inputs.update({ var.name: var.value for var in job_pa...
Builds pipeline args for execution. Args: project: string name of project. script: Body of the script to execute. job_params: dictionary of values for labels, envs, inputs, and outputs for this job. task_params: dictionary of values for labels, envs, inputs, and outputs ...
def __intermediate_addresses(self, interface): address_list = self.get_copy(interface, 'addresses') if not address_list: return [{'proto': 'none'}] result = [] static = {} dhcp = [] for address in address_list: family = address.get('family') ...
converts NetJSON address to UCI intermediate data structure
def set_config(self, config):
    """Set topology-wide configuration.

    :type config: dict
    :param config: topology-wide config

    Raises TypeError when *config* is not a dict.
    """
    if isinstance(config, dict):
        self._topology_config = config
    else:
        raise TypeError("Argument to set_config needs to be dict, given: %s"
                        % str(config))
Set topology-wide configuration to the topology :type config: dict :param config: topology-wide config
def string(self, string):
    """Load JSON from *string* and return the processed content.

    :param str string: the string to load the JSON from
    :return: the result of the processing step
    """
    parsed = json.loads(string)
    return self.object(parsed)
Load an object from a string and return the processed JSON content :return: the result of the processing step :param str string: the string to load the JSON from
def read_info_string(self):
    """Read the 60-character information string from the OPC.

    :rtype: string

    :Example:

    >>> alpha.read_info_string()
    'OPC-N2 FirmwareVer=OPC-018.2....................BD'
    """
    # Command 0x3F requests the info string; the device then returns one
    # character per subsequent transfer.
    self.cnxn.xfer([0x3F])
    sleep(9e-3)
    chars = []
    for _ in range(60):
        byte = self.cnxn.xfer([0x00])[0]
        chars.append(chr(byte))
        sleep(0.1)
    return ''.join(chars)
Reads the information string for the OPC :rtype: string :Example: >>> alpha.read_info_string() 'OPC-N2 FirmwareVer=OPC-018.2....................BD'
def pick_coda_from_letter(letter):
    """Pick only the coda from a Hangul letter.

    Returns ``None`` if the given letter is not Hangul.
    """
    try:
        # Keep the unpacking inside the try so a malformed result also
        # falls through to None.
        _onset, _nucleus, coda = split_phonemes(
            letter, onset=False, nucleus=False, coda=True)
    except ValueError:
        return None
    return coda
Picks only a coda from a Hangul letter. It returns ``None`` if the given letter is not Hangul.
def create(cls, object_type=None, object_uuid=None, **kwargs):
    """Create a new deposit identifier.

    :param object_type: The object type (Default: ``None``)
    :param object_uuid: The object UUID (Default: ``None``)
    :param kwargs: Must contain ``pid_value``; ``status`` defaults to
        ``cls.default_status`` when not supplied.
    """
    # The caller must supply the PID value explicitly.
    assert 'pid_value' in kwargs
    kwargs.setdefault('status', cls.default_status)
    return super(DepositProvider, cls).create(
        object_type=object_type, object_uuid=object_uuid, **kwargs)
Create a new deposit identifier. :param object_type: The object type (Default: ``None``) :param object_uuid: The object UUID (Default: ``None``) :param kwargs: It contains the pid value.
def unmasked_blurred_image_of_planes_and_galaxies_from_padded_grid_stack_and_psf(planes, padded_grid_stack, psf):
    """Compute, for each plane, the unmasked blurred image of its galaxies.

    Iterates the planes and delegates the per-galaxy blurring to each
    plane's own method, returning one entry per plane.
    """
    images = []
    for plane in planes:
        images.append(plane.unmasked_blurred_image_of_galaxies_from_psf(
            padded_grid_stack, psf))
    return images
For lens data, compute the unmasked blurred image of every unmasked unblurred image of every galaxy in each \ plane. To do this, this function iterates over all planes and then galaxies to extract their unmasked unblurred \ images. If a galaxy in a plane has a pixelization, the unmasked image of that galax...
def _reference_table(cls, ref_table):
    """Create a foreign key reference from *cls* to *ref_table*.

    Adds one column per primary-key column of the remote table to the
    declarative class, plus a composite ForeignKeyConstraint tying them
    together.
    """
    pairs = [(sa.Column(), remote) for remote in ref_table.primary_key]
    for local, remote in pairs:
        setattr(cls, "%s_%s" % (ref_table.name, remote.name), local)
    cls.__table__.append_constraint(sa.ForeignKeyConstraint(*zip(*pairs)))
Create a foreign key reference from the local class to the given remote table. Adds column references to the declarative class and adds a ForeignKeyConstraint.
def _write_submit_script(self, template, script_filename, job_name, configs): try: submit_script = Template(template).substitute(jobname=job_name, **configs) with open(script_filename, 'w') as f: f.write(submit_script) except KeyError as e: logger.erro...
Generate submit script and write it to a file. Args: - template (string) : The template string to be used for the writing submit script - script_filename (string) : Name of the submit script - job_name (string) : job name - configs (dict) : configs that g...
def set_doc_comment(self, doc, comment):
    """Set the document comment exactly once.

    Raises CardinalityError if the comment was already set.
    """
    if self.doc_comment_set:
        raise CardinalityError('Document::Comment')
    self.doc_comment_set = True
    doc.comment = comment
Sets document comment, Raises CardinalityError if comment already set.
def __init(self,affiliation,role,jid=None,nick=None,actor=None,reason=None): if not affiliation: affiliation=None elif affiliation not in affiliations: raise ValueError("Bad affiliation") self.affiliation=affiliation if not role: role=None elif...
Initialize a `MucItem` object from a set of attributes. :Parameters: - `affiliation`: affiliation of the user. - `role`: role of the user. - `jid`: JID of the user. - `nick`: nickname of the user. - `actor`: actor modyfying the user data. ...
def check_predict_status(self, view_id, predict_request_id): failure_message = "Get status on predict failed" bare_response = self._get_success_json(self._get( 'v1/data_views/' + str(view_id) + '/predict/' + str(predict_request_id) + '/status', None, failure_message=failure_messa...
Returns a string indicating the status of the prediction job :param view_id: The data view id returned from data view create :param predict_request_id: The id returned from predict :return: Status data, also includes results if state is finished
def method_exists(cls, method): methods = cls.API_METHODS for key in method.split('.'): methods = methods.get(key) if methods is None: break if isinstance(methods, str): logger.debug('%r: %r', method, methods) return True re...
Whether a given method exists in the known API. Arguments: method (:py:class:`str`): The name of the method. Returns: :py:class:`bool`: Whether the method is in the known API.
def fix_in_stanza(self, stanza):
    """Fix an incoming stanza.

    On a server, replace the sender address with the authorized peer JID.
    """
    StreamBase.fix_in_stanza(self, stanza)
    if self.initiator:
        return
    if stanza.from_jid != self.peer:
        stanza.set_from(self.peer)
Fix an incoming stanza. On a server, replace the sender address with the authorized client JID.
def _validate_api_root(self): if not self._title: msg = "No 'title' in API Root for request '{}'" raise ValidationError(msg.format(self.url)) if not self._versions: msg = "No 'versions' in API Root for request '{}'" raise ValidationError(msg.format(self.ur...
Validates API Root information. Raises errors for required properties.
def to_geotiff(arr, path='./output.tif', proj=None, spec=None, bands=None, **kwargs): assert has_rasterio, "To create geotiff images please install rasterio" try: img_md = arr.rda.metadata["image"] x_size = img_md["tileXSize"] y_size = img_md["tileYSize"] except (AttributeError, Key...
Write out a geotiff file of the image Args: path (str): path to write the geotiff file to, default is ./output.tif proj (str): EPSG string of projection to reproject to spec (str): if set to 'rgb', write out color-balanced 8-bit RGB tif bands (list): list of bands to export. If spec...
def parse_docstring(self, func_or_method: typing.Callable) -> dict: docstring = func_or_method.__doc__ if not docstring: return {} docstring = docstring.split("---")[-1] parsed = yaml.safe_load(docstring) if not isinstance(parsed, dict): return {} ...
Given a function, parse the docstring as YAML and return a dictionary of info.
def push_bytes(self, data, force=False):
    """Write *data* at the top of the stack, growing it downward.

    :param str data: Data to write
    :param force: whether to ignore memory permissions
    :return: the new stack pointer
    """
    new_top = self.STACK - len(data)
    self.STACK = new_top
    self.write_bytes(new_top, data, force)
    return new_top
Write `data` to the stack and decrement the stack pointer accordingly. :param str data: Data to write :param force: whether to ignore memory permissions
def Delimited(value, parser=Text, delimiter=u',', encoding=None):
    """Parse a value as a delimited list.

    :type value: `unicode` or `bytes`
    :param value: Text value to parse.
    :type parser: `callable` taking a `unicode` parameter
    :param parser: Callable mapped over the delimited text values.
    :type delimiter: `unicode`
    :param delimiter: Delimiter text.
    :param encoding: Encoding used to decode byte input.
    :returns: `list` of parsed values.
    """
    value = Text(value, encoding)
    if value is None or value == u'':
        return []
    # Materialize a list so the return type is consistent with the empty
    # case above (under Python 3 bare `map` would return a lazy iterator).
    return [parser(v) for v in value.split(delimiter)]
Parse a value as a delimited list. :type value: `unicode` or `bytes` :param value: Text value to parse. :type parser: `callable` taking a `unicode` parameter :param parser: Callable to map over the delimited text values. :type delimiter: `unicode` :param delimiter: Delimiter text. :ty...
def options(self, request, response):
    """Process an `OPTIONS` request (cross-origin preflight).

    Advertises the available methods and answers 200 OK; per-request CORS
    handling happens elsewhere on every request.
    """
    allowed = ', '.join(self.meta.http_allowed_methods)
    response['Allowed'] = allowed
    response.status = http.client.OK
Process an `OPTIONS` request. Used to initiate a cross-origin request. All handling specific to CORS requests is done on every request however this method also returns a list of available methods.
def _pick_state_im_name(state_name, im_name, use_full_path=False): initial_dir = os.getcwd() if (state_name is None) or (im_name is None): wid = tk.Tk() wid.withdraw() if state_name is None: state_name = tkfd.askopenfilename( initialdir=initial_dir, title='Select pre-...
If state_name or im_name is None, picks them interactively through Tk, and then sets with or without the full path. Parameters ---------- state_name : {string, None} The name of the state. If None, selected through Tk. im_name : {string, None} The name of the image. ...
def load_background_sky_map(background_sky_map_path, background_sky_map_hdu, pixel_scale): if background_sky_map_path is not None: return ScaledSquarePixelArray.from_fits_with_pixel_scale(file_path=background_sky_map_path, hdu=background_sky_m...
Factory for loading the background sky from a .fits file. Parameters ---------- background_sky_map_path : str The path to the background_sky_map .fits file containing the background sky map \ (e.g. '/path/to/background_sky_map.fits'). background_sky_map_hdu : int The hdu the bac...
def flip(self, angle, center=None):
    """Flip the shape about a line through *center* at *angle* radians
    (counter-clockwise from the horizontal axis).

    Implemented by rotating the flip axis onto the y-axis, mirroring,
    and rotating back.
    """
    aligned = self.rotate(-angle, center=center)
    mirrored = aligned.flip_y(center=center)
    return mirrored.rotate(angle, center=center)
Flip the shape in an arbitrary direction. Parameters ---------- angle : array-like The angle, in radians counter-clockwise from the horizontal axis, defining the angle about which to flip the shape (of a line through `center`). center : array-like, optional ...
def deepcopy(self):
    """Create a copy of the segmentation map object.

    NOTE(review): ``self.arr`` is handed to the constructor directly —
    whether the array itself is duplicated depends on
    SegmentationMapOnImage; confirm if a fully independent copy is needed.
    """
    clone = SegmentationMapOnImage(
        self.arr, shape=self.shape, nb_classes=self.nb_classes)
    clone.input_was = self.input_was
    return clone
Create a deep copy of the segmentation map object. Returns ------- imgaug.SegmentationMapOnImage Deep copy.
def get_args(self): args = [self._query_string] if self._no_content: args.append('NOCONTENT') if self._fields: args.append('INFIELDS') args.append(len(self._fields)) args += self._fields if self._verbatim: args.append('VERBATIM'...
Format the redis arguments for this query and return them
def _retrieve_data(self, request=None): full_url = "%s%s" % (self.endpoint, request) self.self_link = request self.request = requests.get(url=full_url, headers=self.default_headers()) self.request.encoding = "utf-8" try: self.request.raise_for_status() except ...
Retrieve Zotero items via the API Combine endpoint and request to access the specific resource Returns a JSON document
def default(self, obj): if isinstance(obj, np.ndarray): if obj.flags['C_CONTIGUOUS']: obj_data = obj.data else: cont_obj = np.ascontiguousarray(obj) assert(cont_obj.flags['C_CONTIGUOUS']) obj_data = cont_obj.data ...
If input object is an ndarray it will be converted into a dict holding dtype, shape and the data, base64 encoded.
def grey_pal(start=0.2, end=0.8): gamma = 2.2 ends = ((0.0, start, start), (1.0, end, end)) cdict = {'red': ends, 'green': ends, 'blue': ends} grey_cmap = mcolors.LinearSegmentedColormap('grey', cdict) def continuous_grey_palette(n): colors = [] for x in np.linspace(start**gamma, end...
Utility for creating continuous grey scale palette Parameters ---------- start : float grey value at low end of palette end : float grey value at high end of palette Returns ------- out : function Continuous color palette that takes a single :class:`int` par...
def loop_iteration(self, timeout = 0.1):
    """Wait up to `timeout` seconds, re-raising any exception queued by
    the worker threads; return silently if none arrives in time.
    """
    try:
        # Blocking get; the queued item is (thread, exc_info) — keep only
        # the exc_info triple.
        exc_info = self.exc_queue.get(True, timeout)[1]
    except Queue.Empty:
        return
    exc_type, exc_value, ext_stack = exc_info
    # Python 2 three-expression raise: re-raises with the original traceback.
    raise exc_type, exc_value, ext_stack
Wait up to `timeout` seconds, raise any exception from the threads.
def get_popular_tournaments(self, **params: keys):
    """Get a list of the most queried tournaments.

    Optional ``keys``/``exclude``/``max`` keyword parameters filter which
    fields are included/excluded and cap the response size.
    """
    endpoint = self.api.POPULAR + '/tournament'
    return self._get_model(endpoint, PartialTournament, **params)
Get a list of most queried tournaments \*\*keys: Optional[list] = None Filter which keys should be included in the response \*\*exclude: Optional[list] = None Filter which keys should be excluded from the response \*\*max: Optional[int] = None ...
def draw_heatmap_array(self, image_shape, alpha_lines=1.0, alpha_points=1.0, size_lines=1, size_points=0, antialiased=True, raise_if_out_of_image=False): heatmap_lines = self.draw_lines_heatmap_array( image_shape, alpha=alpha_lines, ...
Draw the line segments and points of the line string as a heatmap array. Parameters ---------- image_shape : tuple of int The shape of the image onto which to draw the line mask. alpha_lines : float, optional Opacity of the line string. Higher values denote a mo...
def extract_oembeds(text, args=None): resource_type = width = height = None if args: dimensions = args.lower().split('x') if len(dimensions) in (3, 1): resource_type = dimensions.pop() if len(dimensions) == 2: width, height = map(lambda x: int(x), dimensions) ...
Extract oembed resources from a block of text. Returns a list of dictionaries. Max width & height can be specified: {% for embed in block_of_text|extract_oembeds:"400x300" %} Resource type can be specified: {% for photo_embed in block_of_text|extract_oembeds:"photo" %} Or both: {% for em...
def create_parser(subparsers): parser = subparsers.add_parser( 'update', help='Update a topology', usage="%(prog)s [options] cluster/[role]/[env] <topology-name> " + "[--component-parallelism <name:value>] " + "[--container-number value] " + "[--runtime-config [component:]<name:val...
Create the parse for the update command
def get_txn_vol(transactions): txn_norm = transactions.copy() txn_norm.index = txn_norm.index.normalize() amounts = txn_norm.amount.abs() prices = txn_norm.price values = amounts * prices daily_amounts = amounts.groupby(amounts.index).sum() daily_values = values.groupby(values.index).sum() ...
Extract daily transaction data from set of transaction objects. Parameters ---------- transactions : pd.DataFrame Time series containing one row per symbol (and potentially duplicate datetime indices) and columns for amount and price. Returns ------- pd.DataFrame ...
def from_string(cls, key, password='notasecret'): key = _helpers._from_bytes(key) marker_id, key_bytes = pem.readPemBlocksFromFile( six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER) if marker_id == 0: pkey = rsa.key.PrivateKey.load_pkcs1(key_bytes, ...
Construct an RsaSigner instance from a string. Args: key: string, private key in PEM format. password: string, password for private key file. Unused for PEM files. Returns: RsaSigner instance. Raises: ValueError if the key ...
def compose_gerrit(projects): git_projects = [project for project in projects if 'git' in projects[project]] for project in git_projects: repos = [repo for repo in projects[project]['git'] if 'gitroot' in repo] if len(repos) > 0: projects[project]['gerrit'] = [] for repo in r...
Compose projects.json for gerrit, but using the git lists change: 'http://git.eclipse.org/gitroot/xwt/org.eclipse.xwt.git' to: 'git.eclipse.org_xwt/org.eclipse.xwt :param projects: projects.json :return: projects.json with gerrit
def averageOnTimePerTimestep(vectors, numSamples=None): if vectors.ndim == 1: vectors.shape = (-1,1) numTimeSteps = len(vectors) numElements = len(vectors[0]) if numSamples is not None: import pdb; pdb.set_trace() countOn = numpy.random.randint(0, numElements, numSamples) vectors = vectors[:...
Computes the average on-time of the outputs that are on at each time step, and then averages this over all time steps. This metric is resiliant to the number of outputs that are on at each time step. That is, if time step 0 has many more outputs on than time step 100, it won't skew the results. This is particu...
def _build_endpoint_url(self, url, name=None):
    """Construct the full endpoint URL for *url* and snapshot *name*.

    Example:
        _build_endpoint_url('/users', '1')
        => 'http://firebase.localhost/users/1.json'
    """
    if not url.endswith(self.URL_SEPERATOR):
        url += self.URL_SEPERATOR
    suffix = name if name is not None else ''
    return '%s%s%s' % (urlparse.urljoin(self.dsn, url),
                       suffix, self.NAME_EXTENSION)
Method that constructs a full url with the given url and the snapshot name. Example: full_url = _build_endpoint_url('/users', '1') full_url => 'http://firebase.localhost/users/1.json'
def reverse(self):
    """Restore the target file's content to what it was before any changes."""
    original = self._original_target_content
    # Nothing saved (falsy) means nothing to restore.
    if not original:
        return
    with open(self.target, 'w') as handle:
        handle.write(original)
Restore the content of the target file to what it was before any changes were made.
def has_edge_within_group(self, group):
    """Checks whether there are within-group edges or not.

    :param group: key into ``self.nodes`` identifying the group to check.
    :returns: True if any simplified edge has both endpoints inside the
        group; otherwise returns None implicitly (falsy), matching the
        original contract.
    """
    assert group in self.nodes, \
        "{0} not one of the group of nodes".format(group)
    # Use a set for O(1) membership tests instead of scanning the list
    # twice per edge. Graph nodes are assumed hashable (they are dict-like
    # node identifiers) — TODO confirm against the node container used.
    members = set(self.nodes[group])
    for n1, n2 in self.simplified_edges():
        if n1 in members and n2 in members:
            return True
Checks whether there are within-group edges or not.
def get_classic_link(vpc, **conn): result = {} try: cl_result = describe_vpc_classic_link(VpcIds=[vpc["id"]], **conn)[0] result["Enabled"] = cl_result["ClassicLinkEnabled"] dns_result = describe_vpc_classic_link_dns_support(VpcIds=[vpc["id"]], **conn)[0] result["DnsEnabled"] = dn...
Gets the Classic Link details about a VPC
async def message_handler(self, data): message = self.build_message(data) if not message: logger.error( '[%s] Unable to build Message with data, data=%s, error', self.engine_name, data ) return logger.info('[%s] ...
For each new message, build its platform specific message object and get a response.
def _extract(data, session=None): if isinstance(data, list): return [_extract(d, session) for d in data] if not isinstance(data, np.ndarray): return data if isinstance(data, MatlabObject): cls = session._get_user_class(data.classname) return cls.from_value(data) i...
Convert the Octave values to values suitable for Python.
def _normalised_numpy(self): dx = (self.screen.width / float(len(self.points))) oy = (self.screen.height) points = np.array(self.points) - self.minimum points = points * 4.0 / self.extents * self.size.y for x, y in enumerate(points): yield Point(( dx *...
Normalised data points using numpy.
def derive_random_states(random_state, n=1):
    """Create N new random states based on an existing random state or seed.

    Parameters
    ----------
    random_state : numpy.random.RandomState
        Random state or seed from which to derive new random states.
    n : int, optional
        Number of random states to derive.

    Returns
    -------
    list of numpy.random.RandomState
        Derived random states.
    """
    # Draw a single base seed, then offset it per derived state.
    base_seed = random_state.randint(SEED_MIN_VALUE, SEED_MAX_VALUE, 1)[0]
    states = []
    for offset in sm.xrange(n):
        states.append(new_random_state(base_seed + offset))
    return states
Create N new random states based on an existing random state or seed. Parameters ---------- random_state : numpy.random.RandomState Random state or seed from which to derive new random states. n : int, optional Number of random states to derive. Returns ------- list of num...
def shift_time(start_time, mins) -> str:
    """Shift start time by mins

    Args:
        start_time: start time in terms of HH:MM string
        mins: number of minutes (+ / -)

    Returns:
        end time in terms of HH:MM string
    """
    s_time = pd.Timestamp(start_time)
    # Timedelta(minutes=...) handles negative and >59-minute offsets
    # directly, replacing the original np.sign/abs string round-trip
    # ('00:{abs(mins)}:00') with the same result and no numpy dependency.
    e_time = s_time + pd.Timedelta(minutes=mins)
    return e_time.strftime('%H:%M')
Shift start time by mins Args: start_time: start time in terms of HH:MM string mins: number of minutes (+ / -) Returns: end time in terms of HH:MM string
def filter_by_include_labels(self, issues): if not self.options.include_labels: return copy.deepcopy(issues) filtered_issues = [] include_labels = set(self.options.include_labels) for issue in issues: labels = [label["name"] for label in issue["labels"]] ...
Filter issues to include only issues with labels specified in include_labels. :param list(dict) issues: Pre-filtered issues. :rtype: list(dict) :return: Filtered issues.
def destroy(self):
    """Destroy the record."""
    endpoint = "domains/%s/records/%s" % (self.domain, self.id)
    return self.get_data(endpoint, type=DELETE)
Destroy the record
def merge_ph_times(times_list, times_par_list, time_block): offsets = np.arange(len(times_list)) * time_block cum_sizes = np.cumsum([ts.size for ts in times_list]) times = np.zeros(cum_sizes[-1]) times_par = np.zeros(cum_sizes[-1], dtype='uint8') i1 = 0 for i2, ts, ts_par, offset in zip(cum_size...
Build an array of timestamps joining the arrays in `times_list`. `time_block` is the duration of each array of timestamps.
def _nbytes(buf):
    """Return byte-size of a memoryview or buffer."""
    # Plain buffers report their size via len().
    if not isinstance(buf, memoryview):
        return len(buf)
    if PY3:
        return buf.nbytes
    # Python 2 memoryviews lack .nbytes: itemsize times the product
    # of all dimension extents.
    total = buf.itemsize
    for extent in buf.shape:
        total *= extent
    return total
Return byte-size of a memoryview or buffer.
def scale_in(self, blocks=0, machines=0, strategy=None):
    """Scale in resources

    Deletes up to `machines` server instances. `blocks` and `strategy`
    are accepted for interface compatibility but are not used here.

    :param machines: number of server instances to delete.
    :returns: number of instances deleted.
    """
    victims = self.client.servers.list()[:machines]
    for server in victims:
        print("Deleting : ", server)
        server.delete()
    return len(victims)
Scale in resources
def license_is_oa(license):
    """Return True if license is compatible with Open Access"""
    # any() short-circuits on the first matching pattern, like the
    # original early return, and yields exactly True/False.
    return any(re.search(pattern, license) for pattern in OA_LICENSES)
Return True if license is compatible with Open Access
def is_activated(self, images, augmenter, parents, default):
    """Returns whether an augmenter may be executed.

    Returns
    -------
    bool
        If True, the augmenter may be executed. If False, it may not
        be executed.
    """
    activator = self.activator
    # With no activator configured, fall back to the caller's default.
    return default if activator is None else activator(images, augmenter, parents, default)
Returns whether an augmenter may be executed. Returns ------- bool If True, the augmenter may be executed. If False, it may not be executed.
def encrypt(privkey, passphrase): if isinstance(privkey, str): privkey = PrivateKey(privkey) else: privkey = PrivateKey(repr(privkey)) privkeyhex = repr(privkey) addr = format(privkey.bitcoin.address, "BTC") a = _bytes(addr) salt = hashlib.sha256(hashlib.sha256(a).digest()).diges...
BIP0038 non-ec-multiply encryption. Returns BIP0038 encrypted privkey. :param privkey: Private key :type privkey: Base58 :param str passphrase: UTF-8 encoded passphrase for encryption :return: BIP0038 non-ec-multiply encrypted wif key :rtype: Base58