code
stringlengths
59
4.4k
docstring
stringlengths
5
7.69k
def clean_course(self): course_id = self.cleaned_data[self.Fields.COURSE].strip() if not course_id: return None try: client = EnrollmentApiClient() return client.get_course_details(course_id) except (HttpClientError, HttpServerError): raise...
Verify course ID and retrieve course details.
def preprocess_constraints(ml, cl, n): "Create a graph of constraints for both must- and cannot-links" ml_graph, cl_graph = {}, {} for i in range(n): ml_graph[i] = set() cl_graph[i] = set() def add_both(d, i, j): d[i].add(j) d[j].add(i) for (i, j) in ml: ml_gr...
Create a graph of constraints for both must- and cannot-links
def save_policy(self, path):
    """Serialize the current policy to ``path`` for later inspection.

    Args:
        path (str): Destination file; written in binary mode via pickle.
    """
    with open(path, 'wb') as policy_file:
        pickle.dump(self.policy, policy_file)
Pickles the current policy for later inspection.
def transform(self, X): X = check_array(X) X_rbf = np.empty_like(X) if self.copy else X X_in = X if not self.squared: np.power(X_in, 2, out=X_rbf) X_in = X_rbf if self.scale_by_median: scale = self.median_ if self.squared else self.median_ ** 2...
Turns distances into RBF values. Parameters ---------- X : array The raw pairwise distances. Returns ------- X_rbf : array of same shape as X The distances in X passed through the RBF kernel.
def create_rcontext(self, size, frame): if self.format == 'pdf': surface = cairo.PDFSurface(self._output_file(frame), *size) elif self.format in ('ps', 'eps'): surface = cairo.PSSurface(self._output_file(frame), *size) elif self.format == 'svg': surface = cair...
Called when CairoCanvas needs a cairo context to draw on
def load_by_pub_key(self, public_key):
    """Load an SSHKey object from DigitalOcean by its public key.

    Avoids problems such as uploading the same public key twice.

    :param public_key: the public key text to look up.
    :return: this object (loaded) on a match, otherwise None.
    """
    response = self.get_data("account/keys/")
    matches = (entry for entry in response['ssh_keys']
               if entry.get('public_key', "") == public_key)
    match = next(matches, None)
    if match is None:
        return None
    self.id = match['id']
    self.load()
    return self
This method will load an SSHKey object from DigitalOcean by its public_key. It avoids problems like uploading the same public_key twice.
def parse_litezip(path):
    """Parse a litezip directory into a sorted tuple of parsed structures.

    The collection itself is parsed first, then every module subdirectory
    (directories whose name starts with 'm').
    """
    modules = [
        parse_module(child)
        for child in path.iterdir()
        if child.is_dir() and child.name.startswith('m')
    ]
    return tuple(sorted([parse_collection(path)] + modules))
Parse a litezip file structure to a data structure given the path to the litezip directory.
def get_name():
    """Retrieve the instance name associated with the current host string.

    Scans running EC2 instances for one whose public DNS name matches
    ``env.host_string`` and returns its name tag.
    """
    if env.vm_type == EC2:
        for instance in get_all_running_ec2_instances():
            if env.host_string == instance.public_dns_name:
                name = instance.tags.get(env.vm_name_tag)
                return name
        # NOTE(review): implicitly returns None when no instance matches —
        # confirm callers handle a None name.
    else:
        # Only EC2 is supported for name lookup.
        raise NotImplementedError
Retrieves the instance name associated with the current host string.
def get_long_short_pos(positions): pos_wo_cash = positions.drop('cash', axis=1) longs = pos_wo_cash[pos_wo_cash > 0].sum(axis=1).fillna(0) shorts = pos_wo_cash[pos_wo_cash < 0].sum(axis=1).fillna(0) cash = positions.cash net_liquidation = longs + shorts + cash df_pos = pd.DataFrame({'long': long...
Determines the long and short allocations in a portfolio. Parameters ---------- positions : pd.DataFrame The positions that the strategy takes over time. Returns ------- df_long_short : pd.DataFrame Long and short allocations as a decimal percentage of the total net liq...
def jhk_to_sdssz(jmag, hmag, kmag):
    """Convert 2MASS J, H, Ks magnitudes to an SDSS z magnitude.

    Parameters
    ----------
    jmag, hmag, kmag : float
        2MASS J, H, Ks magnitudes of the object.

    Returns
    -------
    float
        The converted SDSS z band magnitude.
    """
    return convert_constants(
        jmag, hmag, kmag,
        SDSSZ_JHK, SDSSZ_JH, SDSSZ_JK, SDSSZ_HK,
        SDSSZ_J, SDSSZ_H, SDSSZ_K,
    )
Converts given J, H, Ks mags to an SDSS z magnitude value. Parameters ---------- jmag,hmag,kmag : float 2MASS J, H, Ks mags of the object. Returns ------- float The converted SDSS z band magnitude.
def _getClassifierRegion(self): if (self._netInfo.net is not None and "Classifier" in self._netInfo.net.regions): return self._netInfo.net.regions["Classifier"] else: return None
Returns reference to the network's Classifier region
def unlink_user(self, enterprise_customer, user_email): try: existing_user = User.objects.get(email=user_email) link_record = self.get(enterprise_customer=enterprise_customer, user_id=existing_user.id) link_record.delete() if update_user: update_us...
Unlink user email from Enterprise Customer. If :class:`django.contrib.auth.models.User` instance with specified email does not exist, :class:`.PendingEnterpriseCustomerUser` instance is deleted instead. Raises EnterpriseCustomerUser.DoesNotExist if instance of :class:`django.contrib.auth.model...
def freeze(proto_dataset_uri): proto_dataset = dtoolcore.ProtoDataSet.from_uri( uri=proto_dataset_uri, config_path=CONFIG_PATH ) num_items = len(list(proto_dataset._identifiers())) max_files_limit = int(dtoolcore.utils.get_config_value( "DTOOL_MAX_FILES_LIMIT", CONFIG_PAT...
Convert a proto dataset into a dataset. This step is carried out after all files have been added to the dataset. Freezing a dataset finalizes it with a stamp marking it as frozen.
def draw(self, time: float, frametime: float, target: moderngl.Framebuffer):
    """Per-frame draw callback invoked by the system while the effect is active.

    Args:
        time (float): The current time in seconds.
        frametime (float): Time the previous frame took to render, in seconds.
        target (moderngl.Framebuffer): The framebuffer to render into.

    Raises:
        NotImplementedError: always; subclasses must override this method.
    """
    raise NotImplementedError("draw() is not implemented")
Draw function called by the system every frame when the effect is active. This method raises ``NotImplementedError`` unless implemented. Args: time (float): The current time in seconds. frametime (float): The time the previous frame used to render in seconds. target ...
def data_to_tfrecord(images, labels, filename): if os.path.isfile(filename): print("%s exists" % filename) return print("Converting data into %s ..." % filename) writer = tf.python_io.TFRecordWriter(filename) for index, img in enumerate(images): img_raw = img.tobytes() la...
Save data into TFRecord.
def halfadder_gate(variables, vartype=dimod.BINARY, name='HALF_ADDER'): variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0, 0), (0, 1, 1, 0), (1, 0, 1, 0), (1, 1, 0, 1)]) else: ...
Half adder. Args: variables (list): Variable labels for the and gate as `[in1, in2, sum, carry]`, where `in1, in2` are inputs to be added and `sum` and 'carry' the resultant outputs. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input val...
def regenerate_good_tokens(string):
    """Part-of-speech tag ``string`` and return de-duplicated 2-4 grams of
    the resulting tag sequence, used to define grammatically correct POS
    tag sequences.
    """
    tokens = nltk.word_tokenize(string)
    tagged = nltk.pos_tag(tokens)
    tag_sequence = [pair[1] for pair in tagged]
    return f7(ngrams(tag_sequence, 2, 4))
Given an input string, part of speech tags the string, then generates a list of ngrams that appear in the string. Used to define grammatically correct part of speech tag sequences. Returns a list of part of speech tag sequences.
def fetch(self): from ..iq import Iq jid,node = self.address iq = Iq(to_jid = jid, stanza_type = "get") disco = self.disco_class(node) iq.add_content(disco.xmlnode) self.stream.set_response_handlers(iq,self.__response, self.__error, self.__timeout) ...
Initialize the Service Discovery process.
def convert_compound(mass, source, target, element):
    """Convert a mass of the source compound to the equivalent mass of the
    target compound, using ``element`` as the conservation basis.

    :param mass: Mass of the source compound. [kg]
    :param source: Formula and phase of the source compound, e.g. 'Fe2O3[S1]'.
    :param target: Formula and phase of the target compound, e.g. 'Fe[S1]'.
    :param element: Basis element, e.g. 'Fe'.
    :returns: Equivalent mass of the target compound. [kg]
    """
    fraction_in_target = element_mass_fraction(target, element)
    if fraction_in_target == 0.0:
        # Target contains none of the basis element; nothing can convert.
        return 0.0
    fraction_in_source = element_mass_fraction(source, element)
    return mass * fraction_in_source / fraction_in_target
Convert the specified mass of the source compound to the target using element as basis. :param mass: Mass of from_compound. [kg] :param source: Formula and phase of the original compound, e.g. 'Fe2O3[S1]'. :param target: Formula and phase of the target compound, e.g. 'Fe[S1]'. :param element:...
def is_connected(self):
    """Return a bool indicating whether the device is currently connected."""
    try:
        self.exec_command(b"Query(ConnectionState)")
        state = self.status.connection_state
        return state.startswith(b"C(")
    except NotConnectedException:
        return False
Return bool indicating connection state
def load_class(path):
    """Dynamically load and return a class given 'package.Class'."""
    module_name, class_name = path.rsplit('.', 1)
    return getattr(import_module(module_name), class_name)
dynamically load a class given a string of the format package.Class
def compose_projects_json(projects, data):
    """Compose projects.json with all data sources.

    :param projects: projects.json
    :param data: eclipse JSON
    :return: projects.json with all data sources
    """
    for composer in (compose_git, compose_mailing_lists,
                     compose_bugzilla, compose_github):
        projects = composer(projects, data)
    projects = compose_gerrit(projects)
    projects = compose_mbox(projects)
    return projects
Compose projects.json with all data sources :param projects: projects.json :param data: eclipse JSON :return: projects.json with all data sources
def model_returns_t_alpha_beta(data, bmark, samples=2000, progressbar=True): data_bmark = pd.concat([data, bmark], axis=1).dropna() with pm.Model() as model: sigma = pm.HalfCauchy( 'sigma', beta=1) nu = pm.Exponential('nu_minus_two', 1. / 10.) X = data_bmark.iloc[...
Run Bayesian alpha-beta-model with T distributed returns. This model estimates intercept (alpha) and slope (beta) of two return sets. Usually, these will be algorithm returns and benchmark returns (e.g. S&P500). The data is assumed to be T distributed and thus is robust to outliers and takes tail event...
def _collapse_edge_by_namespace(graph: BELGraph, victim_namespaces: Strings, survivor_namespaces: str, relations: Strings) -> None: relation_filter = build_relation_predicate(relations) source_namespace_filter = buil...
Collapse pairs of nodes with the given namespaces that have the given relationship. :param graph: A BEL Graph :param victim_namespaces: The namespace(s) of the node to collapse :param survivor_namespaces: The namespace of the node to keep :param relations: The relation(s) to search
def get(self, key, default=None):
    """Return the value for ``key`` if present, otherwise ``default``.

    :param key: lookup key
    :param default: value returned when ``key`` is absent
    """
    return self[key] if key in self else default
Return the key if exists or a default value :param str value: Value :param str default: Default value if key not present
def _parse_string(self, xml): if not isinstance(xml, HTMLElement): xml = dhtmlparser.parseString(str(xml)) record = xml.find("record") if not record: raise ValueError("There is no <record> in your MARC XML document!") record = record[0] self.oai_marc = len...
Parse MARC XML document to dicts, which are contained in self.controlfields and self.datafields. Args: xml (str or HTMLElement): input data Also detect if this is oai marc format or not (see self.oai_marc).
def sendmsg(self, message, recipient_mobiles=[], url='http://services.ambientmobile.co.za/sms', concatenate_message=True, message_id=str(time()).replace(".", ""), reply_path=None, allow_duplicates=True, ...
Send a message via the AmbientSMS API server
def add_tags(self, item, *tags): try: assert item["data"]["tags"] except AssertionError: item["data"]["tags"] = list() for tag in tags: item["data"]["tags"].append({"tag": "%s" % tag}) assert self.check_items([item]) return self.update_item(ite...
Add one or more tags to a retrieved item, then update it on the server Accepts a dict, and one or more tags to add to it Returns the updated item from the server
def estimate(self): self.mul(300) self.Cpig(300) estimates = {'Tb': self.Tb(self.counts), 'Tm': self.Tm(self.counts), 'Tc': self.Tc(self.counts, self.Tb_estimated), 'Pc': self.Pc(self.counts, self.atom_count), '...
Method to compute all available properties with the Joback method; returns their results as a dict. For the temperature-dependent values Cpig and mul, both the coefficients and objects to perform calculations are returned.
def split_code_and_text_blocks(source_file): docstring, rest_of_content = get_docstring_and_rest(source_file) blocks = [('text', docstring)] pattern = re.compile( r'(?P<header_line>^ flags=re.M) pos_so_far = 0 for match in re.finditer(pattern, rest_of_content): match_start_po...
Return list with source file separated into code and text blocks. Returns ------- blocks : list of (label, content) List where each element is a tuple with the label ('text' or 'code'), and content string of block.
def derep_concat_split(data, sample, nthreads, force): LOGGER.info("INSIDE derep %s", sample.name) mergefile = os.path.join(data.dirs.edits, sample.name+"_merged_.fastq") if not force: if not os.path.exists(mergefile): sample.files.edits = concat_multiple_edits(data, sample) else...
Running on remote Engine. Refmaps, then merges, then dereplicates, then denovo clusters reads.
def _get_filter_field(field_name, field_value): filter_field = None if isinstance(field_value, ValueRange): range_values = {} if field_value.lower: range_values.update({"gte": field_value.lower_string}) if field_value.upper: range_values.update({"lte": field_value...
Return field to apply into filter, if an array then use a range, otherwise look for a term match
def to_line_string(self, closed=True): from imgaug.augmentables.lines import LineString if not closed or len(self.exterior) <= 1: return LineString(self.exterior, label=self.label) return LineString( np.concatenate([self.exterior, self.exterior[0:1, :]], axis=0), ...
Convert this polygon's `exterior` to a ``LineString`` instance. Parameters ---------- closed : bool, optional Whether to close the line string, i.e. to add the first point of the `exterior` also as the last point at the end of the line string. This has no eff...
def set_name(self, name):
    """Set the room name; only admin users may rename.

    Args:
        name (str): Name

    Returns:
        bool. Success
    """
    current_user = self._campfire.get_user()
    if not current_user.admin:
        return False
    response = self._connection.put("room/%s" % self.id, {"room": {"name": name}})
    if response["success"]:
        self._load()
    return response["success"]
Set the room name. Args: name (str): Name Returns: bool. Success
def base_url(self):
    """Base URL used to construct the final URL to query against.

    :returns: A URL of the form ``proto://host:port/path``.
    :rtype: str
    """
    return '%s://%s:%s%s' % (self.protocol, self.host, self.port, self.url_path)
A base_url that will be used to construct the final URL we're going to query against. :returns: A URL of the form: ``proto://host:port``. :rtype: :obj:`string`
def stop_apps_or_services(app_or_service_names=None, rm_containers=False): if app_or_service_names: log_to_client("Stopping the following apps or services: {}".format(', '.join(app_or_service_names))) else: log_to_client("Stopping all running containers associated with Dusty") compose.stop_r...
Stop any currently running Docker containers associated with Dusty, or associated with the provided apps_or_services. Does not remove the service's containers.
def config_to_args(config): result = [] for key, value in iteritems(config): if value is False: continue key = '--{0}'.format(key.replace('_', '-')) if isinstance(value, (list, set, tuple)): for item in value: result.extend((key, smart_str(item))) ...
Convert config dict to arguments list. :param config: Configuration dict.
def register_fetcher(self, object_class, fetcher_class): self._lock.acquire() try: cache = self._caches.get(object_class) if not cache: cache = Cache(self.max_items, self.default_freshness_period, self.default_expiration_period, self.defaul...
Register a fetcher class for an object class. :Parameters: - `object_class`: class to be retrieved by the fetcher. - `fetcher_class`: the fetcher class. :Types: - `object_class`: `classobj` - `fetcher_class`: `CacheFetcher` based class
def custom(cls, customgrouper): if customgrouper is None: raise TypeError("Argument to custom() must be ICustomGrouping instance or classpath") if not isinstance(customgrouper, ICustomGrouping) and not isinstance(customgrouper, str): raise TypeError("Argument to custom() must be ICustomGrouping inst...
Custom grouping from a given implementation of ICustomGrouping :param customgrouper: The ICustomGrouping implementation to use
def _validate_schema(obj): if obj is not None and not isinstance(obj, Schema): raise IncompatibleSchema('Schema must be of type {0}'.format(Schema)) return obj
Ensures the passed schema instance is compatible :param obj: object to validate :return: obj :raises: - IncompatibleSchema if the passed schema is of an incompatible type
def _printAvailableCheckpoints(experimentDir): checkpointParentDir = getCheckpointParentDir(experimentDir) if not os.path.exists(checkpointParentDir): print "No available checkpoints." return checkpointDirs = [x for x in os.listdir(checkpointParentDir) if _isCheckpointDir(os.path.join(...
List available checkpoints for the specified experiment.
def update_oai_info(self): for field in record_get_field_instances(self.record, '909', ind1="C", ind2="O"): new_subs = [] for tag, value in field[0]: if tag == "o": new_subs.append(("a", value)) else: new_subs.append...
Add the 909 OAI info to 035.
def process_configuration_success(self, stanza):
    """Process a success response for a room configuration request.

    :Parameters:
        - `stanza`: the stanza received (only the success signal matters).
    """
    del stanza  # content is not inspected; only success is acknowledged
    self.configured = True
    self.handler.room_configured()
Process success response for a room configuration request. :Parameters: - `stanza`: the stanza received. :Types: - `stanza`: `Presence`
def _createPeriodicActivities(self): periodicActivities = [] class MetricsReportCb(object): def __init__(self, taskRunner): self.__taskRunner = taskRunner return def __call__(self): self.__taskRunner._getAndEmitExperimentMetrics() reportMetrics = PeriodicActivityRequest( ...
Creates and returns a list of activites for this TaskRunner instance Returns: a list of PeriodicActivityRequest elements
def newcursor(self, dictcursor=False): handle = hashlib.sha256(os.urandom(12)).hexdigest() if dictcursor: self.cursors[handle] = self.connection.cursor( cursor_factory=psycopg2.extras.DictCursor ) else: self.cursors[handle] = self.connection.cu...
This creates a DB cursor for the current DB connection using a randomly generated handle. Returns a tuple with cursor and handle. Parameters ---------- dictcursor : bool If True, returns a cursor where each returned row can be addressed as a dictionary by column...
def create(self): params = { "name": self.name, "type": self.type, "dns_names": self.dns_names, "private_key": self.private_key, "leaf_certificate": self.leaf_certificate, "certificate_chain": self.certificate_chain } data =...
Create the Certificate
def _bundle_exists(self, path): for attached_bundle in self._attached_bundles: if path == attached_bundle.path: return True return False
Checks if a bundle exists at the provided path :param path: Bundle path :return: bool
def _validate_subnet_cidr(context, network_id, new_subnet_cidr): if neutron_cfg.cfg.CONF.allow_overlapping_ips: return try: new_subnet_ipset = netaddr.IPSet([new_subnet_cidr]) except TypeError: LOG.exception("Invalid or missing cidr: %s" % new_subnet_cidr) raise n_exc.BadRequ...
Validate the CIDR for a subnet. Verifies the specified CIDR does not overlap with the ones defined for the other subnets specified for this network, or with any other CIDR if overlapping IPs are disabled.
def oauth2_callback(request): if 'error' in request.GET: reason = request.GET.get( 'error_description', request.GET.get('error', '')) reason = html.escape(reason) return http.HttpResponseBadRequest( 'Authorization failed {0}'.format(reason)) try: encoded_s...
View that handles the user's return from OAuth2 provider. This view verifies the CSRF state and OAuth authorization code, and on success stores the credentials obtained in the storage provider, and redirects to the return_url specified in the authorize view and stored in the session. Args: ...
def is_action_available(self, action):
    """Determine whether executing ``action`` would change the state.

    The board is rotated so the action maps onto a left move, then the
    left-move availability check is applied.
    """
    rotated = np.rot90(self._state, action)
    return self._is_action_available_left(rotated)
Determines whether action is available. That is, executing it would change the state.
def agitate(self):
    """Jiggle every permutation variable so this particle likely moves to a
    new position; each call agitates by an even greater amount.

    Returns: None
    """
    # Python 2 dict iteration, as used throughout this codebase.
    for _name, permute_var in self.permuteVars.iteritems():
        permute_var.agitate()
    self.newPosition()
Agitate this particle so that it is likely to go to a new position. Every time agitate is called, the particle is jiggled an even greater amount. Parameters: -------------------------------------------------------------- retval: None
def get_object(cls, api_token, ssh_key_id):
    """Return an SSHKey object loaded from the API by its ID."""
    key = cls(token=api_token, id=ssh_key_id)
    key.load()
    return key
Class method that will return a SSHKey object by ID.
def is_redirecting(path):
    """Return True if ``path`` contains a .cpenv redirect file."""
    redirect_file = unipath(path, '.cpenv')
    return os.path.exists(redirect_file) and os.path.isfile(redirect_file)
Returns True if path contains a .cpenv file
def is_text(self):
    """Tell whether this message is a text message.

    Returns:
        bool. True for paste, text or tweet message types.
    """
    text_types = (self._TYPE_PASTE, self._TYPE_TEXT, self._TYPE_TWEET)
    return self.type in text_types
Tells if this message is a text message. Returns: bool. Success
def credentials(self): ctx = _app_ctx_stack.top if not hasattr(ctx, _CREDENTIALS_KEY): ctx.google_oauth2_credentials = self.storage.get() return ctx.google_oauth2_credentials
The credentials for the current user or None if unavailable.
def from_uint8(arr_uint8, shape, min_value=0.0, max_value=1.0): arr_0to1 = arr_uint8.astype(np.float32) / 255.0 return HeatmapsOnImage.from_0to1(arr_0to1, shape, min_value=min_value, max_value=max_value)
Create a heatmaps object from an heatmap array containing values ranging from 0 to 255. Parameters ---------- arr_uint8 : (H,W) ndarray or (H,W,C) ndarray Heatmap(s) array, where ``H`` is height, ``W`` is width and ``C`` is the number of heatmap channels. Expected dtype ...
def f_theta(cos_theta, zint, z, n2n1=0.95, sph6_ab=None, **kwargs): wvfront = (np.outer(np.ones_like(z)*zint, cos_theta) - np.outer(zint+z, csqrt(n2n1**2-1+cos_theta**2))) if (sph6_ab is not None) and (not np.isnan(sph6_ab)): sec2_theta = 1.0/(cos_theta*cos_theta) wvfront += sph6_ab ...
Returns the wavefront aberration for an aberrated, defocused lens. Calculates the portions of the wavefront distortion due to z, theta only, for a lens with defocus and spherical aberration induced by coverslip mismatch. (The rho portion can be analytically integrated to Bessels.) Parameters -...
def parse_time(block_time):
    """Parse a blockchain time string into a UTC-aware datetime object."""
    parsed = datetime.strptime(block_time, timeFormat)
    return parsed.replace(tzinfo=timezone.utc)
Take a string representation of time from the blockchain, and parse it into datetime object.
def sample(field, inds=None, slicer=None, flat=True):
    """Take a sample from ``field`` given flat indices or a shaped slice.

    Parameters
    ----------
    inds : list of indices
        One-dimensional (raveled) indices to return from the field.
    slicer : slice object
        A shaped slicer selecting a section of the field.
    flat : boolean
        Whether to flatten the result before returning.
    """
    if inds is not None:
        selected = field.ravel()[inds]
    elif slicer is not None:
        selected = field[slicer].ravel()
    else:
        selected = field
    return selected.ravel() if flat else selected
Take a sample from a field given flat indices or a shaped slice Parameters ----------- inds : list of indices One dimensional (raveled) indices to return from the field slicer : slice object A shaped (3D) slicer that returns a section of image flat : boolean Whether to fla...
def send_message(self): start = time.time() message = None if not self.initialized: message = self.construct_start_message() self.initialized = True else: message = self.construct_end_message() self.send_UDP_message(message) end = time....
Send message over UDP. If tracking is disables, the bytes_sent will always be set to -1 Returns: (bytes_sent, time_taken)
def publish(self, pid=None, id_=None): pid = pid or self.pid if not pid.is_registered(): raise PIDInvalidAction() self['_deposit']['status'] = 'published' if self['_deposit'].get('pid') is None: self._publish_new(id_=id_) else: record = self._p...
Publish a deposit. If it's the first time: * it calls the minter and set the following meta information inside the deposit: .. code-block:: python deposit['_deposit'] = { 'type': pid_type, 'value': pid_value, 'revision_i...
def _as_dict(self, r): d = dict() for i, f in enumerate(self._field_names): d[f] = r[i] if i < len(r) else None return d
Convert the record to a dictionary using field names as keys.
def get_connection(module_name: str, connection: Optional[str] = None) -> str: if connection is not None: return connection module_name = module_name.lower() module_config_cls = get_module_config_cls(module_name) module_config = module_config_cls.load() return module_config.connection or con...
Return the SQLAlchemy connection string if it is set. Order of operations: 1. Return the connection if given as a parameter 2. Check the environment for BIO2BEL_{module_name}_CONNECTION 3. Look in the bio2bel config file for module-specific connection. Create if doesn't exist. Check the module-...
def circular(cls, shape, pixel_scale, radius_arcsec, centre=(0., 0.), invert=False): mask = mask_util.mask_circular_from_shape_pixel_scale_and_radius(shape, pixel_scale, radius_arcsec, centre) if invert: mask = np.invert(mask) ...
Setup a mask where unmasked pixels are within a circle of an input arc second radius and centre. Parameters ---------- shape: (int, int) The (y,x) shape of the mask in units of pixels. pixel_scale: float The arc-second to pixel conversion factor of each pixel. ...
def _time_independent_equals(a, b): if len(a) != len(b): return False result = 0 if isinstance(a[0], int): for x, y in zip(a, b): result |= x ^ y else: for x, y in zip(a, b): result |= ord(x) ^ ord(y) return result == 0
This compares two values in constant time. Taken from tornado: https://github.com/tornadoweb/tornado/blob/ d4eb8eb4eb5cc9a6677e9116ef84ded8efba8859/tornado/web.py#L3060
def logs(self, prefix='worker'): logs = [] logs += [('success_rate', np.mean(self.success_history))] if self.compute_Q: logs += [('mean_Q', np.mean(self.Q_history))] logs += [('episode', self.n_episodes)] if prefix != '' and not prefix.endswith('/'): retur...
Generates a dictionary that contains all collected statistics.
def count(self, *args, **kwargs): search = self.create_search(*args, **kwargs) try: return search.count() except NotFoundError: print_error("The index was not found, have you initialized the index?") except (ConnectionError, TransportError): print_erro...
Returns the number of results after filtering with the given arguments.
def generate(self): tar_bytes = BytesIO() tar = tarfile.open(fileobj=tar_bytes, mode='w') self._generate_contents(tar) self._process_files(tar) tar.close() tar_bytes.seek(0) gzip_bytes = BytesIO() gz = gzip.GzipFile(fileobj=gzip_bytes, mode='wb', mtime=0) ...
Returns a ``BytesIO`` instance representing an in-memory tar.gz archive containing the native router configuration. :returns: in-memory tar.gz archive, instance of ``BytesIO``
def parent_callback(self, parent_fu):
    """Executor-future callback: copy the executor future's outcome onto
    this parent future.

    Args:
        - parent_fu (Future): future returned by the executor.

    Returns:
        - None. Updates super() with the result() or exception().
    """
    if parent_fu.done() is not True:
        return
    error = parent_fu._exception
    if error:
        super().set_exception(error)
    else:
        super().set_result(self.file_obj)
    return
Callback from executor future to update the parent. Args: - parent_fu (Future): Future returned by the executor along with callback Returns: - None Updates the super() with the result() or exception()
def get_prefix(self, include_version=True): host = settings.host if '://' not in host: host = 'https://%s' % host.strip('/') elif host.startswith('http://') and settings.verify_ssl: raise exc.TowerCLIError( 'Can not verify ssl with non-https protocol. Chan...
Return the appropriate URL prefix to prepend to requests, based on the host provided in settings.
def sys_deallocate(self, cpu, addr, size): logger.info("DEALLOCATE(0x%08x, %d)" % (addr, size)) if addr & 0xfff != 0: logger.info("DEALLOCATE: addr is not page aligned") return Decree.CGC_EINVAL if size == 0: logger.info("DEALLOCATE:length is zero") ...
deallocate - remove allocations The deallocate system call deletes the allocations for the specified address range, and causes further references to the addresses within the range to generate invalid memory accesses. The region is also automatically deallocated when the process is term...
def _sm_cleanup(self, *args, **kwargs): if self._done_notification_func is not None: self._done_notification_func() self._timer.cancel()
Delete all state associated with the chaos session
def integral_approx_estimator(x, y): a, b = (0., 0.) x = np.array(x) y = np.array(y) idx, idy = (np.argsort(x), np.argsort(y)) for x1, x2, y1, y2 in zip(x[[idx]][:-1], x[[idx]][1:], y[[idx]][:-1], y[[idx]][1:]): if x1 != x2 and y1 != y2: a = a + np.log(np.abs((y2 - y1) / (x2 - x1...
Integral approximation estimator for causal inference. :param x: input variable x 1D :param y: input variable y 1D :return: Return value of the IGCI model >0 if x->y otherwise if return <0
def evaluate(self, repo, spec, args): status = [] if len(spec['files']) == 0: return status with cd(repo.rootdir): rules = None if 'rules-files' in spec and len(spec['rules-files']) > 0: rulesfiles = spec['rules-files'] rule...
Evaluate the files identified for checksum.
def swap_buffers(self):
    """Count a frame; headless windows have no double buffering, so this
    only advances the frame counter and closes the window once the
    configured headless frame budget is reached."""
    self.frames += 1
    budget_reached = self.headless_frames and self.frames >= self.headless_frames
    if budget_reached:
        self.close()
Headless window currently don't support double buffering. We only increment the frame counter here.
def spend_key(self):
    """Return the private spend key, or None if the wallet is view-only.

    :rtype: str or None
    """
    key = self._backend.spend_key()
    return None if key == numbers.EMPTY_KEY else key
Returns private spend key. None if wallet is view-only. :rtype: str or None
def put(self, task, *args, **kwargs):
    """Put a single task and its arguments into the dropbox.

    For many tasks, `put_multiple()` is typically faster than calling
    this method repeatedly.

    Parameters
    ----------
    task : a function
        A function to be executed
    args : list
        Positional arguments for ``task``
    kwargs : dict
        Keyword arguments for ``task``
    """
    if not self.isopen:
        logging.getLogger(__name__).warning('the drop box is not open')
        return
    return self.dropbox.put(TaskPackage(task=task, args=args, kwargs=kwargs))
put a task and its arguments If you need to put multiple tasks, it can be faster to put multiple tasks with `put_multiple()` than to use this method multiple times. Parameters ---------- task : a function A function to be executed args : list ...
def get_traffic_meter(self): _LOGGER.info("Get traffic meter") def parse_text(text): def tofloats(lst): return (float(t) for t in lst) try: if "/" in text: return tuple(tofloats(text.split('/'))) elif ":" in text: ...
Return dict of traffic meter stats. Returns None if error occurred.
def console_output(self, instance=None): if instance is None: instance = self.instance() for stage in instance['stages']: for job in stage['jobs']: if job['result'] not in self.final_results: continue artifact = self.artifact( ...
Yields the output and metadata from all jobs in the pipeline Args: instance: The result of a :meth:`instance` call, if not supplied the latest of the pipeline will be used. Yields: tuple: (metadata (dict), output (str)). metadata contains: - pipel...
def T_dependent_property_integral(self, T1, T2): r Tavg = 0.5*(T1+T2) if self.method: if self.test_method_validity(Tavg, self.method): try: return self.calculate_integral(T1, T2, self.method) except: pass ...
r'''Method to calculate the integral of a property with respect to temperature, using a specified method. Methods found valid by `select_valid_methods` are attempted until a method succeeds. If no methods are valid and succeed, None is returned. Calls `calculate_integral` inte...
def validate(method):
    """Decorator: validate configuration option names against
    ``self.allowed_opts``.

    :raises ValueError: when the option name is not supported.
    """
    message = 'configuration option "{}" is not supported'

    @functools.wraps(method)
    def wrapper(self, name, *args):
        if name not in self.allowed_opts:
            raise ValueError(message.format(name))
        return method(self, name, *args)

    return wrapper
Config option name value validator decorator.
def _usage(prog_name=os.path.basename(sys.argv[0])): spacer = ' ' * len('usage: ') usage = prog_name + ' -b LIST [-S SEPARATOR] [file ...]\n' \ + spacer + prog_name + ' -c LIST [-S SEPERATOR] [file ...]\n' \ + spacer + prog_name \ + ' -f LIST [-d DELIM] [-e] [-S SEPERATOR] [-s] [file ...]' ...
Returns usage string with no trailing whitespace.
def compat_convertHashedIndexes(self, objs, conn=None): if conn is None: conn = self._get_connection() fields = [] for indexedField in self.indexedFields: origField = self.fields[indexedField] if 'hashIndex' not in origField.__class__.__new__.__code__.co_varnames: continue if indexedField.hashInde...
compat_convertHashedIndexes - Reindex all fields for the provided objects, where the field value is hashed or not. If the field is unhashable, do not allow. NOTE: This works one object at a time. It is intended to be used while your application is offline, as it doesn't make sense to be changing your model ...
def delete_switch(apps, schema_editor):
    """Delete the `role_based_access_control` waffle switch."""
    switch_model = apps.get_model('waffle', 'Switch')
    switch_model.objects.filter(
        name=ENTERPRISE_ROLE_BASED_ACCESS_CONTROL_SWITCH
    ).delete()
Delete the `role_based_access_control` switch.
def execute_nonstop_tasks(self, tasks_cls):
    """Thin wrapper around ``execute_batch_tasks`` that pulls the sleep and
    minimum-update delays from the configuration."""
    sleep_for = self.conf['sortinghat']['sleep_for']
    min_delay = self.conf['general']['min_update_delay']
    self.execute_batch_tasks(tasks_cls, sleep_for, min_delay, False)
Just a wrapper to the execute_batch_tasks method
def reset_parameters(self):
    """Re-initialize the weight (and bias, if any) uniformly in
    ``[-1/sqrt(fan_in), 1/sqrt(fan_in)]`` where fan_in is weight dim 1."""
    bound = 1. / math.sqrt(self.weight.size(1))
    self.weight.data.uniform_(-bound, bound)
    if self.bias is not None:
        self.bias.data.uniform_(-bound, bound)
Reset the parameters.
def strip_codes(s: Any) -> str:
    """Strip all color codes from a string.

    Returns an empty string for "falsey" inputs, except 0 which is treated
    as a real value and formatted.
    """
    text = str(s) if (s or s == 0) else ''
    return codepat.sub('', text)
Strip all color codes from a string. Returns empty string for "falsey" inputs.
def seek(self, relative_position):
    """Seek the video by ``relative_position`` seconds.

    Args:
        relative_position (float): The position in seconds to seek to.
    """
    microseconds = 1000.0 * 1000 * relative_position
    self._player_interface.Seek(Int64(microseconds))
    self.seekEvent(self, relative_position)
Seek the video by `relative_position` seconds Args: relative_position (float): The position in seconds to seek to.
def merge_with_published(self): pid, first = self.fetch_published() lca = first.revisions[self['_deposit']['pid']['revision_id']] args = [lca.dumps(), first.dumps(), self.dumps()] for arg in args: del arg['$schema'], arg['_deposit'] args.append({}) m = Merger(...
Merge changes with latest published version.
def parse_unique_urlencoded(content): urlencoded_params = urllib.parse.parse_qs(content) params = {} for key, value in six.iteritems(urlencoded_params): if len(value) != 1: msg = ('URL-encoded content contains a repeated value:' '%s -> %s' % (key, ', '.join(value))) ...
Parses unique key-value parameters from urlencoded content. Args: content: string, URL-encoded key-value pairs. Returns: dict, The key-value pairs from ``content``. Raises: ValueError: if one of the keys is repeated.
def _issue_cert(self, domain): def errback(failure): failure.trap(txacme_ServerError) acme_error = failure.value.message if acme_error.code in ['rateLimited', 'serverInternal', 'connection', 'unknownHost']: self.log.error( ...
Issue a certificate for the given domain.
def get_all_locations(self, timeout: int = None):
    """Get a list of all locations.

    Parameters
    ----------
    timeout: Optional[int] = None
        Per-call timeout that overrides ``Client.timeout``.
    """
    return self._get_model(self.api.LOCATIONS, timeout=timeout)
Get a list of all locations Parameters ---------- timeout: Optional[int] = None Custom timeout that overrides Client.timeout
def set_fan_power(self, power): if power > 255: raise ValueError("The fan power should be a single byte (0-255).") a = self.cnxn.xfer([0x42])[0] sleep(10e-3) b = self.cnxn.xfer([0x00])[0] c = self.cnxn.xfer([power])[0] sleep(0.1) return True if a == 0x...
Set only the Fan power. :param power: Fan power value as an integer between 0-255. :type power: int :rtype: boolean :Example: >>> alpha.set_fan_power(255) True
def transform_launch_points(self, content_metadata_item): return [{ 'providerID': self.enterprise_configuration.provider_id, 'launchURL': content_metadata_item['enrollment_url'], 'contentTitle': content_metadata_item['title'], 'contentID': self.get_content_id(cont...
Return the content metadata item launch points. SAPSF allows you to transmit an array of content launch points which are meant to represent sections of a content item which a learner can launch into from SAPSF. Currently, we only provide a single launch point for a content item.
def getScript(self, scriptname): script = self.description.get('scripts', {}).get(scriptname, None) if script is not None: if isinstance(script, str) or isinstance(script, type(u'unicode string')): import shlex script = shlex.split(script) if len(s...
Return the specified script command. If the first part of the command is a .py file, then the current python interpreter is prepended. If the script is a single string, rather than an array, it is shlex-split.
def render_to_string(template, extra=None): from jinja2 import Template extra = extra or {} final_fqfn = find_template(template) assert final_fqfn, 'Template not found: %s' % template template_content = open(final_fqfn, 'r').read() t = Template(template_content) if extra: context = e...
Renders the given template to a string.
def get_key(key_name, value_name, jsonify, no_decrypt, stash, passphrase, backend): if value_name and no_decrypt: sys.exit('VALUE_NAME cannot be used in conjuction with --no-decrypt') stash = _get_stash(backend, stash, passphrase, q...
Retrieve a key from the stash \b `KEY_NAME` is the name of the key to retrieve `VALUE_NAME` is a single value to retrieve e.g. if the value of the key `test` is `a=b,b=c`, `ghost get test a` will return `b`
def get_days_span(self, month_index): is_first_month = month_index == 0 is_last_month = month_index == self.__len__() - 1 y = int(self.start_date.year + (self.start_date.month + month_index) / 13) m = int((self.start_date.month + month_index) % 12 or 12) total = calendar.monthran...
Calculate how many days the month spans.
def get_new_apikey(lcc_server): USERHOME = os.path.expanduser('~') APIKEYFILE = os.path.join(USERHOME, '.astrobase', 'lccs', 'apikey-%s' % lcc_server.replace( 'https://', ...
This gets a new API key from the specified LCC-Server. NOTE: this only gets an anonymous API key. To get an API key tied to a user account (and associated privilege level), see the `import_apikey` function below. Parameters ---------- lcc_server : str The base URL of the LCC-Server fr...
def inject_method(self, func, name=None):
    """Bind *func* to *self* as an instance method.

    The main use case is monkey patching: while sometimes necessary, it
    should generally be avoided, so consider whether a better design exists.

    Args:
        self (object): instance to attach the method to.
        func (callable): function to bind; receives the instance as its
            first argument when called.
        name (str, optional): attribute name to install the bound method
            under; defaults to ``func.__name__``.
    """
    attr_name = func.__name__ if name is None else name
    # Invoke the descriptor protocol explicitly to produce a bound method.
    bound_method = func.__get__(self, self.__class__)
    setattr(self, attr_name, bound_method)
Injects a function into an object instance as a bound method The main use case of this function is for monkey patching. While monkey patching is sometimes necessary it should generally be avoided. Thus, we simply remind the developer that there might be a better way. Args: self (object): insta...