code
stringlengths
59
4.4k
docstring
stringlengths
5
7.69k
def fit(self, X, y=None): n = X.shape[0] if X.shape != (n, n): raise TypeError("Input must be a square matrix.") memory = get_memory(self.memory) vals, vecs = memory.cache(scipy.linalg.eigh, ignore=['overwrite_a'])( X, overwrite_a=not self.copy) vals = val...
Learn the linear transformation to flipped eigenvalues. Parameters ---------- X : array, shape [n, n] The *symmetric* input similarities. If X is asymmetric, it will be treated as if it were symmetric based on its lower-triangular part.
def build_twisted_request(self, method, url, extra_headers={}, body_producer=None, full_url=False): uri = url if full_url else self._url(url) raw_headers = self.get_headers() if extra_headers: raw_headers.update(extra_headers) headers = http_headers.Headers() for head...
Build a request for twisted Args: method (str): Request method (GET/POST/PUT/DELETE/etc.) If not specified, it will be POST if post_data is not None url (str): Destination URL (full, or relative) Kwargs: extra_headers (dict): Headers (override default connection hea...
def add_item(self, key, value, after=False, index=None, pos_key=None, replace=True): if self._validate_fn: self._validate_fn(value) if (index is not None) and (pos_key is not None): raise ValueError('Either specify index or pos_key, not both.') elif pos_key is...
Add an item at a specific location, possibly replacing the existing item. If after is True, we insert *after* the given index, otherwise we insert before. The position is specified using either index or pos_key, the former specifies the position from the start of the array (bas...
def put_multiple(self, task_args_kwargs_list): if not self.isopen: logger = logging.getLogger(__name__) logger.warning('the drop box is not open') return packages = [ ] for t in task_args_kwargs_list: try: task = t['task'] ...
put a list of tasks and their arguments This method can be used to put multiple tasks at once. Calling this method once with multiple tasks can be much faster than calling `put()` multiple times. Parameters ---------- task_args_kwargs_list : list A list of ...
def cmd_add(opts):
    """Add one or more existing Docker containers to a Blockade group."""
    cfg = load_config(opts.config)
    blockade = get_blockade(cfg, opts)
    blockade.add_container(opts.containers)
Add one or more existing Docker containers to a Blockade group
def print_exception(etype, value, tb, limit=None, file=None): if file is None: file = open('/dev/stderr', 'w') if tb: _print(file, 'Traceback (most recent call last):') print_tb(tb, limit, file) lines = format_exception_only(etype, value) for line in lines: _print(file, l...
Print exception up to 'limit' stack trace entries from 'tb' to 'file'. This differs from print_tb() in the following ways: (1) if traceback is not None, it prints a header "Traceback (most recent call last):"; (2) it prints the exception type and value after the stack trace; (3) if type is SyntaxError ...
def calculate_concordance_by_annotation(graph, annotation, key, cutoff=None):
    """Return the concordance score for each subgraph stratified by the annotation.

    :param graph: A BEL graph
    :param annotation: The annotation to group by
    :param key: The node-data dictionary key storing the logFC
    :param cutoff: The optional logFC cutoff for significance
    :returns: dict mapping annotation value to concordance score
    """
    subgraphs = get_subgraphs_by_annotation(graph, annotation)
    scores = {}
    for value, subgraph in subgraphs.items():
        scores[value] = calculate_concordance(subgraph, key, cutoff=cutoff)
    return scores
Returns the concordance scores for each stratified graph based on the given annotation :param pybel.BELGraph graph: A BEL graph :param str annotation: The annotation to group by. :param str key: The node data dictionary key storing the logFC :param float cutoff: The optional logFC cutoff for significan...
def Newline(loc=None):
    """Return a rule that accepts a token of kind ``newline`` and yields an empty list."""
    @llrule(loc, lambda parser: ["newline"])
    def rule(parser):
        token = parser._accept("newline")
        if token is unmatched:
            return token
        return []
    return rule
A rule that accepts token of kind ``newline`` and returns an empty list.
def FPE(N, rho, k=None):
    r"""Final prediction error criterion.

    .. math:: FPE(k) = \rho_k \frac{N + k + 1}{N - k - 1}

    :param N: number of samples
    :param rho: residual-variance estimate :math:`\rho_k`
    :param k: model order; note there is no defined behaviour for the
        default ``k=None`` (arithmetic on None raises TypeError) --
        callers must supply an integer order.
    :returns: the FPE value
    """
    # The original source carried a stray ``r`` where the raw docstring
    # marker was stripped; restored as a proper r-docstring above.
    fpe = rho * (N + k + 1.) / (N - k - 1)
    return fpe
r"""Final prediction error criterion .. math:: FPE(k) = \frac{N + k + 1}{N - k - 1} \rho_k :validation: double checked versus octave.
def set_sim_params(self, nparams, attr_params): for name, value in nparams.items(): val = value[0] if value[0] is not None else 'none' self.h5file.create_array('/parameters', name, obj=val, title=value[1]) for name, value in attr_params.items(...
Store parameters in `params` in `h5file.root.parameters`. `nparams` (dict) A dict as returned by `get_params()` in `ParticlesSimulation()` The format is: keys: used as parameter name values: (2-elements tuple) first element is the ...
def tf_loss(self, states, internals, reward, update, reference=None):
    """Create the TensorFlow op for the L2 loss between predicted state
    values and actual rewards.

    Args:
        states: Dict of state tensors.
        internals: List of prior internal state tensors.
        reward: Reward tensor.
        update: Boolean tensor indicating whether this is an update step.
        reference: Unused here; kept for interface compatibility.

    Returns:
        The scalar L2 loss tensor.
    """
    prediction = self.predict(states=states, internals=internals, update=update)
    return tf.nn.l2_loss(t=(prediction - reward))
Creates the TensorFlow operations for calculating the L2 loss between predicted state values and actual rewards. Args: states: Dict of state tensors. internals: List of prior internal state tensors. reward: Reward tensor. update: Boolean tensor indicating...
def apt_key_exists(keyid):
    """Check whether the given key id exists in the apt keyring."""
    gpg_cmd = ('gpg --ignore-time-conflict --no-options '
               '--no-default-keyring --keyring /etc/apt/trusted.gpg')
    # Run quietly; a non-zero exit simply means the key is absent.
    with settings(hide('everything'), warn_only=True):
        res = run('%(gpg_cmd)s --fingerprint %(keyid)s' % locals())
    return res.succeeded
Check if the given key id exists in apt keyring.
def add_missing_particles(st, rad='calc', tries=50, **kwargs): if rad == 'calc': rad = guess_add_radii(st) guess, npart = feature_guess(st, rad, **kwargs) tries = np.min([tries, npart]) accepts, new_poses = check_add_particles( st, guess[:tries], rad=rad, **kwargs) return accepts, ne...
Attempts to add missing particles to the state. Operates by: (1) featuring the difference image using feature_guess, (2) attempting to add the featured positions using check_add_particles. Parameters ---------- st : :class:`peri.states.State` The state to check adding particles to. ...
def reviews(self, packageName, filterByDevice=False, sort=2, nb_results=None, offset=None): path = REVIEWS_URL + "?doc={}&sort={}".format(requests.utils.quote(packageName), sort) if nb_results is not None: path += "&n={}".format(nb_results) if offset is not None: ...
Browse reviews for an application Args: packageName (str): app unique ID. filterByDevice (bool): filter results for current device sort (int): sorting criteria (values are unknown) nb_results (int): max number of reviews to return offset (int): return...
def gce_list_aggregated(service=None, key_name='name', **kwargs): resp_list = [] req = service.aggregatedList(**kwargs) while req is not None: resp = req.execute() for location, item in resp['items'].items(): if key_name in item: resp_list.extend(item[key_name]) ...
General aggregated list function for the GCE service.
def _epd_residual2(coeffs, times, mags, errs, fsv, fdv, fkv, xcc, ycc, bgv, bge, iha, izd):
    """Residual function minimized with scipy.optimize.least_squares.

    Variant used by epd_magseries_extparams. ``times`` and ``errs`` are
    accepted for interface compatibility but are not used in the residual.
    """
    model = _epd_function(coeffs, fsv, fdv, fkv, xcc, ycc, bgv, bge, iha, izd)
    return mags - model
This is the residual function to minimize using scipy.optimize.least_squares. This variant is for :py:func:`.epd_magseries_extparams`.
def ddel_tasks(provider, user_ids=None, job_ids=None, task_ids=None, labels=None, create_time_min=None, create_time_max=None): deleted_tasks, error_messages = provider.delete_jobs( user_ids, job_ids, task_ids, labels, crea...
Kill jobs or job tasks. This function separates ddel logic from flag parsing and user output. Users of ddel who intend to access the data programmatically should use this. Args: provider: an instantiated dsub provider. user_ids: a set of user ids who "own" the job(s) to delete. job_ids: a set of job...
def clique(graph, id):
    """Greedily build and return the largest clique containing the node with the given id."""
    members = [id]
    for node in graph.nodes:
        # A node may join only if it is adjacent to every current member.
        compatible = True
        for member in members:
            if node.id == member or graph.edge(node.id, member) == None:
                compatible = False
                break
        if compatible:
            members.append(node.id)
    return members
Returns the largest possible clique for the node with given id.
def save(self): repoInfoPath = os.path.join(self.__path, ".pyrepinfo") try: fdinfo = open(repoInfoPath, 'wb') except Exception as e: raise Exception("unable to open repository info for saving (%s)"%e) try: pickle.dump( self, fdinfo, protocol=2 ) ...
Save repository .pyrepinfo to disk.
def grab(bbox=None, childprocess=None, backend=None):
    """Copy the contents of the screen into PIL image memory.

    :param bbox: optional bounding box (x1, y1, x2, y2)
    :param childprocess: run the back-end in a child process when True;
        defaults to the library-wide setting
    :param backend: optional back-end name
    """
    if childprocess is None:
        childprocess = childprocess_default_value()
    return _grab(to_file=False, childprocess=childprocess,
                 backend=backend, bbox=bbox)
Copy the contents of the screen to PIL image memory. :param bbox: optional bounding box (x1,y1,x2,y2) :param childprocess: pyscreenshot can cause an error, if it is used on more different virtual displays and back-end is not in different process. Some back-ends are always di...
def search_point(self, lat, lng, filters=None, startDate=None, endDate=None, types=None, type=None): searchAreaWkt = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lng, lat,lng,lat,lng,lat,lng,lat,lng,lat) return self.search(searchAreaWkt=searchAreaWkt, filters=filters, startDate=startDate, endDate=...
Perform a catalog search over a specific point, specified by lat,lng Args: lat: latitude lng: longitude filters: Array of filters. Optional. Example: [ "(sensorPlatformName = 'WORLDVIEW01' OR sensorPlatformName ='QUICKBIRD02')", ...
def _get_key_internal(self, *args, **kwargs): if args[1] is not None and 'force' in args[1]: key, res = super(Bucket, self)._get_key_internal(*args, **kwargs) if key: mimicdb.backend.sadd(tpl.bucket % self.name, key.name) mimicdb.backend.hmset(tpl.key % (s...
Return None if key is not in the bucket set. Pass 'force' in the headers to check S3 for the key, and after fetching the key from S3, save the metadata and key to the bucket set.
def is_lower(self): if not isinstance(self.val, str_types): raise TypeError('val is not a string') if len(self.val) == 0: raise ValueError('val is empty') if self.val != self.val.lower(): self._err('Expected <%s> to contain only lowercase chars, but did not.' ...
Asserts that val is non-empty string and all characters are lowercase.
def copy(src, dst): (szip, dzip) = (src.endswith(".zip"), dst.endswith(".zip")) logging.info("Copy: %s => %s"%(src, dst)) if szip and dzip: shutil.copy2(src, dst) elif szip: with zipfile.ZipFile(src, mode='r') as z: tmpdir = tempfile.mkdtemp() try: ...
File copy that support compress and decompress of zip files
async def receive_json(self, content, **kwargs): if isinstance(content, dict) and "stream" in content and "payload" in content: steam_name = content["stream"] payload = content["payload"] if steam_name not in self.applications_accepting_frames: raise ValueErro...
Route the message down the correct stream.
def leave(self):
    """Send a leave request (unavailable presence) for the room."""
    if not self.joined:
        return
    presence = MucPresence(to_jid=self.room_jid, stanza_type="unavailable")
    self.manager.stream.send(presence)
Send a leave request for the room.
def pretty_eta(seconds_left): minutes_left = seconds_left // 60 seconds_left %= 60 hours_left = minutes_left // 60 minutes_left %= 60 days_left = hours_left // 24 hours_left %= 24 def helper(cnt, name): return "{} {}{}".format(str(cnt), name, ('s' if cnt > 1 else '')) if days_lef...
Print the number of seconds in human-readable format. Examples: 2 days 2 hours and 37 minutes less than a minute Parameters --------- seconds_left: int Number of seconds to be converted to the ETA Returns ------- eta: str String representing the pretty ETA.
def backends(cls):
    """Discover all keyrings usable for chaining, highest priority first."""
    candidates = []
    for keyring in filter(backend._limit, backend.get_all_keyring()):
        # Exclude the chainer itself and anything with non-positive priority.
        if isinstance(keyring, ChainerBackend):
            continue
        if keyring.priority <= 0:
            continue
        candidates.append(keyring)
    return sorted(candidates, key=backend.by_priority, reverse=True)
Discover all keyrings for chaining.
def neighbors_from_pixelization(self, pixels, ridge_points):
    """Compute the neighbors of every Voronoi pixel as an ndarray of the
    pixel indexes each pixel shares a vertex with, derived from the
    Voronoi ridge points.

    Parameters
    ----------
    pixels : int
        The number of pixels in the pixelization -- presumably; confirm
        against pixelization_util.
    ridge_points : scipy.spatial.Voronoi.ridge_points
        Each Voronoi-ridge pair of pixel indexes.
    """
    return pixelization_util.voronoi_neighbors_from_pixels_and_ridge_points(pixels=pixels,
                                                                            ridge_points=np.asarray(ridge_points))
Compute the neighbors of every Voronoi pixel as an ndarray of the pixel index's each pixel shares a \ vertex with. The ridge points of the Voronoi grid are used to derive this. Parameters ---------- ridge_points : scipy.spatial.Voronoi.ridge_points Each Voronoi-ridg...
def verify_authority(self):
    """Verify the authority of the signed transaction.

    Raises:
        InsufficientAuthorityError: if the blockchain RPC reports the
            transaction's authority as invalid.
    """
    # The former ``except Exception as e: raise e`` wrapper added nothing
    # (it re-raised unchanged) and has been removed; RPC errors propagate.
    if not self.blockchain.rpc.verify_authority(self.json()):
        raise InsufficientAuthorityError
Verify the authority of the signed transaction
def _prompt_for_values(d): for key, value in d.items(): if isinstance(value, CommentedMap): _prompt_for_values(value) elif isinstance(value, list): for item in value: _prompt_for_values(item) else: typ = type(value) if isinstanc...
Update the descriptive metadata interactively. Uses values entered by the user. Note that the function keeps recursing whenever a value is another ``CommentedMap`` or a ``list``. The function works as passing dictionaries and lists into a function edits the values in place.
def echo(self, gain_in=0.8, gain_out=0.9, n_echos=1, delays=[60], decays=[0.4]): if not is_number(gain_in) or gain_in <= 0 or gain_in > 1: raise ValueError("gain_in must be a number between 0 and 1.") if not is_number(gain_out) or gain_out <= 0 or gain_out > 1: raise...
Add echoing to the audio. Echoes are reflected sound and can occur naturally amongst mountains (and sometimes large buildings) when talking or shouting; digital echo effects emulate this behav- iour and are often used to help fill out the sound of a single instrument or vocal. The time ...
def list(self, all_pages=False, **kwargs):
    """Return a list of notification templates.

    Configuration-related fields such as 'notification_configuration' and
    'channels' are ignored even if provided. Keyword arguments act as
    filters; with none given, a page (or all pages) of results is returned.
    """
    self._separate(kwargs)
    return super(Resource, self).list(all_pages=all_pages, **kwargs)
Return a list of notification templates. Note here configuration-related fields like 'notification_configuration' and 'channels' will not be used even provided. If one or more filters are provided through keyword arguments, filter the results accordingly. If no filters...
def load_npy_to_any(path='', name='file.npy'):
    """Load a ``.npy`` file.

    Scalar (0-d) arrays are unwrapped with ``.item()``; anything else is
    returned as the raw array.

    Parameters
    ----------
    path : str
        Directory containing the file (optional).
    name : str
        File name.

    Raises
    ------
    Exception
        If the file cannot be loaded at all. (The original's failure
        message was unreachable dead code after a ``return``; it is now
        actually raised on failure.)
    """
    file_path = os.path.join(path, name)
    try:
        return np.load(file_path).item()
    except Exception:
        pass
    try:
        return np.load(file_path)
    except Exception:
        raise Exception("[!] Fail to load %s" % file_path)
Load `.npy` file. Parameters ------------ path : str Path to the file (optional). name : str File name. Examples --------- - see tl.files.save_any_to_npy()
def _saliency_map(self, a, image, target, labels, mask, fast=False): alphas = a.gradient(image, target) * mask if fast: betas = -np.ones_like(alphas) else: betas = np.sum([ a.gradient(image, label) * mask - alphas for label in labels], 0) ...
Implements Algorithm 3 in manuscript
def atomic_write(filename):
    """Yield a temporary file handle; on exit the handle is closed and the
    temp file is moved over ``filename`` via ``os.replace``.

    NOTE(review): this is a generator -- presumably decorated with
    ``contextlib.contextmanager`` at the definition site (decorator not
    visible here); confirm. Also note the replace runs even when the body
    raises, which keeps a partial file -- confirm that is intended.
    """
    f = _tempfile(os.fsencode(filename))
    try:
        yield f
    finally:
        f.close()
        os.replace(f.name, filename)
Open a NamedTemporaryFile handle in a context manager
def list_trilegal_filtersystems(): print('%-40s %s' % ('FILTER SYSTEM NAME','DESCRIPTION')) print('%-40s %s' % ('------------------','-----------')) for key in sorted(TRILEGAL_FILTER_SYSTEMS.keys()): print('%-40s %s' % (key, TRILEGAL_FILTER_SYSTEMS[key]['desc']))
This just lists all the filter systems available for TRILEGAL.
def convert_concat(params, w_name, scope_name, inputs, layers, weights, names): print('Converting concat ...') concat_nodes = [layers[i] for i in inputs] if len(concat_nodes) == 1: layers[scope_name] = concat_nodes[0] return if names == 'short': tf_name = 'CAT' + random_string(5)...
Convert concatenation. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for ker...
def free(self):
    """Unlink and free the XML node owned by `self` and drop the reference.

    The unlink/free step is skipped for borrowed (non-owned) nodes.
    (The flattened original's indentation is ambiguous; the reference is
    assumed to be cleared unconditionally -- confirm against upstream.)
    """
    if not self.borrowed:
        self.xmlnode.unlinkNode()
        self.xmlnode.freeNode()
    self.xmlnode = None
Unlink and free the XML node owned by `self`.
def adjust_hue(im, hout=0.66, is_offset=True, is_clip=True, is_random=False): hsv = rgb_to_hsv(im) if is_random: hout = np.random.uniform(-hout, hout) if is_offset: hsv[..., 0] += hout else: hsv[..., 0] = hout if is_clip: hsv[..., 0] = np.clip(hsv[..., 0], 0, np.inf) ...
Adjust hue of an RGB image. This is a convenience method that converts an RGB image to float representation, converts it to HSV, add an offset to the hue channel, converts back to RGB and then back to the original data type. For TF, see `tf.image.adjust_hue <https://www.tensorflow.org/api_docs/python/tf/image/...
def intersection(self, other, default=None): x1_i = max(self.x1, other.x1) y1_i = max(self.y1, other.y1) x2_i = min(self.x2, other.x2) y2_i = min(self.y2, other.y2) if x1_i > x2_i or y1_i > y2_i: return default else: return BoundingBox(x1=x1_i, y1=...
Compute the intersection bounding box of this bounding box and another one. Note that in extreme cases, the intersection can be a single point, meaning that the intersection bounding box will exist, but then also has a height and width of zero. Parameters ---------- other : img...
def pickByDistribution(distribution, r=None):
    """Pick an index according to the provided (unnormalized) distribution.

    Example: ``pickByDistribution([.2, .1])`` returns 0 two thirds of the
    time and 1 one third of the time.

    :param distribution: sequence of non-negative weights; need not sum to 1.
    :param r: instance of random.Random; the module-level generator is used
        when omitted.
    """
    rng = random if r is None else r
    remaining = rng.uniform(0, sum(distribution))
    # Walk the cumulative weights until the sample falls inside a bucket.
    for index, weight in enumerate(distribution):
        if remaining <= weight:
            return index
        remaining -= weight
Pick a value according to the provided distribution. Example: :: pickByDistribution([.2, .1]) Returns 0 two thirds of the time and 1 one third of the time. :param distribution: Probability distribution. Need not be normalized. :param r: Instance of random.Random. Uses the system instance if one is ...
def prep(ctx, commit=True): cfg = config.load() scm = scm_provider(cfg.project_root, commit=commit, ctx=ctx) if not scm.workdir_is_clean(): notify.failure("You have uncommitted changes, please commit or stash them!") setup_cfg = cfg.rootjoin('setup.cfg') if os.path.exists(setup_cfg): ...
Prepare for a release.
def getTemporalDelay(inferenceElement, key=None): if inferenceElement in (InferenceElement.prediction, InferenceElement.encodings): return 1 if inferenceElement in (InferenceElement.anomalyScore, InferenceElement.anomalyLabel, ...
Returns the number of records that elapse between when an inference is made and when the corresponding input record will appear. For example, a multistep prediction for 3 timesteps out will have a delay of 3 Parameters: ----------------------------------------------------------------------- infer...
def flip(self, axis=HORIZONTAL):
    """Flip the layer along the given axis, HORIZONTAL or VERTICAL."""
    # Two independent checks (not elif), matching the original semantics.
    transforms = ((HORIZONTAL, Image.FLIP_LEFT_RIGHT),
                  (VERTICAL, Image.FLIP_TOP_BOTTOM))
    for which, op in transforms:
        if axis == which:
            self.img = self.img.transpose(op)
Flips the layer, either HORIZONTAL or VERTICAL.
def launchapp(self, cmd, args=[], delay=0, env=1, lang="C"): try: atomac.NativeUIElement.launchAppByBundleId(cmd) return 1 except RuntimeError: if atomac.NativeUIElement.launchAppByBundlePath(cmd, args): try: time.sleep(int(delay)) ...
Launch application. @param cmd: Command line string to execute. @type cmd: string @param args: Arguments to the application @type args: list @param delay: Delay after the application is launched @type delay: int @param env: GNOME accessibility environment to be s...
def from_spec(spec):
    """Create an exploration object from a specification dict."""
    obj = util.get_object(
        obj=spec,
        predefined_objects=tensorforce.core.explorations.explorations,
    )
    assert isinstance(obj, Exploration)
    return obj
Creates an exploration object from a specification dict.
def set_physical_plan(self, physical_plan):
    """Set the physical plan and derived topology id, then trigger watches."""
    if physical_plan:
        self.physical_plan = physical_plan
        self.id = physical_plan.topology.id
    else:
        self.physical_plan = None
        self.id = None
    self.trigger_watches()
set physical plan
def tempo(self, factor, audio_type=None, quick=False): if not is_number(factor) or factor <= 0: raise ValueError("factor must be a positive number") if factor < 0.5 or factor > 2: logger.warning( "Using an extreme time stretching factor. " "Quality...
Time stretch audio without changing pitch. This effect uses the WSOLA algorithm. The audio is chopped up into segments which are then shifted in the time domain and overlapped (cross-faded) at points where their waveforms are most similar as determined by measurement of least squares. ...
def load(self, reload=False, require_load=False): if reload: self.config = None if self.config: self._log.debug('Returning cached config instance. Use ' '``reload=True`` to avoid caching!') return path = self._effective_path() ...
Searches for an appropriate config file. If found, loads the file into the current instance. This method can also be used to reload a configuration. Note that you may want to set ``reload`` to ``True`` to clear the configuration before loading in that case. Without doing that, values wi...
def mock_request():
    """Build a fake HttpRequest so oEmbeds can use context processors."""
    site = Site.objects.get_current()
    fake = HttpRequest()
    fake.META['SERVER_NAME'] = site.domain
    return fake
Generate a fake request object to allow oEmbeds to use context processors.
def wait_for( self, timeout=10000, interval=1000, asserter=lambda x: x): if not callable(asserter): raise TypeError('Asserter must be callable.') @retry( retry_on_exception=lambda ex: isinstance(ex, WebDriverException), stop_max_delay=timeout, ...
Wait for driver till satisfy the given condition Support: Android iOS Web(WebView) Args: timeout(int): How long we should be retrying stuff. interval(int): How long between retries. asserter(callable): The asserter func to determine the result. ...
def _raw_at_zoom(config, zooms): params_per_zoom = {} for zoom in zooms: params = {} for name, element in config.items(): if name not in _RESERVED_PARAMETERS: out_element = _element_at_zoom(name, element, zoom) if out_element is not None: ...
Return parameter dictionary per zoom level.
def _add_uninstall(self, context):
    """Render uninstall.sh from its template and register it as an included file."""
    rendered = self._render_template('uninstall.sh', context)
    self.config.setdefault('files', [])
    self._add_unique_file({
        "path": "/uninstall.sh",
        "contents": rendered,
        "mode": "755",
    })
generates uninstall.sh and adds it to included files
def embed_code_links(app, exception): if exception is not None: return if not app.builder.config.plot_gallery: return if app.builder.name not in ['html', 'readthedocs']: return print('Embedding documentation hyperlinks in examples..') gallery_conf = app.config.sphinx_gallery_...
Embed hyperlinks to documentation into example code
def SETGE(cpu, dest):
    """SETGE: set byte if greater or equal (signed).

    Writes 1 to the destination when SF == OF (the x86 signed ``>=``
    condition), 0 otherwise.

    :param cpu: current CPU.
    :param dest: destination operand.
    """
    dest.write(Operators.ITEBV(dest.size, cpu.SF == cpu.OF, 1, 0))
Sets byte if greater or equal. :param cpu: current CPU. :param dest: destination operand.
def _data_analysis(self, data_view_id):
    """Hit the data-analysis endpoint for a data view.

    :param data_view_id: the data view identifier (id number for data views)
    :type data_view_id: str
    :return: dict with information about the data, e.g. dCorr and t-SNE
    """
    failure_message = "Error while retrieving data analysis for data view {}".format(data_view_id)
    return self._get_success_json(self._get(routes.data_analysis(data_view_id), failure_message=failure_message))
Data analysis endpoint. :param data_view_id: The model identifier (id number for data views) :type data_view_id: str :return: dictionary containing information about the data, e.g. dCorr and tsne
def call_method_with_acl(self, method_name, packet, *args):
    """Call ``method_name`` only if the ACLs allow it for this user.

    Always dispatch through this instead of ``getattr(self, name)()`` so
    access control is enforced. When access is denied, a
    'method_access_denied' error is signalled and None is returned.
    """
    if self.is_method_allowed(method_name):
        return self.call_method(method_name, packet, *args)
    self.error('method_access_denied',
               'You do not have access to method "%s"' % method_name)
    return None
You should always use this function to call the methods, as it checks if the user is allowed according to the ACLs. If you override :meth:`process_packet` or :meth:`process_event`, you should definitely want to use this instead of ``getattr(self, 'my_method')()``
def flag_inner_classes(obj):
    """Recursively link class attributes of ``obj`` back to their parent.

    Each inner class gets ``_parent``/``_parent_inst`` plus a ``__getattr__``
    that instantiates the parent and calls its ``setup`` (``my_getattr``).
    """
    for tup in class_members(obj):
        inner = tup[1]
        inner._parent = obj
        inner._parent_inst = None
        inner.__getattr__ = my_getattr
        flag_inner_classes(inner)
Mutates any attributes on ``obj`` which are classes, with link to ``obj``. Adds a convenience accessor which instantiates ``obj`` and then calls its ``setup`` method. Recurses on those objects as well.
def proto_02_03_IVfast(abf=exampleABF):
    """Fast IV protocol: 1 step per sweep, for a clean IV without fast currents.

    Measures average and SD over the .6-.9 s window of each sweep, saves
    the IV plot, then stores [clamp values, averages] on the ABF.
    """
    av1,sd1=swhlab.plot.IV(abf,.6,.9,True)
    swhlab.plot.save(abf,tag='iv1')
    Xs=abf.clampValues(.6)
    abf.saveThing([Xs,av1],'iv')
fast sweeps, 1 step per sweep, for clean IV without fast currents.
def calc_inbag(n_samples, forest): if not forest.bootstrap: e_s = "Cannot calculate the inbag from a forest that has " e_s = " bootstrap=False" raise ValueError(e_s) n_trees = forest.n_estimators inbag = np.zeros((n_samples, n_trees)) sample_idx = [] for t_idx in range(n_tree...
Derive samples used to create trees in scikit-learn RandomForest objects. Recovers the samples in each tree from the random state of that tree using :func:`forest._generate_sample_indices`. Parameters ---------- n_samples : int The number of samples used to fit the scikit-learn RandomFores...
def take_shas_of_all_files(G, settings): global ERROR_FN sprint = settings["sprint"] error = settings["error"] ERROR_FN = error sha_dict = {} all_files = [] for target in G.nodes(data=True): sprint("About to take shas of files in target '{}'".format(target[0]), level="...
Takes sha1 hash of all dependencies and outputs of all targets Args: The graph we are going to build The settings dictionary Returns: A dictionary where the keys are the filenames and the value is the sha1 hash
def pruneUI(dupeList, mainPos=1, mainLen=1): dupeList = sorted(dupeList) print for pos, val in enumerate(dupeList): print "%d) %s" % (pos + 1, val) while True: choice = raw_input("[%s/%s] Keepers: " % (mainPos, mainLen)).strip() if not choice: print ("Please enter a s...
Display a list of files and prompt for ones to be kept. The user may enter ``all`` or one or more numbers separated by spaces and/or commas. .. note:: It is impossible to accidentally choose to keep none of the displayed files. :param dupeList: A list duplicate file paths :param mainPos: ...
def add_droplets(self, droplet):
    """Add the Tag to one or more Droplets.

    ``droplet`` may be a single Droplet/id/name or a list of them; a
    single value is wrapped in a list before resources are extracted.
    Returns the API result, or False when no resources were extracted.
    """
    droplet_list = droplet if isinstance(droplet, list) else [droplet]
    resources = self.__extract_resources_from_droplets(droplet_list)
    if resources:
        return self.__add_resources(resources)
    return False
Add the Tag to a Droplet. Attributes accepted at creation time: droplet: array of string or array of int, or array of Droplets.
def type(self):
    """Return 'suite', 'resource', or None.

    'suite' when a testcase table is present; 'resource' when at least one
    known robot table exists but no testcase table; None when no known
    tables are found.
    """
    known_tables = [t for t in self.tables if not isinstance(t, UnknownTable)]
    if not known_tables:
        return None
    if any(isinstance(t, TestcaseTable) for t in self.tables):
        return "suite"
    return "resource"
Return 'suite' or 'resource' or None This will return 'suite' if a testcase table is found; It will return 'resource' if at least one robot table is found. If no tables are found it will return None
def rdkitmol_Hs(self):
    r"""RDKit object of the chemical with explicit hydrogens, cached on
    first access; None when RDKit cannot build the molecule.

    For examples of what can be done with RDKit, see
    `their website <http://www.rdkit.org/docs/GettingStartedInPython.html>`_.
    """
    if self.__rdkitmol_Hs:
        return self.__rdkitmol_Hs
    try:
        self.__rdkitmol_Hs = Chem.AddHs(self.rdkitmol)
        return self.__rdkitmol_Hs
    except Exception:
        # Narrowed from a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit; best-effort behaviour is kept.
        return None
r'''RDKit object of the chemical, with hydrogen. If RDKit is not available, holds None. For examples of what can be done with RDKit, see `their website <http://www.rdkit.org/docs/GettingStartedInPython.html>`_.
def record_conflict(self, assignment, var, val, delta):
    """Record conflicts caused by addition or deletion of a Queen.

    ``delta`` is +1 on placement and -1 on removal; row, down-diagonal and
    up-diagonal counters are updated for position (var, val). The
    ``assignment`` parameter is unused but kept for interface compatibility.
    """
    n = len(self.vars)
    self.rows[val] += delta
    self.ups[var - val + n - 1] += delta
    self.downs[var + val] += delta
Record conflicts caused by addition or deletion of a Queen.
def missing_particle(separation=0.0, radius=RADIUS, SNR=20):
    """Create a two-particle state with the second particle switched off.

    Returns the state plus a copy of the true particle positions, so the
    result can be compared against featuring with a single-particle guess.
    """
    s = init.create_two_particle_state(imsize=6*radius+4, axis='x', sigma=1.0/SNR,
            delta=separation, radius=radius, stateargs={'varyn': True},
            psfargs={'error': 1e-6})
    s.obj.typ[1] = 0.
    s.reset()
    return s, s.obj.pos.copy()
create a two particle state and compare it to featuring using a single particle guess
def _create_element_list_(self):
    """Extract an alphabetically sorted list of elements from the material's compounds.

    :returns: an alphabetically sorted list of element symbols
    """
    element_set = stoich.elements(self.compounds)
    # sorted() accepts any iterable; no need to materialise a list first.
    return sorted(element_set)
Extract an alphabetically sorted list of elements from the compounds of the material. :returns: An alphabetically sorted list of elements.
def div_img(img1, div2): if is_img(div2): return img1.get_data()/div2.get_data() elif isinstance(div2, (float, int)): return img1.get_data()/div2 else: raise NotImplementedError('Cannot divide {}({}) by ' '{}({})'.format(type(img1), ...
Pixelwise division or divide by a number
def create(self):
    """Create a new domain from this object's name and IP address."""
    payload = {
        "name": self.name,
        "ip_address": self.ip_address,
    }
    return self.get_data("domains", type=POST, params=payload)
Create a new domain
def present(self, results):
    """Present the (score, doc-id) results as a printed list."""
    for score, doc_id in results:
        doc = self.documents[doc_id]
        line = "%5.2f|%25s | %s" % (
            100 * score, doc.url, doc.title[:45].expandtabs())
        print(line)
Present the results as a list.
def mmPrettyPrintConnections(self): text = "" text += ("Segments: (format => " "( text += "------------------------------------\n" columns = range(self.numberOfColumns()) for column in columns: cells = self.cellsForColumn(column) for cell in cells: segmentDict = dict...
Pretty print the connections in the temporal memory. TODO: Use PrettyTable. @return (string) Pretty-printed text
def sim(self, src, tar):
    """Return the length similarity of two strings.

    Length similarity is the ratio of the shorter string's length to the
    longer's; identical strings score 1.0 and an empty input scores 0.0.

    Parameters
    ----------
    src : str
        Source string for comparison.
    tar : str
        Target string for comparison.

    Returns
    -------
    float
        The length similarity in [0, 1].
    """
    if src == tar:
        return 1.0
    if not src or not tar:
        return 0.0
    shorter, longer = sorted((len(src), len(tar)))
    return shorter / longer
Return the length similarity of two strings. Length similarity is the ratio of the length of the shorter string to the longer. Parameters ---------- src : str Source string for comparison tar : str Target string for comparison Returns ...
def predict_proba(self, a, b, **kwargs):
    """Infer the causal direction between two variables with the RECI statistic.

    :param a: Input variable 1
    :param b: Input variable 2
    :return: causation coefficient (positive for a->b, negative for b->a)
    :rtype: float
    """
    forward = self.b_fit_score(b, a)
    backward = self.b_fit_score(a, b)
    return forward - backward
Infer causal relationships between 2 variables using the RECI statistic :param a: Input variable 1 :param b: Input variable 2 :return: Causation coefficient (Value : 1 if a->b and -1 if b->a) :rtype: float
def add(self, constraint, check=False): if isinstance(constraint, bool): constraint = BoolConstant(constraint) assert isinstance(constraint, Bool) constraint = simplify(constraint) if self._child is not None: raise Exception('ConstraintSet is frozen') if i...
Add a constraint to the set :param constraint: The constraint to add to the set. :param check: Currently unused. :return:
def detail_view(self, request): context = { 'preview': self, } kwargs = {} if self.form_class: if request.GET: form = self.form_class(data=request.GET) else: form = self.form_class() context['form'] = form ...
Renders the message view to a response.
def calculate(self, T, P, zs, ws, method): r if method == SIMPLE: ks = [i(T, P) for i in self.ThermalConductivityLiquids] return mixing_simple(zs, ks) elif method == DIPPR_9H: ks = [i(T, P) for i in self.ThermalConductivityLiquids] return DIPPR9H(w...
r'''Method to calculate thermal conductivity of a liquid mixture at temperature `T`, pressure `P`, mole fractions `zs` and weight fractions `ws` with a given method. This method has no exception handling; see `mixture_property` for that. Parameters ---------- T...
def allow(self, role, method, resource, with_children=True): if with_children: for r in role.get_children(): permission = (r.get_name(), method, resource) if permission not in self._allowed: self._allowed.append(permission) if role == 'anon...
Add allowing rules. :param role: Role of this rule. :param method: Method to allow in rule, include GET, POST, PUT etc. :param resource: Resource also view function. :param with_children: Allow role's children in rule as well if with_children is `True`
def save(self, sess, save_path, timestep=None): if self._saver is None: raise TensorForceError("register_saver_ops should be called before save") return self._saver.save( sess=sess, save_path=save_path, global_step=timestep, write_meta_graph=Fa...
Saves this component's managed variables. Args: sess: The session for which to save the managed variables. save_path: The path to save data to. timestep: Optional, the timestep to append to the file name. Returns: Checkpoint path where the model was save...
def trun_to_file(trun, fpath=None):
    """Dump the given trun to file as YAML; the default path is derived
    from the trun's configured OUTPUT directory."""
    if fpath is None:
        fpath = yml_fpath(trun["conf"]["OUTPUT"])
    with open(fpath, 'w') as yml_file:
        text = yaml.dump(trun, explicit_start=True, default_flow_style=False)
        yml_file.write(text)
Dump the given trun to file
def get_homogeneous(package_descriptors, targets, repos_data): homogeneous = {} for package_descriptor in package_descriptors.values(): pkg_name = package_descriptor.pkg_name debian_pkg_name = package_descriptor.debian_pkg_name versions = [] for repo_data in repos_data: ...
For each package check if the version in one repo is equal for all targets. The version could be different in different repos though. :return: a dict indexed by package names containing a boolean flag
def copyto(self, new_abspath=None, new_dirpath=None, new_dirname=None, new_basename=None, new_fname=None, new_ext=None, overwrite=False, makedirs=False): self.assert_exists() p = self....
Copy this file to other place.
def apt(self, package_names, raise_on_error=False):
    """Install the specified packages with ``apt-get install -y``.

    Waits for the command to finish.

    Parameters
    ----------
    package_names : str or list-like of str
        A single package name or a collection of names.
    raise_on_error : bool, default False
        Forwarded to ``wait``; if True, raise ValueError when stderr is
        not empty.

    Returns
    -------
    Whatever ``self.wait`` returns for the composed command.
    """
    # `basestring` only exists on Python 2; fall back to `str` so a bare
    # package name is still accepted as a single package on Python 3
    # instead of raising NameError.
    try:
        string_types = basestring
    except NameError:
        string_types = str
    if isinstance(package_names, string_types):
        package_names = [package_names]
    cmd = "apt-get install -y %s" % (' '.join(package_names))
    return self.wait(cmd, raise_on_error=raise_on_error)
Install specified packages using apt-get. -y options are automatically used. Waits for command to finish. Parameters ---------- package_names: list-like of str raise_on_error: bool, default False If True then raise ValueError if stderr is not empty debcon...
def close(self):
    """Flush and mark the stream closed, recording Content-Length first.

    For responses that are not streaming (or are asynchronous) and whose
    headers do not already carry a Content-Length, the current write
    position (``tell()``) is stored as the body length before flushing.
    """
    self.require_not_closed()
    needs_length = (not self.streaming) or self.asynchronous
    if needs_length and 'Content-Length' not in self.headers:
        self.headers['Content-Length'] = self.tell()
    self.flush()
    self._closed = True
Flush and close the stream. This is called automatically by the base resource on resources unless the resource is operating asynchronously; in that case, this method MUST be called in order to signal the end of the request. If not the request will simply hang as it is waiting for some ...
def fail(message=None, exit_status=None):
    """Write an error message to stderr and terminate the program.

    The process exits with ``exit_status`` when it is truthy; any falsy
    value (including None and 0) is coerced to an exit status of 1.
    """
    sys.stderr.write('Error: {0}\n'.format(message))
    sys.exit(exit_status if exit_status else 1)
Prints the specified message and exits the program with the specified exit status.
def _generateRangeDescription(self, ranges): desc = "" numRanges = len(ranges) for i in xrange(numRanges): if ranges[i][0] != ranges[i][1]: desc += "%.2f-%.2f" % (ranges[i][0], ranges[i][1]) else: desc += "%.2f" % (ranges[i][0]) if i < numRanges - 1: desc += ", " ...
Generate a text description of the given numeric ranges.
def get(self, name):
    """Return the first component whose category matches ``name``.

    :param name: Category name to look up.
    :return: The matching component from ``self.comps``, or None when
        no component has that category.
    """
    matches = (comp for comp in self.comps if comp.category == name)
    return next(matches, None)
Return component by category name
def get_unique_pathname(path, root=''):
    """Return a pathname under ``root`` that does not yet exist.

    Tries ``path`` itself first, then the numbered variants produced by
    ``__get_numbered_paths``, and returns the first candidate absent
    from the filesystem.
    """
    base = os.path.join(root, path)
    candidates = itertools.chain((base,), __get_numbered_paths(base))
    unused = six.moves.filterfalse(os.path.exists, candidates)
    return next(unused)
Return a pathname possibly with a number appended to it so that it is unique in the directory.
def compare_signature(expected: Union[str, bytes], actual: Union[str, bytes]) -> bool:
    """Compare two signatures in constant time.

    Both values are normalized to bytes before being handed to
    :func:`hmac.compare_digest`, so str and bytes may be mixed freely.

    :param expected: The expected signature.
    :type expected: Union[str, bytes]
    :param actual: The signature that was actually received.
    :type actual: Union[str, bytes]
    :return: Do the signatures match?
    :rtype: bool
    """
    left = util.to_bytes(expected)
    right = util.to_bytes(actual)
    return hmac.compare_digest(left, right)
Compares the given signatures. :param expected: The expected signature. :type expected: Union[str, bytes] :param actual: The actual signature. :type actual: Union[str, bytes] :return: Do the signatures match? :rtype: bool
def unhex(s):
    """Parse a hexadecimal string into its integer value.

    Accepts upper- and lower-case hex digits. Parsing stops silently at
    the first non-hex character, and an empty string yields 0 — unlike
    ``int(s, 16)``, which would raise ValueError in both cases.
    """
    value = 0
    for ch in s:
        if '0' <= ch <= '9':
            digit = ord(ch) - ord('0')
        elif 'a' <= ch <= 'f':
            digit = 10 + ord(ch) - ord('a')
        elif 'A' <= ch <= 'F':
            digit = 10 + ord(ch) - ord('A')
        else:
            break
        value = value * 16 + digit
    return value
Get the integer value of a hexadecimal number.
def env_key(key, default):
    """Look up ``key`` in the environment, falling back to ``default``.

    The dotted key is translated to its environment form first: dots
    become underscores and the result is upper-cased, e.g.
    ``my.database.host`` -> ``MY_DATABASE_HOST``.
    """
    name = key.replace('.', '_').upper()
    return os.environ.get(name, default)
Try to get `key` from the environment. This mutates `key` to replace dots with underscores and makes it all uppercase. my.database.host => MY_DATABASE_HOST
def simplified_edges(self):
    """Yield every edge as a bare ``(u, v)`` pair.

    Walks all edge groups lazily, dropping the per-edge data dict, so no
    intermediate list is materialized.
    """
    for edgelist in self.edges.values():
        for u, v, _ in edgelist:
            yield (u, v)
A generator for getting all of the edges without consuming extra memory.
def pop(self, key, default=_sentinel):
    """Remove ``key`` (case-insensitively) and return its stored value.

    :param key: The key; it is lower-cased before lookup.
    :param default: Returned when the key is absent. When omitted, a
        missing key raises KeyError instead.
    :return: The value part (second element) of the stored tuple, or
        ``default`` when the key was absent and a default was supplied.
    """
    lowered = key.lower()
    if default is _sentinel:
        entry = self._data.pop(lowered)
    else:
        entry = self._data.pop(lowered, default)
    return entry[1] if entry is not default else default
Removes the specified key and returns the corresponding value. If key is not found, the default is returned if given, otherwise KeyError is raised. :param key: The key :param default: The default value :return: The value
def create_field(subfields=None, ind1=' ', ind2=' ', controlfield_value='', global_position=-1): if subfields is None: subfields = [] ind1, ind2 = _wash_indicators(ind1, ind2) field = (subfields, ind1, ind2, controlfield_value, global_position) _check_field_validity(field) r...
Return a field created with the provided elements. Global position is set arbitrary to -1.
def update(x, **entries):
    """Merge ``entries`` into ``x`` and return ``x``.

    Works on plain dicts (via ``dict.update``) as well as on objects,
    whose ``__dict__`` receives the entries as attributes.

    >>> update({'a': 1}, a=10, b=20)
    {'a': 10, 'b': 20}
    >>> update(Struct(a=1), a=10, b=20)
    Struct(a=10, b=20)
    """
    target = x if isinstance(x, dict) else x.__dict__
    target.update(entries)
    return x
Update a dict, or an object with slots, according to `entries` dict. >>> update({'a': 1}, a=10, b=20) {'a': 10, 'b': 20} >>> update(Struct(a=1), a=10, b=20) Struct(a=10, b=20)
def save(self, *args, **kwargs): current_activable_value = getattr(self, self.ACTIVATABLE_FIELD_NAME) is_active_changed = self.id is None or self.__original_activatable_value != current_activable_value self.__original_activatable_value = current_activable_value ret_val = super(BaseActiva...
A custom save method that handles figuring out when something is activated or deactivated.
def expire_password(self, username):
    """Force ``username`` to change their password at next login.

    Sets the account's last-password-change date to day 0 via
    ``chage -d 0``; the renderer substitutes ``{username}`` from the env
    it carries.
    """
    renderer = self.local_renderer
    renderer.env.username = username
    renderer.sudo('chage -d 0 {username}')
Forces the user to change their password the next time they login.
def config2(self): config = [] data = {} self.cnxn.xfer([0x3D]) sleep(10e-3) for i in range(9): resp = self.cnxn.xfer([0x00])[0] config.append(resp) data["AMSamplingInterval"] = self._16bit_unsigned(config[0], config[1]) data["AMId...
Read the second set of configuration variables and return as a dictionary. **NOTE: This method is supported by firmware v18+.** :rtype: dictionary :Example: >>> a.config2() { 'AMFanOnIdle': 0, 'AMIdleIntervalCount': 0, 'AMMaxDataArraysInFil...
def filter(names, pat): import os result=[] try: re_pat = _cache[pat] except KeyError: res = translate(pat) if len(_cache) >= _MAXCACHE: globals()['_cache'] = {} _cache[pat] = re_pat = re.compile(res) match = re_pat.match if 1: for name in name...
Return the subset of the list NAMES that match PAT