code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def simple_db_engine(reader=None, srnos=None): """engine that gets values from the simple excel 'db'""" if reader is None: reader = dbreader.Reader() logger.debug("No reader provided. Creating one myself.") info_dict = dict() info_dict["filenames"] = [reader.get_cell_name(srno) for srn...
engine that gets values from the simple excel 'db
def djfrontend_jquery_formset(version=None): """ Returns the jQuery Dynamic Formset plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: version = getattr(settings, 'DJFRONTEND_JQUERY_FORMSET', DJFRONTEND_JQUERY_F...
Returns the jQuery Dynamic Formset plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file.
def get_project_totals(entries, date_headers, hour_type=None, overtime=False, total_column=False, by='user'): """ Yield hour totals grouped by user and date. Optionally including overtime. """ totals = [0 for date in date_headers] rows = [] for thing, thing_entries in grou...
Yield hour totals grouped by user and date. Optionally including overtime.
def _create_storage_profile(self): """ Create the storage profile for the instance. Image reference can be a custom image name or a published urn. """ if self.image_publisher: storage_profile = { 'image_reference': { 'publisher': s...
Create the storage profile for the instance. Image reference can be a custom image name or a published urn.
def inv_n(x): '''given N matrices, return N inverses''' # # The inverse of a small matrix (e.g. 3x3) is # # 1 # ----- C(j,i) # det(A) # # where C(j,i) is the cofactor of matrix A at position j,i # assert x.ndim == 3 assert x.shape[1] == x.shape[2] c = np.array([ [...
given N matrices, return N inverses
def preprocess(self, x): """Load a single example using this field, tokenizing if necessary. If the input is a Python 2 `str`, it will be converted to Unicode first. If `sequential=True`, it will be tokenized. Then the input will be optionally lowercased and passed to the user-provided ...
Load a single example using this field, tokenizing if necessary. If the input is a Python 2 `str`, it will be converted to Unicode first. If `sequential=True`, it will be tokenized. Then the input will be optionally lowercased and passed to the user-provided `preprocessing` Pipeline.
def check_xml(code): """Yield errors.""" try: xml.etree.ElementTree.fromstring(code) except xml.etree.ElementTree.ParseError as exception: message = '{}'.format(exception) line_number = 0 found = re.search(r': line\s+([0-9]+)[^:]*$', message) if found: li...
Yield errors.
def upcoming( cls, api_key=djstripe_settings.STRIPE_SECRET_KEY, customer=None, coupon=None, subscription=None, subscription_plan=None, subscription_prorate=None, subscription_proration_date=None, subscription_quantity=None, subscription_trial_end=None, **kwargs ): """ Gets the upcoming previe...
Gets the upcoming preview invoice (singular) for a customer. At any time, you can preview the upcoming invoice for a customer. This will show you all the charges that are pending, including subscription renewal charges, invoice item charges, etc. It will also show you any discount that is applicable to the c...
def convert_all(self):
    """Convert the links of every completed record in the URL table.

    Records whose status is not ``Status.done`` are left untouched.
    """
    for record in self._url_table.get_all():
        if record.status == Status.done:
            self.convert_by_record(record)
Convert all links in URL table.
def upsert_event(self, calendar_id, event): """Inserts or updates an event for the specified calendar. :param string calendar_id: ID of calendar to insert/update event into. :param dict event: Dictionary of event data to send to cronofy. """ event['start'] = format_event_time(ev...
Inserts or updates an event for the specified calendar. :param string calendar_id: ID of calendar to insert/update event into. :param dict event: Dictionary of event data to send to cronofy.
def generate_passphrase(size=12):
    """Return a randomly generated passphrase of ``size`` characters.

    Characters are drawn from ASCII lowercase, uppercase, and digits.

    :param int size: Number of characters to generate (default 12).
    :return: The generated passphrase.
    :rtype: str
    """
    # Passphrases are security-sensitive: use the cryptographically strong
    # `secrets` module instead of the non-cryptographic `random` module.
    import secrets
    chars = string.ascii_lowercase + string.ascii_uppercase + string.digits
    return ''.join(secrets.choice(chars) for _ in range(size))
Return a generated string `size` characters long based on lowercase, uppercase, and digit chars
def bound(self, p1, p2=None):
    """Clamp this point into the rectangle spanned by `p1` and `p2`.

    :param p1: First corner of the bounding rect (or the full rect spec
        when `p2` is omitted).
    :param p2: Optional opposite corner of the bounding rect.
    :return: A new ``Point`` whose coordinates lie within the rect.
    """
    rect = Rect(p1, p2)
    clamped_x = min(max(self.x, rect.l), rect.r)
    clamped_y = min(max(self.y, rect.t), rect.b)
    return Point(clamped_x, clamped_y)
Bound this point within the rect defined by (`p1`, `p2`).
def s_demand(self, bus):
    """Return the total complex power demand at `bus`.

    Dispatchable loads (generators flagged ``is_load``) connected to the
    bus are subtracted from the bus's own demand.
    """
    vload = [complex(g.p, g.q) for g in self.generators
             if g.is_load and g.bus == bus]
    Svl = array(vload, dtype=complex64)
    bus_demand = complex(bus.p_demand, bus.q_demand)
    return bus_demand - sum(Svl)
Returns the total complex power demand.
def add_alt(self, entry): """Parse and store the alternative allele field""" entry = entry[7:-1] info = entry.split(',') if len(info) < 2: return False for v in info: key, value = v.split('=', 1) if key == 'ID': self.alt[value] ...
Parse and store the alternative allele field
def _equal_values(self, val1, val2): """Matrices are equal if they hash to the same value.""" if self._is_supported_matrix(val1): if self._is_supported_matrix(val2): _, _, hash_tuple_1 = self._serialize_matrix(val1) _, _, hash_tuple_2 = self._serialize_matrix...
Matrices are equal if they hash to the same value.
def simxPackFloats(floatList): ''' Please have a look at the function description/documentation in the V-REP user manual ''' if sys.version_info[0] == 3: s=bytes() for i in range(len(floatList)): s=s+struct.pack('<f',floatList[i]) s=bytearray(s) else: s='...
Please have a look at the function description/documentation in the V-REP user manual
def open_zip(cls, dbname, zipped, encoding=None, fieldnames_lower=True, case_sensitive=True): """Context manager. Allows opening a .dbf file from zip archive. .. code-block:: with Dbf.open_zip('some.dbf', 'myarch.zip') as dbf: ... :param str|unicode dbname: .dbf fi...
Context manager. Allows opening a .dbf file from zip archive. .. code-block:: with Dbf.open_zip('some.dbf', 'myarch.zip') as dbf: ... :param str|unicode dbname: .dbf file name :param str|unicode|file zipped: .zip file path or a file-like object. :param st...
def union(cls): """A class decorator which other classes can specify that they can resolve to with `UnionRule`. Annotating a class with @union allows other classes to use a UnionRule() instance to indicate that they can be resolved to this base union class. This class will never be instantiated, and should hav...
A class decorator which other classes can specify that they can resolve to with `UnionRule`. Annotating a class with @union allows other classes to use a UnionRule() instance to indicate that they can be resolved to this base union class. This class will never be instantiated, and should have no members -- it is...
def do_find(self, arg): """ [~process] f <string> - find the string in the process memory [~process] find <string> - find the string in the process memory """ if not arg: raise CmdError("missing parameter: string") process = self.get_process_from_prefix() ...
[~process] f <string> - find the string in the process memory [~process] find <string> - find the string in the process memory
def write(self, bytes_):
    """Decode `bytes_` with this wrapper's encoding and write the text.

    :param bytes bytes_: Raw bytes to decode and write to the underlying
        text file object.
    """
    self._file.write(bytes_.decode(self._encoding))
Write bytes to the file.
def create_stack_user(self): """Create the stack user on the machine. """ self.run('adduser -m stack', success_status=(0, 9)) self.create_file('/etc/sudoers.d/stack', 'stack ALL=(root) NOPASSWD:ALL\n') self.run('mkdir -p /home/stack/.ssh') self.run('cp /root/.ssh/authoriz...
Create the stack user on the machine.
def __set_transaction_detail(self, *args, **kwargs): """ Checks kwargs for 'customer_transaction_id' and sets it if present. """ customer_transaction_id = kwargs.get('customer_transaction_id', None) if customer_transaction_id: transaction_detail = self.client.factory...
Checks kwargs for 'customer_transaction_id' and sets it if present.
def shift_or_mirror_into_invertible_domain(self, solution_genotype, copy=False): """Details: input ``solution_genotype`` is changed. The domain is [lb - al, ub + au] and in [lb - 2*al - (ub - lb) / 2, lb - al] mirroring is applied. """ ...
Details: input ``solution_genotype`` is changed. The domain is [lb - al, ub + au] and in [lb - 2*al - (ub - lb) / 2, lb - al] mirroring is applied.
def d_step(self, true_frames, gen_frames): """Performs the discriminator step in computing the GAN loss. Applies stop-gradient to the generated frames while computing the discriminator loss to make sure that the gradients are not back-propagated to the generator. This makes sure that only the discrimin...
Performs the discriminator step in computing the GAN loss. Applies stop-gradient to the generated frames while computing the discriminator loss to make sure that the gradients are not back-propagated to the generator. This makes sure that only the discriminator is updated. Args: true_frames: Tru...
def list_documents(self, page_size=None): """List all subdocuments of the current collection. Args: page_size (Optional[int]]): The maximum number of documents in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value se...
List all subdocuments of the current collection. Args: page_size (Optional[int]]): The maximum number of documents in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. Returns: Sequence[...
def clear(self): """ Cleans up the manager. The manager can't be used after this method has been called """ self.services.clear() self._future_value.clear() self.services = None self._lock = None self._ipopo_instance = None self._context =...
Cleans up the manager. The manager can't be used after this method has been called
def is_readable(value, **kwargs): """Indicate whether ``value`` is a readable file. .. caution:: **Use of this validator is an anti-pattern and should be used with caution.** Validating the readability of a file *before* attempting to read it exposes your code to a bug called `TOCTOU ...
Indicate whether ``value`` is a readable file. .. caution:: **Use of this validator is an anti-pattern and should be used with caution.** Validating the readability of a file *before* attempting to read it exposes your code to a bug called `TOCTOU <https://en.wikipedia.org/wiki/Time_of_ch...
def process_action(self, request, queryset): """ Deletes the object(s). Successful deletes are logged. Returns a 'render redirect' to the result of the `get_done_url` method. If a ProtectedError is raised, the `render` method is called with message explaining the error a...
Deletes the object(s). Successful deletes are logged. Returns a 'render redirect' to the result of the `get_done_url` method. If a ProtectedError is raised, the `render` method is called with message explaining the error added to the context as `protected`.
def __set_bp(self, aProcess): """ Sets the target pages as guard pages. @type aProcess: L{Process} @param aProcess: Process object. """ lpAddress = self.get_address() dwSize = self.get_size() flNewProtect = aProcess.mquery(lpAddress).Protect ...
Sets the target pages as guard pages. @type aProcess: L{Process} @param aProcess: Process object.
def _create_row_labels(self): """ Take the original labels for rows. Rename if alternative labels are provided. Append label suffix if label_suffix is True. Returns ---------- labels : dictionary Dictionary, keys are original column name, values are final la...
Take the original labels for rows. Rename if alternative labels are provided. Append label suffix if label_suffix is True. Returns ---------- labels : dictionary Dictionary, keys are original column name, values are final label.
def validate_object(obj, field_validators=None, non_field_validators=None, schema=None, context=None): """ Takes a mapping and applies a mapping of validator functions to it collecting and reraising any validation errors that occur. """ if schema is None: schema = {} ...
Takes a mapping and applies a mapping of validator functions to it collecting and reraising any validation errors that occur.
def _parse_mtllibs(self): """Load mtl files""" for mtllib in self.meta.mtllibs: try: materials = self.material_parser_cls( os.path.join(self.path, mtllib), encoding=self.encoding, strict=self.strict).materials ...
Load mtl files
def dump(self, filename): """Dump counters to file""" try: with open(filename, 'wb') as fp: cPickle.dump(self.counters, fp) except Exception as e: logging.warning("can't dump counter to file %s: %s", filename, e) return False return Tru...
Dump counters to file
def imresize(self, data, new_wd, new_ht, method='bilinear'): """Scale an image in numpy array _data_ to the specified width and height. A smooth scaling is preferred. """ old_ht, old_wd = data.shape[:2] start_time = time.time() if have_pilutil: means = 'PIL'...
Scale an image in numpy array _data_ to the specified width and height. A smooth scaling is preferred.
def _landsat_get_mtl(sceneid): """ Get Landsat-8 MTL metadata. Attributes ---------- sceneid : str Landsat sceneid. For scenes after May 2017, sceneid have to be LANDSAT_PRODUCT_ID. Returns ------- out : dict returns a JSON like object with the metadata. ""...
Get Landsat-8 MTL metadata. Attributes ---------- sceneid : str Landsat sceneid. For scenes after May 2017, sceneid have to be LANDSAT_PRODUCT_ID. Returns ------- out : dict returns a JSON like object with the metadata.
def hamming_emd(d1, d2): """Return the Earth Mover's Distance between two distributions (indexed by state, one dimension per node) using the Hamming distance between states as the transportation cost function. Singleton dimensions are sqeezed out. """ N = d1.squeeze().ndim d1, d2 = flatten(...
Return the Earth Mover's Distance between two distributions (indexed by state, one dimension per node) using the Hamming distance between states as the transportation cost function. Singleton dimensions are squeezed out.
def _on_library_path_changed(self, renderer, path, new_library_path): """Callback handling a change of a library path :param Gtk.CellRenderer renderer: Cell renderer showing the library path :param path: Path of library within the list store :param str new_library_path: New library path...
Callback handling a change of a library path :param Gtk.CellRenderer renderer: Cell renderer showing the library path :param path: Path of library within the list store :param str new_library_path: New library path
def get_message_actions(current): """ Returns applicable actions for current user for given message key .. code-block:: python # request: { 'view':'_zops_get_message_actions', 'key': key, } # response: { 'actions':[('name_stri...
Returns applicable actions for current user for given message key .. code-block:: python # request: { 'view':'_zops_get_message_actions', 'key': key, } # response: { 'actions':[('name_string', 'cmd_string'),] 'status': str...
def _get_optimizer(self): """Uses Adagrad to optimize the GloVe/Mittens objective, as specified in the GloVe paper. """ optim = tf.train.AdagradOptimizer(self.learning_rate) gradients = optim.compute_gradients(self.cost) if self.log_dir: for name, (g, v) in zi...
Uses Adagrad to optimize the GloVe/Mittens objective, as specified in the GloVe paper.
def ssn(self): """ Returns a 13 digits Swiss SSN named AHV (German) or AVS (French and Italian) See: http://www.bsv.admin.ch/themen/ahv/00011/02185/ """ def _checksum(digits): evensum = sum(digits[:-1:2]) oddsum ...
Returns a 13 digits Swiss SSN named AHV (German) or AVS (French and Italian) See: http://www.bsv.admin.ch/themen/ahv/00011/02185/
def _validate_sample_rates(input_filepath_list, combine_type): ''' Check if files in input file list have the same sample rate ''' sample_rates = [ file_info.sample_rate(f) for f in input_filepath_list ] if not core.all_equal(sample_rates): raise IOError( "Input files do ...
Check if files in input file list have the same sample rate
def pca_plot(pca, dt, xlabs=None, mode='scatter', lognorm=True): """ Plot a fitted PCA, and all components. """ nc = pca.n_components f = np.arange(pca.n_features_) cs = list(itertools.combinations(range(nc), 2)) ind = ~np.apply_along_axis(any, 1, np.isnan(dt)) cylim = (pca.co...
Plot a fitted PCA, and all components.
def spill(self, src, dest): """ Spill a workspace, i.e. unpack it and turn it into a workspace. See https://ocr-d.github.com/ocrd_zip#unpacking-ocrd-zip-to-a-workspace Arguments: src (string): Path to OCRD-ZIP dest (string): Path to directory to unpack data fold...
Spill a workspace, i.e. unpack it and turn it into a workspace. See https://ocr-d.github.com/ocrd_zip#unpacking-ocrd-zip-to-a-workspace Arguments: src (string): Path to OCRD-ZIP dest (string): Path to directory to unpack data folder to
def create_jinja_env(): """Create a Jinja2 `~jinja2.Environment`. Returns ------- env : `jinja2.Environment` Jinja2 template rendering environment, configured to use templates in ``templates/``. """ template_dir = os.path.join(os.path.dirname(__file__), 'templates') env = ji...
Create a Jinja2 `~jinja2.Environment`. Returns ------- env : `jinja2.Environment` Jinja2 template rendering environment, configured to use templates in ``templates/``.
def print(self, tag=None, name=None): """ Prints each tuple to stdout flushing after each tuple. If `tag` is not `None` then each tuple has "tag: " prepended to it before printing. Args: tag: A tag to prepend to each tuple. name(str): Name of the resulti...
Prints each tuple to stdout flushing after each tuple. If `tag` is not `None` then each tuple has "tag: " prepended to it before printing. Args: tag: A tag to prepend to each tuple. name(str): Name of the resulting stream. When `None` defaults to a gener...
def get_tops(self): ''' Gather the top files ''' tops = DefaultOrderedDict(list) include = DefaultOrderedDict(list) done = DefaultOrderedDict(list) found = 0 # did we find any contents in the top files? # Gather initial top files merging_strategy ...
Gather the top files
def pretty_dumps(data):
    """Return `data` serialized as a pretty-printed JSON string.

    Keys are sorted and output is indented by 4 spaces.  Non-ASCII text is
    emitted as-is when possible; if serialization fails, retry with
    ASCII-escaped output as a best-effort fallback.

    (Original note, translated from Chinese: converts a dict into a
    formatted string.)
    """
    try:
        return json.dumps(data, sort_keys=True, indent=4, ensure_ascii=False)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; the retry itself is deliberate.
        return json.dumps(data, sort_keys=True, indent=4, ensure_ascii=True)
Return json string in pretty format. **中文文档** 将字典转化成格式化后的字符串。
def calc_nested_probs(nest_coefs, index_coefs, design, rows_to_obs, rows_to_nests, chosen_row_to_obs=None, return_type="long_probs", *args, **kw...
Parameters ---------- nest_coefs : 1D or 2D ndarray. All elements should by ints, floats, or longs. If 1D, should have 1 element for each nesting coefficient being estimated. If 2D, should have 1 column for each set of nesting coefficients being used to predict the probabilities ...
def find_usage(self): """ Determine the current usage for each limit of this service, and update corresponding Limit via :py:meth:`~.AwsLimit._add_current_usage`. """ logger.debug("Checking usage for service %s", self.service_name) self.connect() for lim i...
Determine the current usage for each limit of this service, and update corresponding Limit via :py:meth:`~.AwsLimit._add_current_usage`.
def read(self, length): """ Read as many bytes from socket as specified in length. Loop as long as every byte is read unless exception is raised. """ data = bytearray() while len(data) != length: data += self.sock.recv((length - len(data))) if not ...
Read as many bytes from socket as specified in length. Loop as long as every byte is read unless exception is raised.
def get_index(self, index, type, alias=None, typed=None, read_only=True, kwargs=None): """ TESTS THAT THE INDEX EXISTS BEFORE RETURNING A HANDLE """ if kwargs.tjson != None: Log.error("used `typed` parameter, not `tjson`") if read_only: # GET EXACT MATCH, ...
TESTS THAT THE INDEX EXISTS BEFORE RETURNING A HANDLE
def check(self): """ Check if data and third party tools are available :raises: RuntimeError """ #for path in self.path.values(): # if not os.path.exists(path): # raise RuntimeError("File '{}' is missing".format(path)) for tool in ('cd-hit', 'prank', ...
Check if data and third party tools are available :raises: RuntimeError
def predictor(self, edge):
    """Add to chart any rules for B that could help extend this edge.

    :param edge: An ``(i, j, A, alpha, Bb)`` tuple where `Bb` is the list
        of symbols still expected; its head `B` is the category to predict.
    """
    # The original signature used Python 2 tuple-parameter unpacking
    # (`def predictor(self, (i, j, A, alpha, Bb))`), which is a SyntaxError
    # in Python 3 (removed by PEP 3113). Callers still pass one tuple.
    (i, j, A, alpha, Bb) = edge
    B = Bb[0]
    if B in self.grammar.rules:
        for rhs in self.grammar.rewrites_for(B):
            self.add_edge([j, j, B, [], rhs])
Add to chart any rules for B that could help extend this edge.
def find(self, path, all=False): """ Looks for files in the extra locations as defined in ``MEDIA_FIXTURES_FILES_DIRS``. """ matches = [] for prefix, root in self.locations: if root not in searched_locations: searched_locations.append(root) ...
Looks for files in the extra locations as defined in ``MEDIA_FIXTURES_FILES_DIRS``.
def make_tempfile (self, want='handle', resolution='try_unlink', suffix='', **kwargs): """Get a context manager that creates and cleans up a uniquely-named temporary file with a name similar to this path. This function returns a context manager that creates a secure temporary file with ...
Get a context manager that creates and cleans up a uniquely-named temporary file with a name similar to this path. This function returns a context manager that creates a secure temporary file with a path similar to *self*. In particular, if ``str(self)`` is something like ``foo/bar``, t...
def prepare(self):
    """Instantiate handlers in priority order and reset context state.

    Resets `scope` and `mapping` to their initial values, then builds
    `_handler` as instances of the handler classes sorted by priority.
    """
    self.scope = 0
    self.mapping = deque([0])
    ordered = sorted(self.handlers, key=lambda handler_cls: handler_cls.priority)
    self._handler = [handler_cls() for handler_cls in ordered]
Prepare the ordered list of transformers and reset context state to initial.
def numpy_to_data_array(ary, *, var_name="data", coords=None, dims=None): """Convert a numpy array to an xarray.DataArray. The first two dimensions will be (chain, draw), and any remaining dimensions will be "shape". If the numpy array is 1d, this dimension is interpreted as draw If the numpy array...
Convert a numpy array to an xarray.DataArray. The first two dimensions will be (chain, draw), and any remaining dimensions will be "shape". If the numpy array is 1d, this dimension is interpreted as draw If the numpy array is 2d, it is interpreted as (chain, draw) If the numpy array is 3 or more di...
def venv_pth(self, dirs): ''' Add the directories in `dirs` to the `sys.path`. A venv.pth file will be written in the site-packages dir of this virtualenv to add dirs to sys.path. dirs: a list of directories. ''' # Create venv.pth to add dirs to sys.path when us...
Add the directories in `dirs` to the `sys.path`. A venv.pth file will be written in the site-packages dir of this virtualenv to add dirs to sys.path. dirs: a list of directories.
def command_upgrade(self): """Recreate repositories package lists """ if len(self.args) == 1 and self.args[0] == "upgrade": Initialization(False).upgrade(only="") elif (len(self.args) == 2 and self.args[0] == "upgrade" and self.args[1].startswith("--only=")): ...
Recreate repositories package lists
def def_links(mobj): """Define Sphinx requirements links.""" fdict = json_load(os.path.join("data", "requirements.json")) sdeps = sorted(fdict.keys()) olines = [] for item in sdeps: olines.append( ".. _{name}: {url}\n".format( name=fdict[item]["name"], url=fdict[i...
Define Sphinx requirements links.
def export(self, name, columns, points): """Write the points in MQTT.""" WHITELIST = '_-' + string.ascii_letters + string.digits SUBSTITUTE = '_' def whitelisted(s, whitelist=WHITELIST, substitute=SUBSTITUTE): return ''.join(c...
Write the points in MQTT.
def get_minimum_size(self, data):
    """Return the wrapped element's minimum size, accounting for rotation.

    For quarter-turn rotations the width and height are swapped; for
    normal and upside-down orientations the size passes through unchanged.
    """
    inner = self.element.get_minimum_size(data)
    if self.angle not in (RotateLM.NORMAL, RotateLM.UPSIDE_DOWN):
        return datatypes.Point(inner.y, inner.x)
    return inner
Returns the rotated minimum size.
def current(cls):
    """Build a peer describing the host this process is running on.

    Resolves the local fully-qualified domain name and its IP address,
    then constructs an instance from them.
    """
    fqdn = socket.getfqdn()
    return cls(fqdn, socket.gethostbyname(fqdn))
Helper method for getting the current peer of whichever host we're running on.
def _auth_session(self, username, password): """ Creates session to Hetzner account, authenticates with given credentials and returns the session, if authentication was successful. Otherwise raises error. """ api = self.api[self.account]['auth'] endpoint = api.get('endpoi...
Creates session to Hetzner account, authenticates with given credentials and returns the session, if authentication was successful. Otherwise raises error.
def get_representative_cases(self): """ >>> armr = OldNorseNoun("armr", decl_utils.Gender.masculine) >>> armr.set_representative_cases("armr", "arms", "armar") >>> armr.get_representative_cases() ('armr', 'arms', 'armar') :return: nominative singular, genetive singular, ...
>>> armr = OldNorseNoun("armr", decl_utils.Gender.masculine) >>> armr.set_representative_cases("armr", "arms", "armar") >>> armr.get_representative_cases() ('armr', 'arms', 'armar') :return: nominative singular, genetive singular, nominative plural
def list_config(root_package = 'vlcp'): ''' Walk through all the sub modules, find subclasses of vlcp.config.Configurable, list their available configurations through _default_ prefix ''' pkg = __import__(root_package, fromlist=['_']) return_dict = OrderedDict() for imp, module, _ in walk_pa...
Walk through all the sub modules, find subclasses of vlcp.config.Configurable, list their available configurations through _default_ prefix
def _apply_role_tree(self, perm_tree, role): """In permission tree, sets `'checked': True` for the permissions that the role has.""" role_permissions = role.get_permissions() for perm in role_permissions: self._traverse_tree(perm_tree, perm)['checked'] = True return perm_tree
In permission tree, sets `'checked': True` for the permissions that the role has.
def smove(self, src, dst, value): """Emulate smove.""" src_set = self._get_set(src, 'SMOVE') dst_set = self._get_set(dst, 'SMOVE') value = self._encode(value) if value not in src_set: return False src_set.discard(value) dst_set.add(value) sel...
Emulate smove.
def _check_for_api_errors(geocoding_results): """ Raise any exceptions if there were problems reported in the api response. """ status_result = geocoding_results.get("STATUS", {}) if "NO_RESULTS" in status_result.get("status", ""): return api_call_succ...
Raise any exceptions if there were problems reported in the api response.
def check_list_type(objects, allowed_type, name, allow_none=True): """Verify that objects in list are of the allowed type or raise TypeError. Args: objects: The list of objects to check. allowed_type: The allowed type of items in 'settings'. name: Name of the list of objects, added to the exception. ...
Verify that objects in list are of the allowed type or raise TypeError. Args: objects: The list of objects to check. allowed_type: The allowed type of items in 'settings'. name: Name of the list of objects, added to the exception. allow_none: If set, None is also allowed. Raises: TypeError: if...
def register(cls): """ Register a given model in the registry """ registry_entry = RegistryEntry(category = cls.category, namespace = cls.namespace, name = cls.name, cls=cls) if registry_entry not in registry and not exists_in_registry(cls.category, cls.namespace, cls.name): registry.append(...
Register a given model in the registry
def addAsn1MibSource(self, *asn1Sources, **kwargs): """Adds path to a repository to search ASN.1 MIB files. Parameters ---------- *asn1Sources : one or more URL in form of :py:obj:`str` identifying local or remote ASN.1 MIB repositories. Path must include the *@m...
Adds path to a repository to search ASN.1 MIB files. Parameters ---------- *asn1Sources : one or more URL in form of :py:obj:`str` identifying local or remote ASN.1 MIB repositories. Path must include the *@mib@* component which will be replaced with MIB modu...
def load(self): """ Function load Get the list of all objects @return RETURN: A ForemanItem list """ cl_tmp = self.api.list(self.objName, limit=self.searchLimit).values() cl = [] for i in cl_tmp: cl.extend(i) return {x[self.index]: ItemPuppetC...
Function load Get the list of all objects @return RETURN: A ForemanItem list
def _generate_request_handler_proxy(handler_class, handler_args, name): """When a tornado.web.RequestHandler gets mounted we create a launcher function""" @scope.inject def request_handler_wrapper(app, handler, **kwargs): handler = handler_class(app, handler.request, **handler_args) handler...
When a tornado.web.RequestHandler gets mounted we create a launcher function
def _prepare_sets(self, sets): """ The original "_prepare_sets" method simple return the list of sets in _lazy_collection, know to be all keys of redis sets. As the new "intersect" method can accept different types of "set", we have to handle them because we must return only keys...
The original "_prepare_sets" method simply returns the list of sets in _lazy_collection, known to be all keys of redis sets. As the new "intersect" method can accept different types of "set", we have to handle them because we must return only keys of redis sets.
def _lcs(x, y): """Computes the length of the LCS between two seqs. The implementation below uses a DP programming algorithm and runs in O(nm) time where n = len(x) and m = len(y). Source: http://www.algorithmist.com/index.php/Longest_Common_Subsequence Args: x: collection of words y: collection of ...
Computes the length of the LCS between two seqs. The implementation below uses a DP programming algorithm and runs in O(nm) time where n = len(x) and m = len(y). Source: http://www.algorithmist.com/index.php/Longest_Common_Subsequence Args: x: collection of words y: collection of words Returns: ...
def values(self, with_defaults=True):
    """Return a name -> string-value mapping for all non-empty inputs.

    :param bool with_defaults: Passed through to each input's
        ``is_empty`` check, so default values may count as filled.
    """
    return {name: str(value)
            for name, value in self._inputs.items()
            if not value.is_empty(with_defaults)}
Return the values dictionary, defaulting to default values
def _generate(self, pset, min_, max_, condition, type_=None): """Generate a Tree as a list of lists. The tree is build from the root to the leaves, and it stop growing when the condition is fulfilled. Parameters ---------- pset: PrimitiveSetTyped Primitive s...
Generate a Tree as a list of lists. The tree is build from the root to the leaves, and it stop growing when the condition is fulfilled. Parameters ---------- pset: PrimitiveSetTyped Primitive set from which primitives are selected. min_: int Mini...
def magnitude(self):
    """Return this point's distance from the origin (its vector length)."""
    squared = self.x * self.x + self.y * self.y
    return math.sqrt(squared)
Return the magnitude when treating the point as a vector.
def search_end_date(self, search_end_date): """ :type search_end_date: astropy.io.Time :param search_end_date: search for frames take after the given date. """ assert isinstance(search_end_date, Time) self._search_end_date = search_end_date.replicate(format='iso') ...
:type search_end_date: astropy.io.Time :param search_end_date: search for frames taken after the given date.
def parse_enum_value_definition(lexer: Lexer) -> EnumValueDefinitionNode: """EnumValueDefinition: Description? EnumValue Directives[Const]?""" start = lexer.token description = parse_description(lexer) name = parse_name(lexer) directives = parse_directives(lexer, True) return EnumValueDefinition...
EnumValueDefinition: Description? EnumValue Directives[Const]?
def calculate_size(name, thread_id):
    """Calculate the request payload size in bytes.

    :param name: Name string whose encoded size is counted.
    :param thread_id: Thread id; contributes one fixed long-sized field.
    :return: Total payload size in bytes.
    """
    # Payload layout: the encoded name, followed by one long (thread id).
    return calculate_size_str(name) + LONG_SIZE_IN_BYTES
Calculates the request payload size
def filter_files_extensions(files, extension_lists): """ Put the files in buckets according to extension_lists files=[movie.avi, movie.srt], extension_lists=[[avi],[srt]] ==> [[movie.avi],[movie.srt]] :param files: A list of files :param extension_lists: A list of list of extensions :return: The...
Put the files in buckets according to extension_lists files=[movie.avi, movie.srt], extension_lists=[[avi],[srt]] ==> [[movie.avi],[movie.srt]] :param files: A list of files :param extension_lists: A list of list of extensions :return: The files filtered and sorted according to extension_lists
def woodbury_vector(self): """ Woodbury vector in the gaussian likelihood case only is defined as $$ (K_{xx} + \Sigma)^{-1}Y \Sigma := \texttt{Likelihood.variance / Approximate likelihood covariance} $$ """ if self._woodbury_vector is None: sel...
Woodbury vector in the gaussian likelihood case only is defined as $$ (K_{xx} + \Sigma)^{-1}Y \Sigma := \texttt{Likelihood.variance / Approximate likelihood covariance} $$
def create_table(self, table, fields): """ Responsys.createTable call Accepts: InteractObject table list fields Returns True on success """ table = table.get_soap_object(self.client) return self.call('createTable', table, fields)
Responsys.createTable call Accepts: InteractObject table list fields Returns True on success
def _reducedProtToPeps(protToPeps, proteins): """Returns a new, reduced "protToPeps" dictionary that does not contain entries present in "proteins". :param protToPeps: dict, for each protein (=key) contains a set of associated peptides (=value). For Example {protein: {peptide, ...}, ...} :param...
Returns a new, reduced "protToPeps" dictionary that does not contain entries present in "proteins". :param protToPeps: dict, for each protein (=key) contains a set of associated peptides (=value). For Example {protein: {peptide, ...}, ...} :param proteins: a list of proteinSet :returns: dict, p...
def emit(self, record): """Emit a formatted log record via DDP.""" if getattr(this, 'subs', {}).get(LOGS_NAME, False): self.format(record) this.send({ 'msg': ADDED, 'collection': LOGS_NAME, 'id': meteor_random_id('/collection/%s' % ...
Emit a formatted log record via DDP.
def weighted_round_robin(iterable): '''Takes an iterable of tuples of <item>, <weight> and cycles around them, returning heavier (integer) weighted items more frequently. ''' cyclable_list = [] assigned_weight = 0 still_to_process = [ (item, weight) for item, weight in sorted(ite...
Takes an iterable of tuples of <item>, <weight> and cycles around them, returning heavier (integer) weighted items more frequently.
def standings(self, league_table, league): """Store output of league standings to a CSV file""" headers = ['Position', 'Team Name', 'Games Played', 'Goal For', 'Goals Against', 'Goal Difference', 'Points'] result = [headers] result.extend([team['position'], ...
Store output of league standings to a CSV file
def run_initial(self, events): """Runs the initial batch upload :param events: an iterable containing events """ self_name = type(self).__name__ for i, batch in enumerate(grouper(events, self.INITIAL_BATCH_SIZE, skip_missing=True), 1): self.logger.debug('%s processin...
Runs the initial batch upload :param events: an iterable containing events
def set_pdb_trace(pm=False): """Start the Python debugger when robotframework is running. This makes sure that pdb can use stdin/stdout even though robotframework has redirected I/O. """ import sys import pdb for attr in ("stdin", "stdout", "stderr"): setattr(sys, attr, getattr(sys...
Start the Python debugger when robotframework is running. This makes sure that pdb can use stdin/stdout even though robotframework has redirected I/O.
def combineReads(filename, sequences, readClass=DNARead, upperCase=False, idPrefix='command-line-read-'): """ Combine FASTA reads from a file and/or sequence strings. @param filename: A C{str} file name containing FASTA reads. @param sequences: A C{list} of C{str} sequences. If a seque...
Combine FASTA reads from a file and/or sequence strings. @param filename: A C{str} file name containing FASTA reads. @param sequences: A C{list} of C{str} sequences. If a sequence contains spaces, the last field (after splitting on spaces) will be used as the sequence and the first fields will ...
async def _play(self, ctx, *, query: str): """ Searches and plays a song from a given query. """ player = self.bot.lavalink.players.get(ctx.guild.id) query = query.strip('<>') if not url_rx.match(query): query = f'ytsearch:{query}' tracks = await self.bot....
Searches and plays a song from a given query.
def list_of_vars(arg_plot): """Construct list of variables per plot. Args: arg_plot (str): string with variable names separated with ``_`` (figures), ``.`` (subplots) and ``,`` (same subplot). Returns: three nested lists of str - variables on the same subplot; -...
Construct list of variables per plot. Args: arg_plot (str): string with variable names separated with ``_`` (figures), ``.`` (subplots) and ``,`` (same subplot). Returns: three nested lists of str - variables on the same subplot; - subplots on the same figure; ...
def generate_markdown(cls): """ Documents values in markdown """ lines = [] if cls.__doc__: lines.extend(['# {}'.format(cls.__doc__), '']) for k, v in cls._values.items(): lines.append('* **{}** '.format(k)) if v.required: ...
Documents values in markdown
def is_auto_partition_required(self, brain_or_object): """Returns whether the passed in object needs to be partitioned """ obj = api.get_object(brain_or_object) if not IAnalysisRequest.providedBy(obj): return False template = obj.getTemplate() return template ...
Returns whether the passed in object needs to be partitioned
def AddWeight(self, path_segment_index, weight): """Adds a weight for a specific path segment index. Args: path_segment_index: an integer containing the path segment index. weight: an integer containing the weight. Raises: ValueError: if the path segment weights do not contain ...
Adds a weight for a specific path segment index. Args: path_segment_index: an integer containing the path segment index. weight: an integer containing the weight. Raises: ValueError: if the path segment weights do not contain the path segment index.
def snapshot_created(name, ami_name, instance_name, wait_until_available=True, wait_timeout_seconds=300, **kwargs): ''' Create a snapshot from the given instance .. versionadded:: 2016.3.0 ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {} ...
Create a snapshot from the given instance .. versionadded:: 2016.3.0
def get_arg_parser(cls, settings = None, option_prefix = u'--', add_help = False): """Make a command-line option parser. The returned parser may be used as a parent parser for application argument parser. :Parameters: ...
Make a command-line option parser. The returned parser may be used as a parent parser for application argument parser. :Parameters: - `settings`: list of PyXMPP2 settings to consider. By default all 'basic' settings are provided. - `option_prefix`: custom ...
def recall_score(y_true, y_pred, average='micro', suffix=False): """Compute the recall. The recall is the ratio ``tp / (tp + fn)`` where ``tp`` is the number of true positives and ``fn`` the number of false negatives. The recall is intuitively the ability of the classifier to find all the positive samp...
Compute the recall. The recall is the ratio ``tp / (tp + fn)`` where ``tp`` is the number of true positives and ``fn`` the number of false negatives. The recall is intuitively the ability of the classifier to find all the positive samples. The best value is 1 and the worst value is 0. Args: ...