code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def create_api_integration(restApiId, resourcePath, httpMethod, integrationType, integrationHttpMethod, uri, credentials, requestParameters=None, requestTemplates=None, region=None, key=None, keyid=None, profile=None): ''' Creates an integration for a given ...
Creates an integration for a given method in a given API. If integrationType is MOCK, uri and credential parameters will be ignored. uri is in the form of (substitute APIGATEWAY_REGION and LAMBDA_FUNC_ARN) "arn:aws:apigateway:APIGATEWAY_REGION:lambda:path/2015-03-31/functions/LAMBDA_FUNC_ARN/invocations" ...
def find_all_checks(self, **kwargs): """ Finds all checks for this entity with attributes matching ``**kwargs``. This isn't very efficient: it loads the entire list then filters on the Python side. """ checks = self._check_manager.find_all_checks(**kwargs) for ch...
Finds all checks for this entity with attributes matching ``**kwargs``. This isn't very efficient: it loads the entire list then filters on the Python side.
def setFlag(self, flag, state=True): """ Sets whether or not the given flag is enabled or disabled. :param flag | <XExporter.Flags> """ has_flag = self.testFlag(flag) if has_flag and not state: self.setFlags(self.flags() ^ flag) ...
Sets whether or not the given flag is enabled or disabled. :param flag | <XExporter.Flags>
def _get_face2(shape=None, face_r=1.0, smile_r1=0.5, smile_r2=0.7, eye_r=0.2): """ Create 2D binar face :param shape: :param face_r: :param smile_r1: :param smile_r2: :param eye_r: :return: """ # data3d = np.zeros([1,7,7], dtype=np.int16) if shape is None: shape = [3...
Create 2D binar face :param shape: :param face_r: :param smile_r1: :param smile_r2: :param eye_r: :return:
def init(self, projectname=None, description=None, **kwargs): """ Initialize a new experiment Parameters ---------- projectname: str The name of the project that shall be used. If None, the last one created will be used description: str ...
Initialize a new experiment Parameters ---------- projectname: str The name of the project that shall be used. If None, the last one created will be used description: str A short summary of the experiment ``**kwargs`` Keyword argum...
def problem_id(self, value): """The problem_id property. Args: value (string). the property value. """ if value == self._defaults['problemId'] and 'problemId' in self._values: del self._values['problemId'] else: self._values['problemId...
The problem_id property. Args: value (string). the property value.
def separation(sources, fs=22050, labels=None, alpha=0.75, ax=None, **kwargs): '''Source-separation visualization Parameters ---------- sources : np.ndarray, shape=(nsrc, nsampl) A list of waveform buffers corresponding to each source fs : number > 0 The sampling rate labels :...
Source-separation visualization Parameters ---------- sources : np.ndarray, shape=(nsrc, nsampl) A list of waveform buffers corresponding to each source fs : number > 0 The sampling rate labels : list of strings An optional list of descriptors corresponding to each source ...
def install_package_to_venv(self): ''' Installs package given as first argument to virtualenv without dependencies ''' try: self.env.install(self.name, force=True, options=["--no-deps"]) except (ve.PackageInstallationException, ve.VirtualenvRea...
Installs package given as first argument to virtualenv without dependencies
def product(pc, service, attrib, sku): """ Get a list of a service's products. The list will be in the given region, matching the specific terms and any given attribute filters or a SKU. """ pc.service = service.lower() pc.sku = sku pc.add_attributes(attribs=attrib) click.echo("Servi...
Get a list of a service's products. The list will be in the given region, matching the specific terms and any given attribute filters or a SKU.
def generate_blob(self, container_name, blob_name, permission=None, expiry=None, start=None, id=None, ip=None, protocol=None, cache_control=None, content_disposition=None, content_encoding=None, content_language=None, conte...
Generates a shared access signature for the blob. Use the returned signature with the sas_token parameter of any BlobService. :param str container_name: Name of container. :param str blob_name: Name of blob. :param BlobPermissions permission: The perm...
def _get_svc_list(service_status): ''' Returns all service statuses ''' prefix = '/etc/rc.d/' ret = set() lines = glob.glob('{0}*'.format(prefix)) for line in lines: svc = _get_svc(line, service_status) if svc is not None: ret.add(svc) return sorted(ret)
Returns all service statuses
def modify_fk_constraint(apps, schema_editor): """ Delete's the current foreign key contraint on the outbound field, and adds it again, but this time with an ON DELETE clause """ model = apps.get_model("message_sender", "OutboundSendFailure") table = model._meta.db_table with schema_editor....
Delete's the current foreign key contraint on the outbound field, and adds it again, but this time with an ON DELETE clause
def inner(a,b): ''' inner(a,b) yields the dot product of a and b, doing so in a fashion that respects sparse matrices when encountered. This does not error check for bad dimensionality. If a or b are constants, then the result is just the a*b; if a and b are both vectors or both matrices, then th...
inner(a,b) yields the dot product of a and b, doing so in a fashion that respects sparse matrices when encountered. This does not error check for bad dimensionality. If a or b are constants, then the result is just the a*b; if a and b are both vectors or both matrices, then the inner product is dot(a,b);...
def train_model(params: Params, serialization_dir: str, file_friendly_logging: bool = False, recover: bool = False, force: bool = False, cache_directory: str = None, cache_prefix: str = None) -> Model: """ Trains the...
Trains the model specified in the given :class:`Params` object, using the data and training parameters also specified in that object, and saves the results in ``serialization_dir``. Parameters ---------- params : ``Params`` A parameter object specifying an AllenNLP Experiment. serialization...
def set_euk_hmm(self, args): 'Set the hmm used by graftM to cross check for euks.' if hasattr(args, 'euk_hmm_file'): pass elif not hasattr(args, 'euk_hmm_file'): # set to path based on the location of bin/graftM, which has # a more stable relative path to the ...
Set the hmm used by graftM to cross check for euks.
def send(self, request, headers=None, content=None, **kwargs): """Prepare and send request object according to configuration. :param ClientRequest request: The request object to be sent. :param dict headers: Any headers to add to the request. :param content: Any body data to add to the ...
Prepare and send request object according to configuration. :param ClientRequest request: The request object to be sent. :param dict headers: Any headers to add to the request. :param content: Any body data to add to the request. :param config: Any specific config overrides
def cursor_position_changed(self): """Brace matching""" if self.bracepos is not None: self.__highlight(self.bracepos, cancel=True) self.bracepos = None cursor = self.textCursor() if cursor.position() == 0: return cursor.movePosition(QTe...
Brace matching
def convertMzml(mzmlPath, outputDirectory=None): """Imports an mzml file and converts it to a MsrunContainer file :param mzmlPath: path of the mzml file :param outputDirectory: directory where the MsrunContainer file should be written if it is not specified, the output directory is set to the mzml file...
Imports an mzml file and converts it to a MsrunContainer file :param mzmlPath: path of the mzml file :param outputDirectory: directory where the MsrunContainer file should be written if it is not specified, the output directory is set to the mzml files directory.
def page(self, recurring=values.unset, trigger_by=values.unset, usage_category=values.unset, page_token=values.unset, page_number=values.unset, page_size=values.unset): """ Retrieve a single page of TriggerInstance records from the API. Request is executed immediately ...
Retrieve a single page of TriggerInstance records from the API. Request is executed immediately :param TriggerInstance.Recurring recurring: The frequency of recurring UsageTriggers to read :param TriggerInstance.TriggerField trigger_by: The trigger field of the UsageTriggers to read :pa...
def sort(self, *sorting, **kwargs): """Sort resources.""" sorting_ = [] for name, desc in sorting: field = self.meta.model._meta.fields.get(name) if field is None: continue if desc: field = field.desc() sorting_.appe...
Sort resources.
def get_destination(self, filepath, targetdir=None): """ Return destination path from given source file path. Destination is allways a file with extension ``.css``. Args: filepath (str): A file path. The path is allways relative to sources directory. If not ...
Return destination path from given source file path. Destination is allways a file with extension ``.css``. Args: filepath (str): A file path. The path is allways relative to sources directory. If not relative, ``targetdir`` won't be joined. abso...
def copy_function(func, name=None): """Copy a function object with different name. Args: func (function): Function to be copied. name (string, optional): Name of the new function. If not spacified, the same name of `func` will be used. Returns: newfunc (function): New f...
Copy a function object with different name. Args: func (function): Function to be copied. name (string, optional): Name of the new function. If not spacified, the same name of `func` will be used. Returns: newfunc (function): New function with different name.
def _set_system_mode(self, v, load=False): """ Setter method for system_mode, mapped from YANG variable /hardware/system_mode (system-mode-type) If this variable is read-only (config: false) in the source YANG file, then _set_system_mode is considered as a private method. Backends looking to populat...
Setter method for system_mode, mapped from YANG variable /hardware/system_mode (system-mode-type) If this variable is read-only (config: false) in the source YANG file, then _set_system_mode is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._s...
def _wpad(l, windowsize, stepsize): """ Parameters l - The length of the input array windowsize - the size of each window of samples stepsize - the number of samples to move the window each step Returns The length the input array should be so that no samples are leftover ...
Parameters l - The length of the input array windowsize - the size of each window of samples stepsize - the number of samples to move the window each step Returns The length the input array should be so that no samples are leftover
def read(self, gpio): """ Returns the GPIO level. gpio:= 0-53. ... yield from pi.set_mode(23, pigpio.INPUT) yield from pi.set_pull_up_down(23, pigpio.PUD_DOWN) print(yield from pi.read(23)) 0 yield from pi.set_pull_up_down(23, pigpio.PUD_UP) ...
Returns the GPIO level. gpio:= 0-53. ... yield from pi.set_mode(23, pigpio.INPUT) yield from pi.set_pull_up_down(23, pigpio.PUD_DOWN) print(yield from pi.read(23)) 0 yield from pi.set_pull_up_down(23, pigpio.PUD_UP) print(yield from pi.read(23)) 1 ...
def update_asset_browser(self, project, releasetype): """update the assetbrowser to the given project :param releasetype: the releasetype for the model :type releasetype: :data:`djadapter.RELEASETYPES` :param project: the project of the assets :type project: :class:`djadapter.mo...
update the assetbrowser to the given project :param releasetype: the releasetype for the model :type releasetype: :data:`djadapter.RELEASETYPES` :param project: the project of the assets :type project: :class:`djadapter.models.Project` :returns: None :rtype: None ...
def construct_graph(sakefile, settings): """ Takes the sakefile dictionary and builds a NetworkX graph Args: A dictionary that is the parsed Sakefile (from sake.py) The settings dictionary Returns: A NetworkX graph """ verbose = settings["verbose"] sprint = settings...
Takes the sakefile dictionary and builds a NetworkX graph Args: A dictionary that is the parsed Sakefile (from sake.py) The settings dictionary Returns: A NetworkX graph
def report(ctx, board, done, output): ctx.obj['board_id'] = board ts = TrelloStats(ctx.obj) """ Reporting mode - Daily snapshots of a board for ongoing reporting: -> trellis report --board=87hiudhw --spend --revenue ...
Reporting mode - Daily snapshots of a board for ongoing reporting: -> trellis report --board=87hiudhw --spend --revenue --done=Done
def VerifyRow(self, parser_mediator, row): """Verifies if a line of the file is in the expected format. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. row (dict[str, str]): fields of a single row, as specified...
Verifies if a line of the file is in the expected format. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. row (dict[str, str]): fields of a single row, as specified in COLUMNS. Returns: bool: True if thi...
def calculate_r_matrices(fine_states, reduced_matrix_elements, q=None, numeric=True, convention=1): ur"""Calculate the matrix elements of the electric dipole (in the helicity basis). We calculate all matrix elements for the D2 line in Rb 87. >>> from sympy import symbols, ppri...
ur"""Calculate the matrix elements of the electric dipole (in the helicity basis). We calculate all matrix elements for the D2 line in Rb 87. >>> from sympy import symbols, pprint >>> red = symbols("r", positive=True) >>> reduced_matrix_elements = [[0, -red], [red, 0]] >>> g = State("Rb", 87, ...
def download_file(save_path, file_url): """ Download file from http url link """ r = requests.get(file_url) # create HTTP response object with open(save_path, 'wb') as f: f.write(r.content) return save_path
Download file from http url link
def _on(on_signals, callback, max_calls=None): """ Proxy for `smokesignal.on`, which is compatible as both a function call and a decorator. This method cannot be used as a decorator :param signals: A single signal or list/tuple of signals that callback should respond to :param callback: A callable ...
Proxy for `smokesignal.on`, which is compatible as both a function call and a decorator. This method cannot be used as a decorator :param signals: A single signal or list/tuple of signals that callback should respond to :param callback: A callable that should repond to supplied signal(s) :param max_cal...
def _get_model_fitting(self, mf_id): """ Retreive model fitting with identifier 'mf_id' from the list of model fitting objects stored in self.model_fitting """ for model_fitting in self.model_fittings: if model_fitting.activity.id == mf_id: return mode...
Retreive model fitting with identifier 'mf_id' from the list of model fitting objects stored in self.model_fitting
def upload_files(self, abspaths, relpaths, remote_objects): """ Determines files to be uploaded and call ``upload_file`` on each. """ for relpath in relpaths: abspath = [p for p in abspaths if p[len(self.file_root):] == relpath][0] cloud_datetime = remote_objects[...
Determines files to be uploaded and call ``upload_file`` on each.
def is_BF_hypergraph(self): """Indicates whether the hypergraph is a BF-hypergraph. A BF-hypergraph consists of only B-hyperedges and F-hyperedges. See "is_B_hypergraph" or "is_F_hypergraph" for more details. :returns: bool -- True iff the hypergraph is an F-hypergraph. """ ...
Indicates whether the hypergraph is a BF-hypergraph. A BF-hypergraph consists of only B-hyperedges and F-hyperedges. See "is_B_hypergraph" or "is_F_hypergraph" for more details. :returns: bool -- True iff the hypergraph is an F-hypergraph.
def as_sql(self, *args, **kwargs): """ Overrides the :class:`SQLUpdateCompiler` method in order to remove any CTE-related WHERE clauses, which are not necessary for UPDATE queries, yet may have been added if this query was cloned from a CTEQuery. :return: :rtype: ...
Overrides the :class:`SQLUpdateCompiler` method in order to remove any CTE-related WHERE clauses, which are not necessary for UPDATE queries, yet may have been added if this query was cloned from a CTEQuery. :return: :rtype:
def diffusion_correlated(diffusion_constant=0.2, exposure_time=0.05, samples=40, phi=0.25): """ Calculate the (perhaps) correlated diffusion effect between particles during the exposure time of the confocal microscope. diffusion_constant is in terms of seconds and pixel sizes exposure_time is in...
Calculate the (perhaps) correlated diffusion effect between particles during the exposure time of the confocal microscope. diffusion_constant is in terms of seconds and pixel sizes exposure_time is in seconds 1 micron radius particle: D = kT / (6 a\pi\eta) for 80/20 g/w (60 mPas), 3600 nm^2...
def tmpdir(): """ Create a tempdir context for the cwd and remove it after. """ target = None try: with _tmpdir_extant() as target: yield target finally: if target is not None: shutil.rmtree(target, ignore_errors=True)
Create a tempdir context for the cwd and remove it after.
def get_workflow_status_of(brain_or_object, state_var="review_state"): """Get the current workflow status of the given brain or context. :param brain_or_object: A single catalog brain or content object :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain :param state_var: The name of ...
Get the current workflow status of the given brain or context. :param brain_or_object: A single catalog brain or content object :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain :param state_var: The name of the state variable :type state_var: string :returns: Status :rtype...
def gaussian(data, mean, covariance): """! @brief Calculates gaussian for dataset using specified mean (mathematical expectation) and variance or covariance in case multi-dimensional data. @param[in] data (list): Data that is used for gaussian calculation. @param[in] mean (float|n...
! @brief Calculates gaussian for dataset using specified mean (mathematical expectation) and variance or covariance in case multi-dimensional data. @param[in] data (list): Data that is used for gaussian calculation. @param[in] mean (float|numpy.array): Mathematical expectation used for...
def remove_field(self, name): """https://github.com/frictionlessdata/tableschema-py#schema """ field = self.get_field(name) if field: predicat = lambda field: field.get('name') != name self.__current_descriptor['fields'] = filter( predicat, self.__...
https://github.com/frictionlessdata/tableschema-py#schema
def _parse_xmatch_catalog_header(xc, xk): ''' This parses the header for a catalog file and returns it as a file object. Parameters ---------- xc : str The file name of an xmatch catalog prepared previously. xk : list of str This is a list of column names to extract from the x...
This parses the header for a catalog file and returns it as a file object. Parameters ---------- xc : str The file name of an xmatch catalog prepared previously. xk : list of str This is a list of column names to extract from the xmatch catalog. Returns ------- tuple ...
async def retract(self, mount: top_types.Mount, margin: float): """ Pull the specified mount up to its home position. Works regardless of critical point or home status. """ smoothie_ax = Axis.by_mount(mount).name.upper() async with self._motion_lock: smoothie_pos = s...
Pull the specified mount up to its home position. Works regardless of critical point or home status.
def record_iterator(xml): """ Iterate over all ``<record>`` tags in `xml`. Args: xml (str/file): Input string with XML. UTF-8 is prefered encoding, unicode should be ok. Yields: MARCXMLRecord: For each corresponding ``<record>``. """ # handle file-like o...
Iterate over all ``<record>`` tags in `xml`. Args: xml (str/file): Input string with XML. UTF-8 is prefered encoding, unicode should be ok. Yields: MARCXMLRecord: For each corresponding ``<record>``.
def set(self, key, val, time=0, min_compress_len=0): '''Unconditionally sets a key to a given value in the memcache. The C{key} can optionally be an tuple, with the first element being the server hash value and the second being the key. If you want to avoid making this module calculate ...
Unconditionally sets a key to a given value in the memcache. The C{key} can optionally be an tuple, with the first element being the server hash value and the second being the key. If you want to avoid making this module calculate a hash value. You may prefer, for example, to keep all o...
def pick_frequency_line(self, filename, frequency, cumulativefield='cumulative_frequency'): '''Given a numeric frequency, pick a line from a csv with a cumulative frequency field''' if resource_exists('censusname', filename): with closing(resource_stream('censusname', filename)) as b: ...
Given a numeric frequency, pick a line from a csv with a cumulative frequency field
def add_deviation(self, dev, td=None): """ Add a deviation survey to this instance, and try to compute a position log from it. """ self.deviation = dev try: self.compute_position_log(td=td) except: self.position = None return
Add a deviation survey to this instance, and try to compute a position log from it.
def publish(self, value): """ Accepts: float Returns: float """ value = super(Float, self).publish(value) if isinstance(value, int): value = float(value) return value
Accepts: float Returns: float
def _set_ipv6_track(self, v, load=False): """ Setter method for ipv6_track, mapped from YANG variable /rbridge_id/interface/ve/ipv6/ipv6_local_anycast_gateway/ipv6_track (container) If this variable is read-only (config: false) in the source YANG file, then _set_ipv6_track is considered as a private ...
Setter method for ipv6_track, mapped from YANG variable /rbridge_id/interface/ve/ipv6/ipv6_local_anycast_gateway/ipv6_track (container) If this variable is read-only (config: false) in the source YANG file, then _set_ipv6_track is considered as a private method. Backends looking to populate this variable sh...
def ctc_symbol_loss(top_out, targets, model_hparams, vocab_size, weight_fn): """Compute the CTC loss.""" del model_hparams, vocab_size # unused arg logits = top_out with tf.name_scope("ctc_loss", values=[logits, targets]): # For CTC we assume targets are 1d, [batch, length, 1, 1] here. targets_shape = ...
Compute the CTC loss.
def addVariantFeature(self,variantFeature): '''Appends one VariantFeature to variantFeatures ''' if isinstance(variantFeature, Feature): self.features.append(variantFeature) else: raise(TypeError, 'variantFeature Type should be Feature, not %s' %...
Appends one VariantFeature to variantFeatures
def child_object(self): """ Get Task child object class """ from . import types child_klass = types.get(self.task_type.split('.')[1]) return child_klass.retrieve(self.task_id, client=self._client)
Get Task child object class
def Si_to_pandas_dict(S_dict): """Convert Si information into Pandas DataFrame compatible dict. Parameters ---------- S_dict : ResultDict Sobol sensitivity indices See Also ---------- Si_list_to_dict Returns ---------- tuple : of total, first, and second ...
Convert Si information into Pandas DataFrame compatible dict. Parameters ---------- S_dict : ResultDict Sobol sensitivity indices See Also ---------- Si_list_to_dict Returns ---------- tuple : of total, first, and second order sensitivities. Total...
def configure_logger(glob, multi_level, relative=False, logfile=None, syslog=False): """ Logger configuration function for setting either a simple debug mode or a multi-level one. :param glob: globals dictionary :param multi_level: boolean telling if multi-level deb...
Logger configuration function for setting either a simple debug mode or a multi-level one. :param glob: globals dictionary :param multi_level: boolean telling if multi-level debug is to be considered :param relative: use relative time for the logging messages :param logfile: log ...
def reciprocal_rank( model, test_interactions, train_interactions=None, user_features=None, item_features=None, preserve_rows=False, num_threads=1, check_intersections=True, ): """ Measure the reciprocal rank metric for a model: 1 / the rank of the highest ranked positive exa...
Measure the reciprocal rank metric for a model: 1 / the rank of the highest ranked positive example. A perfect score is 1.0. Parameters ---------- model: LightFM instance the fitted model to be evaluated test_interactions: np.float32 csr_matrix of shape [n_users, n_items] Non-zer...
def xack(self, stream, group_name, id, *ids): """Acknowledge a message for a given consumer group""" return self.execute(b'XACK', stream, group_name, id, *ids)
Acknowledge a message for a given consumer group
def filter(self, value, model=None, context=None): """ Filter Performs value filtering and returns filtered result. :param value: input value :param model: parent model being validated :param context: object, filtering context ...
Filter Performs value filtering and returns filtered result. :param value: input value :param model: parent model being validated :param context: object, filtering context :return: filtered value
def shorten_duplicate_content_url(url): """Remove anchor part and trailing index.html from URL.""" if '#' in url: url = url.split('#', 1)[0] if url.endswith('index.html'): return url[:-10] if url.endswith('index.htm'): return url[:-9] return url
Remove anchor part and trailing index.html from URL.
def select_data(db_file, slab=None, facet=None): """Gathers relevant data from SQL database generated by CATHUB. Parameters ---------- db_file : Path to database slab : Which metal (slab) to select. facet : Which facets to select. Returns ------- data : SQL cursor output. """ ...
Gathers relevant data from SQL database generated by CATHUB. Parameters ---------- db_file : Path to database slab : Which metal (slab) to select. facet : Which facets to select. Returns ------- data : SQL cursor output.
def visit_dictcomp(self, node, parent): """visit a DictComp node by returning a fresh instance of it""" newnode = nodes.DictComp(node.lineno, node.col_offset, parent) newnode.postinit( self.visit(node.key, newnode), self.visit(node.value, newnode), [self.visit...
visit a DictComp node by returning a fresh instance of it
def pot_to_requiv_contact(pot, q, sma, compno=1): """ TODO: add documentation """ return ConstraintParameter(pot._bundle, "pot_to_requiv_contact({}, {}, {}, {})".format(_get_expr(pot), _get_expr(q), _get_expr(sma), compno))
TODO: add documentation
def _get_es_version(self, config): """ Get the running version of elasticsearch. """ try: data = self._get_data(config.url, config, send_sc=False) # pre-release versions of elasticearch are suffixed with -rcX etc.. # peel that off so that the map below...
Get the running version of elasticsearch.
def connect(self): """ Connects to a Modbus-TCP Server or a Modbus-RTU Slave with the given Parameters """ if (self.__ser is not None): serial = importlib.import_module("serial") if self.__stopbits == 0: self.__ser.stopbits = serial.STOPBITS_ON...
Connects to a Modbus-TCP Server or a Modbus-RTU Slave with the given Parameters
def search(self): """ Click on the Search button and wait for the results page to be displayed """ self.q(css='button.btn').click() GitHubSearchResultsPage(self.browser).wait_for_page()
Click on the Search button and wait for the results page to be displayed
def exact_anniversaries(frequency, anniversary, start, finish): """ Returns the number of exact anniversaries if start and finish represent an anniversary. ie.. exact_anniversaries(DATE_FREQUENCY_MONTHLY, 10, date(2012, 2, 10), date(2012, 3, 9)) returns 1 exact_anniversaries(DATE_FREQUENC...
Returns the number of exact anniversaries if start and finish represent an anniversary. ie.. exact_anniversaries(DATE_FREQUENCY_MONTHLY, 10, date(2012, 2, 10), date(2012, 3, 9)) returns 1 exact_anniversaries(DATE_FREQUENCY_MONTHLY, 10, date(2012, 2, 10), date(2012, 4, 9)) returns 2
def list_scheduled_queries(self): """ List all scheduled_queries :return: A list of all scheduled query dicts :rtype: list of dict :raises: This will raise a :class:`ServerException<logentries_api.exceptions.ServerException>` if there is an error from Lo...
List all scheduled_queries :return: A list of all scheduled query dicts :rtype: list of dict :raises: This will raise a :class:`ServerException<logentries_api.exceptions.ServerException>` if there is an error from Logentries
def write_records(records, output_file, split=False): """Write FASTA records Write a FASTA file from an iterable of records. Parameters ---------- records : iterable Input records to write. output_file : file, str or pathlib.Path Output FASTA file to be written into. split...
Write FASTA records Write a FASTA file from an iterable of records. Parameters ---------- records : iterable Input records to write. output_file : file, str or pathlib.Path Output FASTA file to be written into. split : bool, optional If True, each record is written into...
def dendrogram(adata: AnnData, groupby: str, n_pcs: Optional[int]=None, use_rep: Optional[str]=None, var_names: Optional[List[str]]=None, use_raw: Optional[bool]=None, cor_method: Optional[str]='pearson', linkage_method: Optional[...
\ Computes a hierarchical clustering for the given `groupby` categories. By default, the PCA representation is used unless `.X` has less than 50 variables. Alternatively, a list of `var_names` (e.g. genes) can be given. Average values of either `var_names` or components are used to compute a correlat...
def conditions_list(self, conkey): """ Return a (possibly empty) list of conditions based on conkey. The conditions are returned raw, not parsed. conkey: str for cond<n>, startcond<n> or stopcond<n>, specify only the prefix. The list will be filled with all condi...
Return a (possibly empty) list of conditions based on conkey. The conditions are returned raw, not parsed. conkey: str for cond<n>, startcond<n> or stopcond<n>, specify only the prefix. The list will be filled with all conditions.
def less(x, y): """ Return True if x < y and False otherwise. This function returns False whenever x and/or y is a NaN. """ x = BigFloat._implicit_convert(x) y = BigFloat._implicit_convert(y) return mpfr.mpfr_less_p(x, y)
Return True if x < y and False otherwise. This function returns False whenever x and/or y is a NaN.
def remove_regex(urls, regex): """ Parse a list for non-matches to a regex. Args: urls: iterable of urls regex: string regex to be parsed for Returns: list of strings not matching regex """ if not regex: return urls # To avoid iterating over the characters...
Parse a list for non-matches to a regex. Args: urls: iterable of urls regex: string regex to be parsed for Returns: list of strings not matching regex
def result(self) -> workflow.IntervalGeneratorType: """ Generate intervals indicating the valid sentences. """ config = cast(SentenceSegementationConfig, self.config) index = -1 labels = None while True: # 1. Find the start of the sentence. ...
Generate intervals indicating the valid sentences.
def _parse_file(self, file_obj): """Directly read from file handler. Note that this will move the file pointer. """ byte_data = file_obj.read(self.size) self._parse_byte_data(byte_data)
Directly read from file handler. Note that this will move the file pointer.
def on_frame(self, frame_in): """On RPC Frame. :param specification.Frame frame_in: Amqp frame. :return: """ if frame_in.name not in self._request: return False uuid = self._request[frame_in.name] if self._response[uuid]: self._response[u...
On RPC Frame. :param specification.Frame frame_in: Amqp frame. :return:
def isPairTag(self): """ Returns: bool: True if this is pair tag - ``<body> .. </body>`` for example. """ if self.isComment() or self.isNonPairTag(): return False if self.isEndTag(): return True if self.isOpeningTag() and self.endtag:...
Returns: bool: True if this is pair tag - ``<body> .. </body>`` for example.
def convert_to_equivalent(self, unit, equivalence, **kwargs): """ Return a copy of the unyt_array in the units specified units, assuming the given equivalency. The dimensions of the specified units and the dimensions of the original array need not match so long as there is an app...
Return a copy of the unyt_array in the units specified units, assuming the given equivalency. The dimensions of the specified units and the dimensions of the original array need not match so long as there is an appropriate conversion in the specified equivalency. Parameters ----...
def build_graph(self, regularizers=()): '''Connect the layers in this network to form a computation graph. Parameters ---------- regularizers : list of :class:`theanets.regularizers.Regularizer` A list of the regularizers to apply while building the computation g...
Connect the layers in this network to form a computation graph. Parameters ---------- regularizers : list of :class:`theanets.regularizers.Regularizer` A list of the regularizers to apply while building the computation graph. Returns ------- outp...
async def main(loop): """Log packets from Bus.""" # Setting debug PYVLXLOG.setLevel(logging.DEBUG) stream_handler = logging.StreamHandler() stream_handler.setLevel(logging.DEBUG) PYVLXLOG.addHandler(stream_handler) # Connecting to KLF 200 pyvlx = PyVLX('pyvlx.yaml', loop=loop) await...
Log packets from Bus.
def best_four_point_to_sell(self): """ 判斷是否為四大賣點 :rtype: str or False """ result = [] if self.check_plus_bias_ratio() and \ (self.best_sell_1() or self.best_sell_2() or self.best_sell_3() or \ self.best_sell_4()): if self.best_sell_1(): ...
判斷是否為四大賣點 :rtype: str or False
def _set_scores(self): """ Compute anomaly scores for the time series by sliding both lagging window and future window. """ anom_scores = {} self._generate_SAX() self._construct_all_SAX_chunk_dict() length = self.time_series_length lws = self.lag_window_si...
Compute anomaly scores for the time series by sliding both lagging window and future window.
def _evictStaleDevices(self): """ A housekeeping function which runs in a worker thread and which evicts devices that haven't sent an update for a while. """ while self.running: expiredDeviceIds = [key for key, value in self.devices.items() if value.hasExpired()] ...
A housekeeping function which runs in a worker thread and which evicts devices that haven't sent an update for a while.
def _httplib2_init(username, password):
    """Build a plain ``httplib2.Http`` request object, attaching basic
    credentials only when both a username and a password are supplied."""
    client = httplib2.Http()
    if username and password:
        client.add_credentials(username, password)
    return client
Used to instantiate a regular HTTP request object
def does_collection_exist(self, collection_name, database_name=None): """ Checks if a collection exists in CosmosDB. """ if collection_name is None: raise AirflowBadRequest("Collection name cannot be None.") existing_container = list(self.get_conn().QueryContainers( ...
Checks if a collection exists in CosmosDB.
def p_edgesigs(self, p):
    # NOTE: the string below is not documentation -- PLY (yacc) reads the
    # docstring as the grammar production this handler implements.
    # Do not edit or remove it.
    'edgesigs : edgesigs SENS_OR edgesig'
    # Append the newly parsed edgesig (p[3]) to the tuple of edge signals
    # accumulated so far (p[1]).
    p[0] = p[1] + (p[3],)
    # Propagate the line number of the first symbol to the production result.
    p.set_lineno(0, p.lineno(1))
edgesigs : edgesigs SENS_OR edgesig
def cmd_signing_remove(self, args): '''remove signing from server''' if not self.master.mavlink20(): print("You must be using MAVLink2 for signing") return self.master.mav.setup_signing_send(self.target_system, self.target_component, [0]*32, 0) self.master.disable...
remove signing from server
def _pretty_access_flags_gen(self): """ generator of the pretty access flags """ if self.is_public(): yield "public" if self.is_final(): yield "final" if self.is_abstract(): yield "abstract" if self.is_interface(): ...
generator of the pretty access flags
def describe_topic_rule(ruleName, region=None, key=None, keyid=None, profile=None): ''' Given a topic rule name describe its properties. Returns a dictionary of interesting properties. CLI Example: .. code-block:: bash salt myminion boto_iot.describe_topic_rule myrule '...
Given a topic rule name describe its properties. Returns a dictionary of interesting properties. CLI Example: .. code-block:: bash salt myminion boto_iot.describe_topic_rule myrule
def on_open(self): """ Shows an open file dialog and open the file if the dialog was accepted. """ filename, filter = QtWidgets.QFileDialog.getOpenFileName(self, 'Open') if filename: self.open_file(filename) self.actionRun.setEnabled(True) sel...
Shows an open file dialog and open the file if the dialog was accepted.
def pretty_date(time=False): """ Get a datetime object or a int() Epoch timestamp and return a pretty string like 'an hour ago', 'Yesterday', '3 months ago', 'just now', etc """ from datetime import datetime from django.utils import timezone now = timezone.now() if isinstance(time, i...
Get a datetime object or an int Epoch timestamp and return a pretty string like 'an hour ago', 'Yesterday', '3 months ago', 'just now', etc.
def package_username(repo):
    '''Derive the Python package name and the github username from an addon
    repository name of the form ``fabsetup-<username>-<addonname>``.

    The package name is the repo name with dashes replaced by underscores;
    the username is the second dash-separated component.

    The original doctest called a nonexistent ``package_user`` and showed an
    output the implementation does not produce; corrected below.

    >>> package_username('fabsetup-theno-termdown')
    ('fabsetup_theno_termdown', 'theno')
    '''
    package = repo.replace('-', '_')
    username = repo.split('-')[1]
    return package, username
>>> package_username('fabsetup-theno-termdown') ('fabsetup_theno_termdown', 'theno')
def get_dihedral(self, i: int, j: int, k: int, l: int) -> float: """ Returns dihedral angle specified by four sites. Args: i: Index of first site j: Index of second site k: Index of third site l: Index of fourth site Returns: ...
Returns dihedral angle specified by four sites. Args: i: Index of first site j: Index of second site k: Index of third site l: Index of fourth site Returns: Dihedral angle in degrees.
def _monitor(last_ping, stop_plugin, is_shutting_down, timeout=5): """Monitors health checks (pings) from the Snap framework. If the plugin doesn't receive 3 consecutive health checks from Snap the plugin will shutdown. The default timeout is set to 5 seconds. """ _timeout_count = 0 _last_chec...
Monitors health checks (pings) from the Snap framework. If the plugin doesn't receive 3 consecutive health checks from Snap the plugin will shutdown. The default timeout is set to 5 seconds.
def write_ImageMapLine(tlx, tly, brx, bry, w, h, dpi, chr, segment_start, segment_end): """ Write out an image map area line with the coordinates passed to this function <area shape="rect" coords="tlx,tly,brx,bry" href="#chr7" title="chr7:100001..500001"> """ tlx, brx = [canvas2px(x, w, dpi) for...
Write out an image map area line with the coordinates passed to this function <area shape="rect" coords="tlx,tly,brx,bry" href="#chr7" title="chr7:100001..500001">
def becomeMemberOf(self, groupRole): """ Instruct this (user or group) Role to become a member of a group role. @param groupRole: The role that this group should become a member of. """ self.store.findOrCreate(RoleRelationship, group=groupRole, ...
Instruct this (user or group) Role to become a member of a group role. @param groupRole: The role that this group should become a member of.
def RotateServerKey(cn=u"grr", keylength=4096): """This function creates and installs a new server key. Note that - Clients might experience intermittent connection problems after the server keys rotated. - It's not possible to go back to an earlier key. Clients that see a new certificate will rememb...
This function creates and installs a new server key. Note that - Clients might experience intermittent connection problems after the server keys are rotated. - It's not possible to go back to an earlier key. Clients that see a new certificate will remember the cert's serial number and refuse to accept ...
def dynamics(start, end=None): """ Apply dynamics to a sequence. If end is specified, it will crescendo or diminuendo linearly from start to end dynamics. You can pass any of these strings as dynamic markers: ['pppppp', 'ppppp', 'pppp', 'ppp', 'pp', 'p', 'mp', 'mf', 'f', 'ff', 'fff', ''ffff] Args: ...
Apply dynamics to a sequence. If end is specified, it will crescendo or diminuendo linearly from start to end dynamics. You can pass any of these strings as dynamic markers: ['pppppp', 'ppppp', 'pppp', 'ppp', 'pp', 'p', 'mp', 'mf', 'f', 'ff', 'fff', 'ffff'] Args: start: beginning dynamic marker, if no...
def public_key(self): """ :return: The PublicKey object for the public key this certificate contains """ if not self._public_key and self.sec_certificate_ref: sec_public_key_ref_pointer = new(Security, 'SecKeyRef *') res = Security.SecCertificateCopyP...
:return: The PublicKey object for the public key this certificate contains
def _get_representative_batch(merged): """Prepare dictionary matching batch items to a representative within a group. """ out = {} for mgroup in merged: mgroup = sorted(list(mgroup)) for x in mgroup: out[x] = mgroup[0] return out
Prepare dictionary matching batch items to a representative within a group.
def as_check_request(self, timer=datetime.utcnow): """Makes a `ServicecontrolServicesCheckRequest` from this instance Returns: a ``ServicecontrolServicesCheckRequest`` Raises: ValueError: if the fields in this instance are insufficient to to create a valid ``Ser...
Makes a `ServicecontrolServicesCheckRequest` from this instance Returns: a ``ServicecontrolServicesCheckRequest`` Raises: ValueError: if the fields in this instance are insufficient to to create a valid ``ServicecontrolServicesCheckRequest``
def try_rgb(s, default=None): """ Try parsing a string into an rgb value (int, int, int), where the ints are 0-255 inclusive. If None is passed, default is returned. On failure, InvalidArg is raised. """ if not s: return default try: r, g, b = (int(x.strip()) for ...
Try parsing a string into an rgb value (int, int, int), where the ints are 0-255 inclusive. If None is passed, default is returned. On failure, InvalidArg is raised.