code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def weighted_mean_and_std(values, weights):
"""
Returns the weighted average and standard deviation.
values, weights -- numpy ndarrays with the same shape.
"""
average = np.average(values, weights=weights, axis=0)
variance = np.dot(weights, (values - average) ** 2) / weights.sum() # Fast and n... | Returns the weighted average and standard deviation.
values, weights -- numpy ndarrays with the same shape. |
def compute_plot_size(plot):
"""
Computes the size of bokeh models that make up a layout such as
figures, rows, columns, widgetboxes and Plot.
"""
if isinstance(plot, GridBox):
ndmapping = NdMapping({(x, y): fig for fig, y, x in plot.children}, kdims=['x', 'y'])
cols = ndmapping.grou... | Computes the size of bokeh models that make up a layout such as
figures, rows, columns, widgetboxes and Plot. |
def create_protocol(self):
"""Create a new protocol via the :attr:`protocol_factory`
This method increase the count of :attr:`sessions` and build
the protocol passing ``self`` as the producer.
"""
self.sessions += 1
protocol = self.protocol_factory(self)
protocol... | Create a new protocol via the :attr:`protocol_factory`
This method increase the count of :attr:`sessions` and build
the protocol passing ``self`` as the producer. |
def integrate(self, wavelengths=None, **kwargs):
"""Perform integration.
This uses any analytical integral that the
underlying model has (i.e., ``self.model.integral``).
If unavailable, it uses the default fall-back integrator
set in the ``default_integrator`` configuration item... | Perform integration.
This uses any analytical integral that the
underlying model has (i.e., ``self.model.integral``).
If unavailable, it uses the default fall-back integrator
set in the ``default_integrator`` configuration item.
If wavelengths are provided, flux or throughput i... |
async def get_guild_count(self, bot_id: int=None):
"""This function is a coroutine.
Gets a guild count from discordbots.org
Parameters
==========
bot_id: int[Optional]
The bot_id of the bot you want to lookup.
Defaults to the Bot provided in Client init... | This function is a coroutine.
Gets a guild count from discordbots.org
Parameters
==========
bot_id: int[Optional]
The bot_id of the bot you want to lookup.
Defaults to the Bot provided in Client init
Returns
=======
stats: dict
... |
def pr0_to_likelihood_array(outcomes, pr0):
"""
Assuming a two-outcome measurement with probabilities given by the
array ``pr0``, returns an array of the form expected to be returned by
``likelihood`` method.
:param numpy.ndarray outcomes: Array of integers indexing outc... | Assuming a two-outcome measurement with probabilities given by the
array ``pr0``, returns an array of the form expected to be returned by
``likelihood`` method.
:param numpy.ndarray outcomes: Array of integers indexing outcomes.
:param numpy.ndarray pr0: Array of shape ``(n_mode... |
def search_grouping(stmt, name):
"""Search for a grouping in scope
First search the hierarchy, then the module and its submodules."""
mod = stmt.i_orig_module
while stmt is not None:
if name in stmt.i_groupings:
g = stmt.i_groupings[name]
if (mod is not None and
... | Search for a grouping in scope
First search the hierarchy, then the module and its submodules. |
def parse_file_name_starting_position(self):
""" Returns (latitude, longitude) of lower left point of the file """
groups = mod_re.findall('([NS])(\d+)([EW])(\d+)\.hgt', self.file_name)
assert groups and len(groups) == 1 and len(groups[0]) == 4, 'Invalid file name {0}'.format(self.file_name)
... | Returns (latitude, longitude) of lower left point of the file |
def connect(servers=None, framed_transport=False, timeout=None,
retry_time=60, recycle=None, round_robin=None, max_retries=3):
"""
Constructs a single ElasticSearch connection. Connects to a randomly chosen
server on the list.
If the connection fails, it will attempt to connect to each serv... | Constructs a single ElasticSearch connection. Connects to a randomly chosen
server on the list.
If the connection fails, it will attempt to connect to each server on the
list in turn until one succeeds. If it is unable to find an active server,
it will throw a NoServerAvailable exception.
Failing ... |
def assert_credentials_match(self, verifier, authc_token, account):
"""
:type verifier: authc_abcs.CredentialsVerifier
:type authc_token: authc_abcs.AuthenticationToken
:type account: account_abcs.Account
:returns: account_abcs.Account
:raises IncorrectCredentialsExcepti... | :type verifier: authc_abcs.CredentialsVerifier
:type authc_token: authc_abcs.AuthenticationToken
:type account: account_abcs.Account
:returns: account_abcs.Account
:raises IncorrectCredentialsException: when authentication fails,
includin... |
def console_exec(thread_id, frame_id, expression, dbg):
"""returns 'False' in case expression is partially correct
"""
frame = dbg.find_frame(thread_id, frame_id)
is_multiline = expression.count('@LINE@') > 1
expression = str(expression.replace('@LINE@', '\n'))
# Not using frame.f_globals beca... | returns 'False' in case expression is partially correct |
def item_properties(self, handle):
"""Return properties of the item with the given handle."""
logger.debug("Getting properties for handle: {}".format(handle))
properties = {
'size_in_bytes': self.get_size_in_bytes(handle),
'utc_timestamp': self.get_utc_timestamp(handle),
... | Return properties of the item with the given handle. |
def _set_show_system_info(self, v, load=False):
"""
Setter method for show_system_info, mapped from YANG variable /brocade_ras_ext_rpc/show_system_info (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_system_info is considered as a private
method. Backends ... | Setter method for show_system_info, mapped from YANG variable /brocade_ras_ext_rpc/show_system_info (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_system_info is considered as a private
method. Backends looking to populate this variable should
do so via calli... |
def linspace(self, start, stop, n):
    """Return ``n`` evenly spaced values from ``start`` to ``stop`` (endpoint
    included) — a pure-Python stand-in for ``numpy.linspace``.
    """
    if n == 1:
        return [start]
    last = n - 1
    step = 1.0 / last
    # Interpolate as a weighted average of the two endpoints.
    return [step * (start * (last - i) + stop * i) for i in range(n)]
def report(self):
"""
Create reports of the findings
"""
# Initialise a variable to store the results
data = ''
for sample in self.metadata:
if sample[self.analysistype].primers != 'NA':
# Set the name of the strain-specific report
... | Create reports of the findings |
def distribution_compatible(dist, supported_tags=None):
"""Is this distribution compatible with the given interpreter/platform combination?
:param supported_tags: A list of tag tuples specifying which tags are supported
by the platform in question.
:returns: True if the distribution is compatible, False if i... | Is this distribution compatible with the given interpreter/platform combination?
:param supported_tags: A list of tag tuples specifying which tags are supported
by the platform in question.
:returns: True if the distribution is compatible, False if it is unrecognized or incompatible. |
def add_empty_fields(untl_dict):
"""Add empty values if UNTL fields don't have values."""
# Iterate the ordered UNTL XML element list to determine
# which elements are missing from the untl_dict.
for element in UNTL_XML_ORDER:
if element not in untl_dict:
# Try to create an element w... | Add empty values if UNTL fields don't have values. |
def parse_task_declaration(self, declaration_subAST):
'''
Parses the declaration section of the WDL task AST subtree.
Examples:
String my_name
String your_name
Int two_chains_i_mean_names = 0
:param declaration_subAST: Some subAST representing a task declaratio... | Parses the declaration section of the WDL task AST subtree.
Examples:
String my_name
String your_name
Int two_chains_i_mean_names = 0
:param declaration_subAST: Some subAST representing a task declaration
like: 'String file_name'
:ret... |
def indent(lines, amount, ch=' '):
    """Prefix every line of *lines* with *amount* copies of *ch*.

    Blank lines are padded too, matching the original behavior.
    """
    pad = ch * amount
    return '\n'.join(pad + line for line in lines.split('\n'))
def _gassist_any(self,dg,dt,dt2,name,na=None,nodiag=False,memlimit=-1):
"""Calculates probability of gene i regulating gene j with genotype data assisted method,
with the recommended combination of multiple tests.
dg: numpy.ndarray(nt,ns,dtype=gtype(='u1' by default)) Genotype data.
Entry dg[i,j] is genotype i's v... | Calculates probability of gene i regulating gene j with genotype data assisted method,
with the recommended combination of multiple tests.
dg: numpy.ndarray(nt,ns,dtype=gtype(='u1' by default)) Genotype data.
Entry dg[i,j] is genotype i's value for sample j.
Each value must be among 0,1,...,na.
Genotype i must ... |
def main(path_dir, requirements_name):
"""Console script for imports."""
click.echo("\nWARNING: Uninstall libs it's at your own risk!")
click.echo('\nREMINDER: After uninstall libs, update your requirements '
'file.\nUse the `pip freeze > requirements.txt` command.')
click.echo('\n\nList... | Console script for imports. |
def NotificationsPost(self, parameters):
"""
Create a notification on CommonSense.
If successful the result, including the notification_id, can be obtained from getResponse(), and should be a json string.
@param parameters (dictionary) - Dictionary containin... | Create a notification on CommonSense.
If successful the result, including the notification_id, can be obtained from getResponse(), and should be a json string.
@param parameters (dictionary) - Dictionary containing the notification to create.
@note -
... |
def add_job(self, job):
"""Adds a new job into the cache.
:param dict job: The job dictionary
:returns: True
"""
self.cur.execute("INSERT INTO jobs VALUES(?,?,?,?,?)", (
job["id"], job["description"], job["last-run"], job["next-run"], job["last-run-result"]))
... | Adds a new job into the cache.
:param dict job: The job dictionary
:returns: True |
def _geolocation_extract(response):
"""
Mimics the exception handling logic in ``client._get_body``, but
for geolocation which uses a different response format.
"""
body = response.json()
if response.status_code in (200, 404):
return body
try:
error = body["error"]["errors"]... | Mimics the exception handling logic in ``client._get_body``, but
for geolocation which uses a different response format. |
def get_ips_by_equipment_and_environment(self, equip_nome, id_ambiente):
"""Search Group Equipment from by the identifier.
:param id_egroup: Identifier of the Group Equipment. Integer value and greater than zero.
:return: Dictionary with the following structure:
::
{'equi... | Search Group Equipment from by the identifier.
:param id_egroup: Identifier of the Group Equipment. Integer value and greater than zero.
:return: Dictionary with the following structure:
::
{'equipaments':
[{'nome': < name_equipament >, 'grupos': < id_group >,
... |
def dict_to_numpy_array(d):
"""
Convert a dict of 1d array to a numpy recarray
"""
return fromarrays(d.values(), np.dtype([(str(k), v.dtype) for k, v in d.items()])) | Convert a dict of 1d array to a numpy recarray |
def _set_backreferences(self, context, items, **kwargs):
"""Set the back references on the linked items
This will set an annotation storage on the referenced items which point
to the current context.
"""
# Don't set any references during initialization.
# This might cau... | Set the back references on the linked items
This will set an annotation storage on the referenced items which point
to the current context. |
def save(self_or_cls, obj, basename, fmt='auto', key={}, info={}, options=None, **kwargs):
"""
Save a HoloViews object to file, either using an explicitly
supplied format or to the appropriate default.
"""
if info or key:
raise Exception('Renderer does not support sav... | Save a HoloViews object to file, either using an explicitly
supplied format or to the appropriate default. |
def validate_args(args):
    """
    Call all required validation functions.

    Exits the process with status 5 when ``args.directory`` is not an
    existing directory; otherwise returns ``args`` unchanged.

    :param args: parsed argument namespace with a ``directory`` attribute
    :return: the validated ``args``
    """
    if not os.path.isdir(args.directory):
        # Fixed: the original used the Python 2 print statement, which is
        # a syntax error on Python 3.
        print("Directory {} does not exist".format(args.directory))
        sys.exit(5)
    return args
:param args:
:return: |
def run(path, code=None, params=None, **meta):
"""pydocstyle code checking.
:return list: List of errors.
"""
if 'ignore_decorators' in params:
ignore_decorators = params['ignore_decorators']
else:
ignore_decorators = None
check_source_args = (cod... | pydocstyle code checking.
:return list: List of errors. |
def _variant_po_to_dict(tokens) -> CentralDogma:
"""Convert a PyParsing data dictionary to a central dogma abundance (i.e., Protein, RNA, miRNA, Gene).
:type tokens: ParseResult
"""
dsl = FUNC_TO_DSL.get(tokens[FUNCTION])
if dsl is None:
raise ValueError('invalid tokens: {}'.format(tokens))... | Convert a PyParsing data dictionary to a central dogma abundance (i.e., Protein, RNA, miRNA, Gene).
:type tokens: ParseResult |
def _filter_insane_successors(self, successors):
"""
Throw away all successors whose target doesn't make sense
This method is called after we resolve an indirect jump using an unreliable method (like, not through one of
the indirect jump resolvers, but through either pure concrete execu... | Throw away all successors whose target doesn't make sense
This method is called after we resolve an indirect jump using an unreliable method (like, not through one of
the indirect jump resolvers, but through either pure concrete execution or backward slicing) to filter out the
obviously incorre... |
def stop_process(self):
    """Kill the running child process (if any) and mark this widget stopped."""
    proc = self.process
    if proc is not None:
        # Record that the stop was user-initiated before killing the child.
        self._user_stop = True
        proc.kill()
    self.setReadOnly(True)
    self._running = False
def csv_row_cleaner(rows):
"""
Clean row checking:
- Not empty row.
- >=1 element different in a row.
- row allready in cleaned row result.
"""
result = []
for row in rows:
# check not empty row
check_empty = len(exclude_empty_values(row)) > 1
# check more... | Clean row checking:
- Not empty row.
- >=1 element different in a row.
- row allready in cleaned row result. |
def _gather_from_files(self, config):
""" gathers from the files in a way that is convienent to use """
command_file = config.get_help_files()
cache_path = os.path.join(config.get_config_dir(), 'cache')
cols = _get_window_columns()
with open(os.path.join(cache_path, command_file... | gathers from the files in a way that is convienent to use |
def changeTo(self, path):
    """Replace the stored value with a single PATH dictionary entry.

    Args:
        path (str): the new environment path
    """
    path_pair = Pair('PATH', StringSingle(path))
    self.value = [DictSingle(path_pair)]
Args:
path (str): the new environment path |
def print_stack_trace(proc_obj, count=None, color='plain', opts={}):
"Print count entries of the stack trace"
if count is None:
n=len(proc_obj.stack)
else:
n=min(len(proc_obj.stack), count)
try:
for i in range(n):
print_stack_entry(proc_obj, i, color=color, opts=opts)... | Print count entries of the stack trace |
def open_project(self, path=None, restart_consoles=True,
save_previous_files=True):
"""Open the project located in `path`"""
self.switch_to_plugin()
if path is None:
basedir = get_home_dir()
path = getexistingdirectory(parent=self,
... | Open the project located in `path` |
def process_record_dataset(dataset, is_training, batch_size, shuffle_buffer,
parse_record_fn, num_epochs=1, num_gpus=None,
examples_per_epoch=None, dtype=tf.float32):
"""Given a Dataset with raw records, return an iterator over the records.
Args:
dataset: A... | Given a Dataset with raw records, return an iterator over the records.
Args:
dataset: A Dataset representing raw records
is_training: A boolean denoting whether the input is for training.
batch_size: The number of samples per batch.
shuffle_buffer: The buffer size to use when shuffling records. A lar... |
def object_info(lcc_server, objectid, db_collection_id):
'''This gets information on a single object from the LCC-Server.
Returns a dict with all of the available information on an object, including
finding charts, comments, object type and variability tags, and
period-search results (if available).
... | This gets information on a single object from the LCC-Server.
Returns a dict with all of the available information on an object, including
finding charts, comments, object type and variability tags, and
period-search results (if available).
If you have an LCC-Server API key present in `~/.astrobase/lc... |
def get(url, params=None, **kwargs):
r"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return... | r"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
... |
def cfg_convert(self, value):
"""Default converter for the cfg:// protocol."""
rest = value
m = self.WORD_PATTERN.match(rest)
if m is None:
raise ValueError("Unable to convert %r" % value)
else:
rest = rest[m.end():]
d = self.config[m.groups()[... | Default converter for the cfg:// protocol. |
def get(self, key):
    """Look up *key* in the key-value store and wrap the value in a 200 response."""
    value = r_kv.get(key)
    return build_response({'data': value, 'code': 200})
def compounding(start, stop, compound):
"""Yield an infinite series of compounding values. Each time the
generator is called, a value is produced by multiplying the previous
value by the compound rate.
EXAMPLE:
>>> sizes = compounding(1., 10., 1.5)
>>> assert next(sizes) == 1.
>>> ass... | Yield an infinite series of compounding values. Each time the
generator is called, a value is produced by multiplying the previous
value by the compound rate.
EXAMPLE:
>>> sizes = compounding(1., 10., 1.5)
>>> assert next(sizes) == 1.
>>> assert next(sizes) == 1 * 1.5
>>> assert nex... |
def assert_equals(actual, expected, ignore_order=False, ignore_index=False, all_close=False):
'''
Assert 2 series are equal.
Like ``assert equals(series1, series2, ...)``, but with better hints at
where the series differ. See `equals` for
detailed parameter doc.
Parameters
----------
a... | Assert 2 series are equal.
Like ``assert equals(series1, series2, ...)``, but with better hints at
where the series differ. See `equals` for
detailed parameter doc.
Parameters
----------
actual : ~pandas.Series
expected : ~pandas.Series
ignore_order : bool
ignore_index : bool
a... |
def collection_choices():
    """Return (id, name) collection choices, prefixed with a '-None-' default."""
    from invenio_collections.models import Collection
    choices = [(0, _('-None-'))]
    choices.extend((collection.id, collection.name)
                   for collection in Collection.query.all())
    return choices
def complain(error):
    """Raises in develop; warns in release.

    ``error`` may be an exception instance or a zero-argument callable
    returning one.

    NOTE(review): a callable ``error`` is only built (and raised) in
    develop mode; in release it is silently dropped rather than warned —
    presumably a develop-only assertion hook. Confirm this is intended.
    """
    if DEVELOP:
        raise error() if callable(error) else error
    if not callable(error):
        logger.warn_err(error)
def transcode_to_utf8(filename, encoding):
    """
    Convert a file in some other encoding into a temporary file that's in
    UTF-8.

    :param filename: path of the source text file
    :param encoding: text encoding of the source file
    :return: a binary ``tempfile.TemporaryFile`` positioned at offset 0,
        containing the UTF-8 transcoded content
    """
    tmp = tempfile.TemporaryFile()
    # Fixed: use a context manager so the source file handle is closed even
    # if decoding fails partway through (the original leaked it).
    with io.open(filename, encoding=encoding) as source:
        for line in source:
            # Drop any BOM characters at the line edges before re-encoding.
            tmp.write(line.strip('\uFEFF').encode('utf-8'))
    tmp.seek(0)
    return tmp
UTF-8. |
def download(self):
"""
Walk from the current ledger index to the genesis ledger index,
and download transactions from rippled.
"""
self.housekeeping()
self.rippled_history()
if self.resampling_frequencies is not None:
self.find_markets()
s... | Walk from the current ledger index to the genesis ledger index,
and download transactions from rippled. |
def bg_compensate(img, sigma, splinepoints, scale):
'''Reads file, subtracts background. Returns [compensated image, background].'''
from PIL import Image
import pylab
from matplotlib.image import pil_to_array
from centrosome.filter import canny
import matplotlib
img = Image.open(img)
... | Reads file, subtracts background. Returns [compensated image, background]. |
def actually_possibly_award(self, **state):
"""
Does the actual work of possibly awarding a badge.
"""
user = state["user"]
force_timestamp = state.pop("force_timestamp", None)
awarded = self.award(**state)
if awarded is None:
return
if awarded... | Does the actual work of possibly awarding a badge. |
def run_qsnp(align_bams, items, ref_file, assoc_files, region=None,
out_file=None):
"""Run qSNP calling on paired tumor/normal.
"""
if utils.file_exists(out_file):
return out_file
paired = get_paired_bams(align_bams, items)
if paired.normal_bam:
region_files = []
... | Run qSNP calling on paired tumor/normal. |
def fill_phenotype_calls(self,phenotypes=None,inplace=False):
"""
Set the phenotype_calls according to the phenotype names
"""
if phenotypes is None: phenotypes = list(self['phenotype_label'].unique())
def _get_calls(label,phenos):
d = dict([(x,0) for x in phenos])
... | Set the phenotype_calls according to the phenotype names |
def _export_project_file(project, path, z, include_images, keep_compute_id, allow_all_nodes, temporary_dir):
"""
Take a project file (.gns3) and patch it for the export
We rename the .gns3 project.gns3 to avoid the task to the client to guess the file name
:param path: Path of the .gns3
"""
#... | Take a project file (.gns3) and patch it for the export
We rename the .gns3 project.gns3 to avoid the task to the client to guess the file name
:param path: Path of the .gns3 |
def version(self, path, postmap=None, **params):
"""
Return the taskforce version.
Supports standard options.
"""
q = httpd.merge_query(path, postmap)
ans = {
'taskforce': taskforce_version,
'python': '.'.join(str(x) for x in sys.version_info[:3]),
... | Return the taskforce version.
Supports standard options. |
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = sel... | Resolve the current expression against the supplied value |
def list_ip(self, instance_id):
"""Add all IPs"""
output = self.client.describe_instances(InstanceIds=[instance_id])
output = output.get("Reservations")[0].get("Instances")[0]
ips = {}
ips['PrivateIp'] = output.get("PrivateIpAddress")
ips['PublicIp'] = output.get("PublicI... | Add all IPs |
def create_role(self, **kwargs):
    """Build a role from ``kwargs`` via the role model, persist it, and return it."""
    return self.put(self.role_model(**kwargs))
def reset_password(self, token):
"""
View function verify a users reset password token from the email we sent to them.
It also handles the form for them to set a new password.
Supports html and json requests.
"""
expired, invalid, user = \
self.security_utils_... | View function verify a users reset password token from the email we sent to them.
It also handles the form for them to set a new password.
Supports html and json requests. |
def element_abund_marco(i_decay, stable_isotope_list,
stable_isotope_identifier,
mass_fractions_array_not_decayed,
mass_fractions_array_decayed):
'''
Given an array of isotopic abundances not decayed and a similar
array of isotopic abun... | Given an array of isotopic abundances not decayed and a similar
array of isotopic abundances not decayed, here elements abundances,
and production factors for elements are calculated |
def _grow(growth, walls, target, i, j, steps, new_steps, res):
'''
fills [res] with [distance to next position where target == 1,
x coord.,
y coord. of that position in target]
using region growth
i,j -> pixel position
growth -> a work array, ne... | fills [res] with [distance to next position where target == 1,
x coord.,
y coord. of that position in target]
using region growth
i,j -> pixel position
growth -> a work array, needed to measure the distance
steps, new_steps -> current and last posit... |
def lchisqprob(chisq,df):
"""
Returns the (1-tailed) probability value associated with the provided
chi-square value and df. Adapted from chisq.c in Gary Perlman's |Stat.
Usage: lchisqprob(chisq,df)
"""
BIG = 20.0
def ex(x):
BIG = 20.0
if x < -BIG:
return 0.0
else:
... | Returns the (1-tailed) probability value associated with the provided
chi-square value and df. Adapted from chisq.c in Gary Perlman's |Stat.
Usage: lchisqprob(chisq,df) |
def drop_constant_column_levels(df):
"""
drop the levels of a multi-level column dataframe which are constant
operates in place
"""
columns = df.columns
constant_levels = [i for i, level in enumerate(columns.levels) if len(level) <= 1]
constant_levels.reverse()
for i in constant_levels:... | drop the levels of a multi-level column dataframe which are constant
operates in place |
def bridge_to_vlan(br):
'''
Returns the VLAN ID of a bridge.
Args:
br: A string - bridge name
Returns:
VLAN ID of the bridge. The VLAN ID is 0 if the bridge is not a fake
bridge. If the bridge does not exist, False is returned.
CLI Example:
.. code-block:: bash
... | Returns the VLAN ID of a bridge.
Args:
br: A string - bridge name
Returns:
VLAN ID of the bridge. The VLAN ID is 0 if the bridge is not a fake
bridge. If the bridge does not exist, False is returned.
CLI Example:
.. code-block:: bash
salt '*' openvswitch.bridge_to_p... |
def start_action(logger=None, action_type="", _serializers=None, **fields):
"""
Create a child L{Action}, figuring out the parent L{Action} from execution
context, and log the start message.
You can use the result as a Python context manager, or use the
L{Action.finish} API to explicitly finish it.... | Create a child L{Action}, figuring out the parent L{Action} from execution
context, and log the start message.
You can use the result as a Python context manager, or use the
L{Action.finish} API to explicitly finish it.
with start_action(logger, "yourapp:subsystem:dosomething",
... |
async def _async_get_sshable_ips(self, ip_addresses):
"""Return list of all IP address that could be pinged."""
async def _async_ping(ip_address):
try:
reader, writer = await asyncio.wait_for(
asyncio.open_connection(ip_address, 22), timeout=5)
... | Return list of all IP address that could be pinged. |
def get_section_by_rva(self, rva):
    """Return the first section whose range contains *rva*, or None if no
    section matches."""
    return next(
        (section for section in self.sections if section.contains_rva(rva)),
        None,
    )
def _resolve_plt(self, addr, irsb, indir_jump):
"""
Determine if the IRSB at the given address is a PLT stub. If it is, concretely execute the basic block to
resolve the jump target.
:param int addr: Address of the block.
:param irsb: The basic ... | Determine if the IRSB at the given address is a PLT stub. If it is, concretely execute the basic block to
resolve the jump target.
:param int addr: Address of the block.
:param irsb: The basic block.
:param IndirectJump indir_jump: The IndirectJump inst... |
def set_orthogonal_selection(self, selection, value, fields=None):
"""Modify data via a selection for each dimension of the array.
Parameters
----------
selection : tuple
A selection for each dimension of the array. May be any combination of int,
slice, integer a... | Modify data via a selection for each dimension of the array.
Parameters
----------
selection : tuple
A selection for each dimension of the array. May be any combination of int,
slice, integer array or Boolean array.
value : scalar or array-like
Value ... |
def ParseFileObject(self, parser_mediator, file_object):
"""Parses a NTFS $UsnJrnl metadata file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): file-like object.
""... | Parses a NTFS $UsnJrnl metadata file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): file-like object. |
def get_category(self, id, **data):
    """Fetch a single category by its ID.

    GET /categories/:id/
    Gets a :format:`category` by ID as ``category``.
    """
    path = "/categories/{0}/".format(id)
    return self.get(path, data=data)
Gets a :format:`category` by ID as ``category``. |
def log_response(response: str, trim_log_values: bool = False, **kwargs: Any) -> None:
    """Log *response* at INFO level via the shared response logger.

    :param response: the raw response text to log
    :param trim_log_values: when True, long values are trimmed in the log output
    :param kwargs: extra options forwarded to ``log_``
    """
    return log_(response, response_logger, logging.INFO, trim=trim_log_values, **kwargs)
def on_reset_compat_defaults_clicked(self, bnt):
"""Reset default values to compat_{backspace,delete} dconf
keys. The default values are retrivied from the guake.schemas
file.
"""
self.settings.general.reset('compat-backspace')
self.settings.general.reset('compat-delete')... | Reset default values to compat_{backspace,delete} dconf
keys. The default values are retrivied from the guake.schemas
file. |
def name_for_scalar_relationship(base, local_cls, referred_cls, constraint):
    """Name scalar relationships as ``<lowercased referred class>_ref``.

    Overrides the default naming scheme; the unused arguments are part of
    the expected callback signature.
    """
    return "{0}_ref".format(referred_cls.__name__.lower())
def credit_note(request, note_id, access_code=None):
''' Displays a credit note.
If ``request`` is a ``POST`` request, forms for applying or refunding
a credit note will be processed.
This view requires a login, and the logged in user must be staff.
Arguments:
note_id (castable to int): T... | Displays a credit note.
If ``request`` is a ``POST`` request, forms for applying or refunding
a credit note will be processed.
This view requires a login, and the logged in user must be staff.
Arguments:
note_id (castable to int): The ID of the credit note to view.
Returns:
rende... |
def n_members(self):
"""
Returns the number of members in the domain if it
`is_finite`, otherwise, returns `np.inf`.
:type: ``int`` or ``np.inf``
"""
if self.is_finite:
return reduce(mul, [domain.n_members for domain in self._domains], 1)
else:
... | Returns the number of members in the domain if it
`is_finite`, otherwise, returns `np.inf`.
:type: ``int`` or ``np.inf`` |
def inter_event_time_distribution(self, u=None, v=None):
"""Return the distribution of inter event time.
If u and v are None the dynamic graph intere event distribution is returned.
If u is specified the inter event time distribution of interactions involving u is returned.
If u and v ar... | Return the distribution of inter event time.
If u and v are None the dynamic graph intere event distribution is returned.
If u is specified the inter event time distribution of interactions involving u is returned.
If u and v are specified the inter event time distribution of (u, v) interactions... |
def create(self, to, from_, method=values.unset, fallback_url=values.unset,
fallback_method=values.unset, status_callback=values.unset,
status_callback_event=values.unset,
status_callback_method=values.unset, send_digits=values.unset,
timeout=values.unset, rec... | Create a new CallInstance
:param unicode to: Phone number, SIP address, or client identifier to call
:param unicode from_: Twilio number from which to originate the call
:param unicode method: HTTP method to use to fetch TwiML
:param unicode fallback_url: Fallback URL in case of error
... |
def dataset_path(cache=None, cachefile="~/.io3d_cache.yaml", get_root=False):
"""Get dataset path.
:param cache: CacheFile object
:param cachefile: cachefile path, default '~/.io3d_cache.yaml'
:return: path to dataset
"""
local_data_dir = local_dir
if cachefile is not None:
cache =... | Get dataset path.
:param cache: CacheFile object
:param cachefile: cachefile path, default '~/.io3d_cache.yaml'
:return: path to dataset |
def is_dsub_operation(cls, op):
"""Determine if a pipelines operation is a dsub request.
We don't have a rigorous way to identify an operation as being submitted
by dsub. Our best option is to check for certain fields that have always
been part of dsub operations.
- labels: job-id, job-name, and u... | Determine if a pipelines operation is a dsub request.
We don't have a rigorous way to identify an operation as being submitted
by dsub. Our best option is to check for certain fields that have always
been part of dsub operations.
- labels: job-id, job-name, and user-id have always existed
- envs: ... |
def run_nested(self, nlive_init=500, maxiter_init=None,
maxcall_init=None, dlogz_init=0.01, logl_max_init=np.inf,
nlive_batch=500, wt_function=None, wt_kwargs=None,
maxiter_batch=None, maxcall_batch=None,
maxiter=None, maxcall=None, maxbatch=No... | **The main dynamic nested sampling loop.** After an initial "baseline"
run using a constant number of live points, dynamically allocates
additional (nested) samples to optimize a specified weight function
until a specified stopping criterion is reached.
Parameters
----------
... |
def get_objective_bank_admin_session(self, proxy, *args, **kwargs):
"""Gets the OsidSession associated with the objective bank administration service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ObjectiveBankAdminSession``
:rtype: ``osid.learning.Object... | Gets the OsidSession associated with the objective bank administration service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ObjectiveBankAdminSession``
:rtype: ``osid.learning.ObjectiveBankAdminSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``... |
def _find_max_lag(x, rho_limit=0.05, maxmaxlag=20000, verbose=0):
"""Automatically find an appropriate maximum lag to calculate IAT"""
# Fetch autocovariance matrix
acv = autocov(x)
# Calculate rho
rho = acv[0, 1] / acv[0, 0]
lam = -1. / np.log(abs(rho))
# Initial guess at 1.5 times lambd... | Automatically find an appropriate maximum lag to calculate IAT |
def run_ipython_notebook(notebook_str):
"""
References:
https://github.com/paulgb/runipy
>>> from utool.util_ipynb import * # NOQA
"""
from runipy.notebook_runner import NotebookRunner
import nbformat
import logging
log_format = '%(asctime)s %(levelname)s: %(message)s'
l... | References:
https://github.com/paulgb/runipy
>>> from utool.util_ipynb import * # NOQA |
def _calc(self, y, w):
'''Helper to estimate spatial lag conditioned Markov transition
probability matrices based on maximum likelihood techniques.
'''
if self.discrete:
self.lclass_ids = weights.lag_categorical(w, self.class_ids,
... | Helper to estimate spatial lag conditioned Markov transition
probability matrices based on maximum likelihood techniques. |
def Ctrl_C(self, delay=0):
"""Ctrl + C shortcut.
"""
self._delay(delay)
self.add(Command("KeyDown", 'KeyDown "%s", %s' % (BoardKey.Ctrl, 1)))
self.add(Command("KeyPress", 'KeyPress "%s", %s' % (BoardKey.C, 1)))
self.add(Command("KeyUp", 'KeyUp "%s", %s' % (BoardKey.Ctrl, ... | Ctrl + C shortcut. |
def move_dirty_lock_file(dirty_lock_file, sm_path):
""" Move the dirt_lock file to the sm_path and thereby is not found by auto recovery of backup anymore """
if dirty_lock_file is not None \
and not dirty_lock_file == os.path.join(sm_path, dirty_lock_file.split(os.sep)[-1]):
logger.debug("M... | Move the dirt_lock file to the sm_path and thereby is not found by auto recovery of backup anymore |
def marv(ctx, config, loglevel, logfilter, verbosity):
"""Manage a Marv site"""
if config is None:
cwd = os.path.abspath(os.path.curdir)
while cwd != os.path.sep:
config = os.path.join(cwd, 'marv.conf')
if os.path.exists(config):
break
cwd = os... | Manage a Marv site |
def network_info(host=None,
admin_username=None,
admin_password=None,
module=None):
'''
Return Network Configuration
CLI Example:
.. code-block:: bash
salt dell dracr.network_info
'''
inv = inventory(host=host, admin_username=admin_u... | Return Network Configuration
CLI Example:
.. code-block:: bash
salt dell dracr.network_info |
def guess_mime_type(url):
"""Use the mimetypes module to lookup the type for an extension.
This function also adds some extensions required for HTML5
"""
(mimetype, _mimeencoding) = mimetypes.guess_type(url)
if not mimetype:
ext = os.path.splitext(url)[1]
mimetype = _MIME_TYPES.get(... | Use the mimetypes module to lookup the type for an extension.
This function also adds some extensions required for HTML5 |
def generate_confirmation_token(self, user):
"""
Generates a unique confirmation token for the specified user.
:param user: The user to work with
"""
data = [str(user.id), self.hash_data(user.email)]
return self.security.confirm_serializer.dumps(data) | Generates a unique confirmation token for the specified user.
:param user: The user to work with |
def plot_slab(slab, ax, scale=0.8, repeat=5, window=1.5,
draw_unit_cell=True, decay=0.2, adsorption_sites=True):
"""
Function that helps visualize the slab in a 2-D plot, for
convenient viewing of output of AdsorbateSiteFinder.
Args:
slab (slab): Slab object to be visualized
... | Function that helps visualize the slab in a 2-D plot, for
convenient viewing of output of AdsorbateSiteFinder.
Args:
slab (slab): Slab object to be visualized
ax (axes): matplotlib axes with which to visualize
scale (float): radius scaling for sites
repeat (int): number of repea... |
def _setup_cgroups(self, my_cpus, memlimit, memory_nodes, cgroup_values):
"""
This method creates the CGroups for the following execution.
@param my_cpus: None or a list of the CPU cores to use
@param memlimit: None or memory limit in bytes
@param memory_nodes: None or a list of ... | This method creates the CGroups for the following execution.
@param my_cpus: None or a list of the CPU cores to use
@param memlimit: None or memory limit in bytes
@param memory_nodes: None or a list of memory nodes of a NUMA system to use
@param cgroup_values: dict of additional values t... |
def query_extensions(self, extension_query, account_token=None, account_token_header=None):
"""QueryExtensions.
[Preview API]
:param :class:`<ExtensionQuery> <azure.devops.v5_1.gallery.models.ExtensionQuery>` extension_query:
:param str account_token:
:param String account_token_... | QueryExtensions.
[Preview API]
:param :class:`<ExtensionQuery> <azure.devops.v5_1.gallery.models.ExtensionQuery>` extension_query:
:param str account_token:
:param String account_token_header: Header to pass the account token
:rtype: :class:`<ExtensionQueryResult> <azure.devops.v... |
def splits(self):
''' Splits '''
if not self.__splits_aggregate:
self.__splits_aggregate = SplitsAggregate(self.book)
return self.__splits_aggregate | Splits |
def encode(self):
"""Encode the packet's buffer from the instance variables."""
tftpassert(self.filename, "filename required in initial packet")
tftpassert(self.mode, "mode required in initial packet")
# Make sure filename and mode are bytestrings.
filename = self.filename
... | Encode the packet's buffer from the instance variables. |
def iMath(image, operation, *args):
"""
Perform various (often mathematical) operations on the input image/s.
Additional parameters should be specific for each operation.
See the the full iMath in ANTs, on which this function is based.
ANTsR function: `iMath`
Arguments
---------
image ... | Perform various (often mathematical) operations on the input image/s.
Additional parameters should be specific for each operation.
See the the full iMath in ANTs, on which this function is based.
ANTsR function: `iMath`
Arguments
---------
image : ANTsImage
input object, usually antsIm... |
def validate_single_matching_uri(all_blockchain_uris: List[str], w3: Web3) -> str:
"""
Return a single block URI after validating that it is the *only* URI in
all_blockchain_uris that matches the w3 instance.
"""
matching_uris = [
uri for uri in all_blockchain_uris if check_if_chain_matches_... | Return a single block URI after validating that it is the *only* URI in
all_blockchain_uris that matches the w3 instance. |
def _uniqualize(d):
'''
d = {1:'a',2:'b',3:'c',4:'b'}
_uniqualize(d)
'''
pt = copy.deepcopy(d)
seqs_for_del =[]
vset = set({})
for k in pt:
vset.add(pt[k])
tslen = vset.__len__()
freq = {}
for k in pt:
v = pt[k]
if(v in freq):
freq[... | d = {1:'a',2:'b',3:'c',4:'b'}
_uniqualize(d) |
def _onDocstring( self, docstr, line ):
" Memorizes a function/class/module docstring "
if self.objectsStack:
self.objectsStack[ -1 ].docstring = \
Docstring( trim_docstring( docstr ), line )
return
self.docstring = Docstring( trim_docstr... | Memorizes a function/class/module docstring |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.