code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _check_for_eltorito_boot_info_table(self, ino):
# type: (inode.Inode) -> None
'''
An internal method to check a boot directory record to see if it has
an El Torito Boot Info Table embedded inside of it.
Parameters:
ino - The Inode to check for a Boot Info Table.
... | An internal method to check a boot directory record to see if it has
an El Torito Boot Info Table embedded inside of it.
Parameters:
ino - The Inode to check for a Boot Info Table.
Returns:
Nothing. |
def launch_slurm(jobname: str,
cmd: str,
memory_mb: int,
project: str,
qos: str,
email: str,
duration: timedelta,
tasks_per_node: int,
cpus_per_task: int,
partition: s... | Launch a job into the SLURM environment.
Args:
jobname: name of the job
cmd: command to be executed
memory_mb: maximum memory requirement per process (Mb)
project: project name
qos: quality-of-service name
email: user's e-mail address
duration: maximum durati... |
def safe_sparse_dot(a, b, dense_output=False):
"""Dot product that handle the sparse matrix case correctly
Uses BLAS GEMM as replacement for numpy.dot where possible
to avoid unnecessary copies.
Parameters
----------
a : array or sparse matrix
b : array or sparse matrix
dense_output : b... | Dot product that handle the sparse matrix case correctly
Uses BLAS GEMM as replacement for numpy.dot where possible
to avoid unnecessary copies.
Parameters
----------
a : array or sparse matrix
b : array or sparse matrix
dense_output : boolean, default False
When False, either ``a``... |
def lookup_values_from_error_table(scores, err_df):
""" Find matching q-value for each score in 'scores' """
ix = find_nearest_matches(np.float32(err_df.cutoff.values), np.float32(scores))
return err_df.pvalue.iloc[ix].values, err_df.svalue.iloc[ix].values, err_df.pep.iloc[ix].values, err_df.qvalue.iloc[ix]... | Find matching q-value for each score in 'scores' |
def addGaussNoise(self, sigma):
"""
Add gaussian noise.
:param float sigma: sigma is expressed in percent of the diagonal size of actor.
:Example:
.. code-block:: python
from vtkplotter import Sphere
Sphe... | Add gaussian noise.
:param float sigma: sigma is expressed in percent of the diagonal size of actor.
:Example:
.. code-block:: python
from vtkplotter import Sphere
Sphere().addGaussNoise(1.0).show() |
def get_dump(self, fmap='', with_stats=False):
"""
Returns the dump the model as a list of strings.
"""
length = ctypes.c_ulong()
sarr = ctypes.POINTER(ctypes.c_char_p)()
if self.feature_names is not None and fmap == '':
flen = int(len(self.feature_names))
... | Returns the dump the model as a list of strings. |
def to_grey(self, on: bool=False):
"""
Change the LED to grey.
:param on: Unused, here for API consistency with the other states
:return: None
"""
self._on = False
self._load_new(led_grey) | Change the LED to grey.
:param on: Unused, here for API consistency with the other states
:return: None |
def fit(self, blocks, y=None):
"""
Fit a k-means clustering model using an ordered sequence of blocks.
"""
self.kmeans.fit(make_weninger_features(blocks))
# set the cluster center closest to the origin to exactly (0.0, 0.0)
self.kmeans.cluster_centers_.sort(axis=0)
... | Fit a k-means clustering model using an ordered sequence of blocks. |
def ParseMultiple(self, stats, file_objs, kb):
"""Process files together."""
fileset = {stat.pathspec.path: obj for stat, obj in zip(stats, file_objs)}
return self.ParseFileset(fileset) | Process files together. |
def zero_datetime(dt, tz=None):
"""
Return the given datetime with hour/minutes/seconds/ms zeroed and the
timezone coerced to the given ``tz`` (or UTC if none is given).
"""
if tz is None:
tz = get_current_timezone()
return coerce_naive(dt).replace(hour=0, minute=0, second=0, microsecond... | Return the given datetime with hour/minutes/seconds/ms zeroed and the
timezone coerced to the given ``tz`` (or UTC if none is given). |
def pandas(self):
"""Return a Pandas dataframe."""
if self._pandas is None:
self._pandas = pd.DataFrame().from_records(self.list_of_dicts)
return self._pandas | Return a Pandas dataframe. |
def hs_join(ls_hsi, hso):
""" [Many-to-one] Synchronizes (joins) a list of input handshake interfaces: output is ready when ALL inputs are ready
ls_hsi - (i) list of input handshake tuples (ready, valid)
hso - (o) an output handshake tuple (ready, valid)
"""
N = len(ls_hsi)
ls... | [Many-to-one] Synchronizes (joins) a list of input handshake interfaces: output is ready when ALL inputs are ready
ls_hsi - (i) list of input handshake tuples (ready, valid)
hso - (o) an output handshake tuple (ready, valid) |
def _add_styles(self, add_paragraph=True, add_text=True):
"""
Adds paragraph and span wrappers if necessary based on style
"""
p_styles = self.get_para_styles()
t_styles = self.get_span_styles()
for s in self.slide.pending_styles:
if isinstance(s, ParagraphSty... | Adds paragraph and span wrappers if necessary based on style |
def make_data(n,m):
"""make_data: prepare matrix of m times n random processing times"""
p = {}
for i in range(1,m+1):
for j in range(1,n+1):
p[i,j] = random.randint(1,10)
return p | make_data: prepare matrix of m times n random processing times |
def basic_stats(G, area=None, clean_intersects=False, tolerance=15,
circuity_dist='gc'):
"""
Calculate basic descriptive metric and topological stats for a graph.
For an unprojected lat-lng graph, tolerance and graph units should be in
degrees, and circuity_dist should be 'gc'. For a pr... | Calculate basic descriptive metric and topological stats for a graph.
For an unprojected lat-lng graph, tolerance and graph units should be in
degrees, and circuity_dist should be 'gc'. For a projected graph, tolerance
and graph units should be in meters (or similar) and circuity_dist should be
'euclid... |
def create_connection(port=_PORT_, timeout=_TIMEOUT_, restart=False):
"""
Create Bloomberg connection
Returns:
(Bloomberg connection, if connection is new)
"""
if _CON_SYM_ in globals():
if not isinstance(globals()[_CON_SYM_], pdblp.BCon):
del globals()[_CON_SYM_]
i... | Create Bloomberg connection
Returns:
(Bloomberg connection, if connection is new) |
def stop(self, timeout=None):
"""Stop the thread."""
logger.debug("docker plugin - Close thread for container {}".format(self._container.name))
self._stopper.set() | Stop the thread. |
def on_message(self, con, event):
"""Handles messge stanzas"""
msg_type = event.getType()
nick = event.getFrom().getResource()
from_jid = event.getFrom().getStripped()
body = event.getBody()
if msg_type == 'chat' and body is None:
return
logger.debug... | Handles messge stanzas |
def GetStructFormatString(self):
"""Retrieves the Python struct format string.
Returns:
str: format string as used by Python struct or None if format string
cannot be determined.
"""
if not self._element_data_type_map:
return None
number_of_elements = None
if self._data_t... | Retrieves the Python struct format string.
Returns:
str: format string as used by Python struct or None if format string
cannot be determined. |
def create_schema(self, model, waiting_models):
"""
Creates search schemas.
Args:
model: model to execute
waiting_models: if riak can't return response immediately, model is taken to queue.
After first execution session, method is executed with waiting models... | Creates search schemas.
Args:
model: model to execute
waiting_models: if riak can't return response immediately, model is taken to queue.
After first execution session, method is executed with waiting models and controlled.
And be ensured that all given models ar... |
def generate_local_url(self, js_name):
"""
Generate the local url for a js file.
:param js_name:
:return:
"""
host = self._settings['local_host'].format(**self._host_context).rstrip('/')
return '{}/{}.js'.format(host, js_name) | Generate the local url for a js file.
:param js_name:
:return: |
def toxml(self):
"""
Exports this object into a LEMS XML object
"""
return '<ComponentRequirement name="{0}"'.format(self.name) + '' + \
(' description = "{0}"'.format(self.description) if self.description else '') +\
'/>' | Exports this object into a LEMS XML object |
def run(self):
""" Index the document. Since ids are predictable,
we won't index anything twice. """
with self.input().open() as handle:
body = json.loads(handle.read())
es = elasticsearch.Elasticsearch()
id = body.get('_id')
es.index(index='frontpage', do... | Index the document. Since ids are predictable,
we won't index anything twice. |
def _add_relations(self, relations):
"""Add all of the relations for the services."""
for k, v in six.iteritems(relations):
self.d.relate(k, v) | Add all of the relations for the services. |
def process_streamers(self):
"""Check if any streamers should be handed to the stream manager."""
# Check for any triggered streamers and pass them to stream manager
in_progress = self._stream_manager.in_progress()
triggered = self.graph.check_streamers(blacklist=in_progress)
f... | Check if any streamers should be handed to the stream manager. |
def setbridgeprio(self, prio):
""" Set bridge priority value. """
_runshell([brctlexe, 'setbridgeprio', self.name, str(prio)],
"Could not set bridge priority in %s." % self.name) | Set bridge priority value. |
def generate_tensor_filename(self, field_name, file_num, compressed=True):
""" Generate a filename for a tensor. """
file_ext = TENSOR_EXT
if compressed:
file_ext = COMPRESSED_TENSOR_EXT
filename = os.path.join(self.filename, 'tensors', '%s_%05d%s' %(field_name, file_num, fil... | Generate a filename for a tensor. |
def start(self, use_atexit=True):
'''Start the executable.
Args:
use_atexit (bool): If True, the process will automatically be
terminated at exit.
'''
assert not self._process
_logger.debug('Starting process %s', self._proc_args)
process_fut... | Start the executable.
Args:
use_atexit (bool): If True, the process will automatically be
terminated at exit. |
def _item_keys_match(crypto_config, item1, item2):
# type: (CryptoConfig, Dict, Dict) -> Bool
"""Determines whether the values in the primary and sort keys (if they exist) are the same
:param CryptoConfig crypto_config: CryptoConfig used in encrypting the given items
:param dict item1: The first item t... | Determines whether the values in the primary and sort keys (if they exist) are the same
:param CryptoConfig crypto_config: CryptoConfig used in encrypting the given items
:param dict item1: The first item to compare
:param dict item2: The second item to compare
:return: Bool response, True if the key a... |
def refresh_address_presence(self, address):
"""
Update synthesized address presence state from cached user presence states.
Triggers callback (if any) in case the state has changed.
This method is only provided to cover an edge case in our use of the Matrix protocol and
should... | Update synthesized address presence state from cached user presence states.
Triggers callback (if any) in case the state has changed.
This method is only provided to cover an edge case in our use of the Matrix protocol and
should **not** generally be used. |
def close(self):
"""Release libpci resources."""
if self._access is not None:
_logger.debug("Cleaning up")
pci_cleanup(self._access)
self._access = None | Release libpci resources. |
def create_address(cls, address, **kwargs):
"""Create Address
Create a new Address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_address(address, async=True)
>>> result = thre... | Create Address
Create a new Address
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_address(address, async=True)
>>> result = thread.get()
:param async bool
:param Addr... |
def _lowfreq_linear_filter(tumor_index, is_paired):
"""Linear classifier for removing low frequency false positives.
Uses a logistic classifier based on 0.5% tumor only variants from the smcounter2 paper:
https://github.com/bcbio/bcbio_validations/tree/master/somatic-lowfreq
The classifier uses stran... | Linear classifier for removing low frequency false positives.
Uses a logistic classifier based on 0.5% tumor only variants from the smcounter2 paper:
https://github.com/bcbio/bcbio_validations/tree/master/somatic-lowfreq
The classifier uses strand bias (SBF) and read mismatches (NM) and
applies only ... |
def selected(self, new):
"""Set selected from list or instance of object or name.
Over-writes existing selection
"""
def preprocess(item):
if isinstance(item, str):
return self.options[item]
return item
items = coerce_to_list(new, preproce... | Set selected from list or instance of object or name.
Over-writes existing selection |
def get_first_recipient_with_address(self):
""" Returns the first recipient found with a non blank address
:return: First Recipient
:rtype: Recipient
"""
recipients_with_address = [recipient for recipient in self._recipients
if recipient.addres... | Returns the first recipient found with a non blank address
:return: First Recipient
:rtype: Recipient |
def safe_listdir(path):
"""
Attempt to list contents of path, but suppress some exceptions.
"""
try:
return os.listdir(path)
except (PermissionError, NotADirectoryError):
pass
except OSError as e:
# Ignore the directory if does not exist, not a directory or
# perm... | Attempt to list contents of path, but suppress some exceptions. |
def on_canvas_slave__electrode_pair_selected(self, slave, data):
'''
Process pair of selected electrodes.
For now, this consists of finding the shortest path between the two
electrodes and appending it to the list of droplet routes for the
current step.
Note that the dr... | Process pair of selected electrodes.
For now, this consists of finding the shortest path between the two
electrodes and appending it to the list of droplet routes for the
current step.
Note that the droplet routes for a step are stored in a frame/table in
the `DmfDeviceControll... |
def det_4x3(a,b,c,d):
'''
det_4x3(a,b,c,d) yields the determinate of the matrix formed the given rows, which may have
more than 1 dimension, in which case the later dimensions are multiplied and added point-wise.
The point's must be 3D points; the matrix is given a fourth column of 1s and the result... | det_4x3(a,b,c,d) yields the determinate of the matrix formed the given rows, which may have
more than 1 dimension, in which case the later dimensions are multiplied and added point-wise.
The point's must be 3D points; the matrix is given a fourth column of 1s and the resulting
determinant is of this m... |
def update(self, read, write, manage):
"""
Update the SyncListPermissionInstance
:param bool read: Read access.
:param bool write: Write access.
:param bool manage: Manage access.
:returns: Updated SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.s... | Update the SyncListPermissionInstance
:param bool read: Read access.
:param bool write: Write access.
:param bool manage: Manage access.
:returns: Updated SyncListPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_list.sync_list_permission.SyncListPermissionInstance |
def get_vlan_brief_input_request_type_get_next_request_last_rcvd_vlan_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vlan_brief = ET.Element("get_vlan_brief")
config = get_vlan_brief
input = ET.SubElement(get_vlan_brief, "input")
... | Auto Generated Code |
def get_sparse_matrix_keys(session, key_table):
"""Return a list of keys for the sparse matrix."""
return session.query(key_table).order_by(key_table.name).all() | Return a list of keys for the sparse matrix. |
def get_most_severe_consequence(transcripts):
"""Get the most severe consequence
Go through all transcripts and get the most severe consequence
Args:
transcripts (list): A list of transcripts to evaluate
Returns:
most_severe_consequence (str): The most severe conse... | Get the most severe consequence
Go through all transcripts and get the most severe consequence
Args:
transcripts (list): A list of transcripts to evaluate
Returns:
most_severe_consequence (str): The most severe consequence |
def magic_timeit(setup, stmt, ncalls=None, repeat=3, force_ms=False):
"""Time execution of a Python statement or expression
Usage:\\
%timeit [-n<N> -r<R> [-t|-c]] statement
Time execution of a Python statement or expression using the timeit
module.
Options:
-n<N>: execute the given stateme... | Time execution of a Python statement or expression
Usage:\\
%timeit [-n<N> -r<R> [-t|-c]] statement
Time execution of a Python statement or expression using the timeit
module.
Options:
-n<N>: execute the given statement <N> times in a loop. If this value
is not given, a fitting value is ch... |
def deploy_config(model, initial_instance_count, instance_type, endpoint_name=None, tags=None):
"""Export Airflow deploy config from a SageMaker model
Args:
model (sagemaker.model.Model): The SageMaker model to export the Airflow config from.
instance_type (str): The EC2 instance type to deploy... | Export Airflow deploy config from a SageMaker model
Args:
model (sagemaker.model.Model): The SageMaker model to export the Airflow config from.
instance_type (str): The EC2 instance type to deploy this Model to. For example, 'ml.p2.xlarge'.
initial_instance_count (int): The initial number o... |
def consolidate(self, args):
""" Consolidate the provided arguments.
If the provided arguments have matching options, this performs a type conversion.
For any option that has a default value and is not present in the provided
arguments, the default value is added.
Args:
... | Consolidate the provided arguments.
If the provided arguments have matching options, this performs a type conversion.
For any option that has a default value and is not present in the provided
arguments, the default value is added.
Args:
args (dict): A dictionary of the pro... |
async def close(self) -> None:
"""
Explicit exit. Closes pool. For use when keeping pool open across multiple calls.
"""
LOGGER.debug('NodePool.close >>>')
if not self.handle:
LOGGER.warning('Abstaining from closing pool %s: already closed', self.name)
else:... | Explicit exit. Closes pool. For use when keeping pool open across multiple calls. |
def entry_point() -> None:
"""**cxflow** entry point."""
# make sure the path contains the current working directory
sys.path.insert(0, os.getcwd())
parser = get_cxflow_arg_parser(True)
# parse CLI arguments
known_args, unknown_args = parser.parse_known_args()
# show help if no subcomman... | **cxflow** entry point. |
def parse_reports(self):
""" Find Picard HsMetrics reports and parse their data """
# Set up vars
self.picard_HsMetrics_data = dict()
# Go through logs and find Metrics
for f in self.find_log_files('picard/hsmetrics', filehandles=True):
parsed_data = dict()
s_name = None
ke... | Find Picard HsMetrics reports and parse their data |
def action(self):
"""
This class overrides this method
"""
self.return_value = self.function(*self.args, **self.kwargs) | This class overrides this method |
def add_unique(self, attr, item):
"""
在对象此字段对应的数组末尾添加指定对象,如果此对象并没有包含在字段中。
:param attr: 字段名
:param item: 要添加的对象
:return: 当前对象
"""
return self.set(attr, operation.AddUnique([item])) | 在对象此字段对应的数组末尾添加指定对象,如果此对象并没有包含在字段中。
:param attr: 字段名
:param item: 要添加的对象
:return: 当前对象 |
def Call(method,url,payload,silent=False,hide_errors=[],session=None,recursion_cnt=0,debug=False):
"""Execute v1 API call.
:param url: URL paths associated with the API call
:param payload: dict containing all parameters to submit with POST call
:param hide_errors: list of API error codes to ignore. These are... | Execute v1 API call.
:param url: URL paths associated with the API call
:param payload: dict containing all parameters to submit with POST call
:param hide_errors: list of API error codes to ignore. These are not http error codes but returned from the API itself
:param recursion_cnt: recursion counter. This ... |
def post(arguments):
'''Post text to a given twitter account.'''
twitter = api.API(arguments)
params = {}
if arguments.update == '-':
params['status'] = sys.stdin.read()
else:
params['status'] = arguments.update
if arguments.media_file:
medias = [twitter.media_upload(m)... | Post text to a given twitter account. |
def close(self, code=3000, message='Go away!'):
""" Close session or endpoint connection.
@param code: Closing code
@param message: Close message
"""
if self.state != SESSION_STATE.CLOSED:
try:
self.conn.connectionLost()
except Exception ... | Close session or endpoint connection.
@param code: Closing code
@param message: Close message |
def tweets_for(query_type, args, per_user=None):
"""
Retrieve tweets for a user, list or search term. The optional
``per_user`` arg limits the number of tweets per user, for
example to allow a fair spread of tweets per user for a list.
"""
lookup = {"query_type": query_type, "value": args[0]}
... | Retrieve tweets for a user, list or search term. The optional
``per_user`` arg limits the number of tweets per user, for
example to allow a fair spread of tweets per user for a list. |
def widen(self, other):
""" Widen current range. """
if self.low < other.low:
low = -float("inf")
else:
low = self.low
if self.high > other.high:
high = float("inf")
else:
high = self.high
return Interval(low, high) | Widen current range. |
def convert_data_iterable(data_iterable, filter_func=None, converter_func=None): # TODO: add concatenate parameter
'''Convert raw data in data iterable.
Parameters
----------
data_iterable : iterable
Iterable where each element is a tuple with following content: (raw data, timestamp_star... | Convert raw data in data iterable.
Parameters
----------
data_iterable : iterable
Iterable where each element is a tuple with following content: (raw data, timestamp_start, timestamp_stop, status).
filter_func : function
Function that takes array and returns true or false for eac... |
def inside_try(func, options={}):
""" decorator to silence exceptions, for logging
we want a "safe" fail of the functions """
if six.PY2:
name = func.func_name
else:
name = func.__name__
@wraps(func)
def silenceit(*args, **kwargs):
""" the function func to be silence... | decorator to silence exceptions, for logging
we want a "safe" fail of the functions |
def process_records(records):
"""Converts queue entries into object changes.
:param records: an iterable containing `LiveSyncQueueEntry` objects
:return: a dict mapping object references to `SimpleChange` bitsets
"""
changes = defaultdict(int)
cascaded_update_records = set()
cascaded_delete... | Converts queue entries into object changes.
:param records: an iterable containing `LiveSyncQueueEntry` objects
:return: a dict mapping object references to `SimpleChange` bitsets |
def delete_files():
""" Delete one or more files from the server """
session_token = request.headers['session_token']
repository = request.headers['repository']
#===
current_user = have_authenticated_user(request.environ['REMOTE_ADDR'], repository, session_token)
if current_user is False: r... | Delete one or more files from the server |
def makeOuputDir(outputDir, force):
"""
Create or check for an output directory.
@param outputDir: A C{str} output directory name, or C{None}.
@param force: If C{True}, allow overwriting of pre-existing files.
@return: The C{str} output directory name.
"""
if outputDir:
if exists(ou... | Create or check for an output directory.
@param outputDir: A C{str} output directory name, or C{None}.
@param force: If C{True}, allow overwriting of pre-existing files.
@return: The C{str} output directory name. |
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return s... | Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool. |
def saltenviron(environ):
'''
Make Salt's opts dict and the APIClient available in the WSGI environ
'''
if '__opts__' not in locals():
import salt.config
__opts__ = salt.config.client_config(
os.environ.get('SALT_MASTER_CONFIG', '/etc/salt/master'))
environ['SALT_OPT... | Make Salt's opts dict and the APIClient available in the WSGI environ |
def userToId(url):
"""
Extract the username from a contact URL.
Matches addresses containing ``users/<user>`` or ``users/ME/contacts/<user>``.
Args:
url (str): Skype API URL
Returns:
str: extracted identifier
"""
match = re.search(r"user... | Extract the username from a contact URL.
Matches addresses containing ``users/<user>`` or ``users/ME/contacts/<user>``.
Args:
url (str): Skype API URL
Returns:
str: extracted identifier |
def create_html(self, fname, title="ClassTracker Statistics"):
"""
Create HTML page `fname` and additional files in a directory derived
from `fname`.
"""
# Create a folder to store the charts and additional HTML files.
self.basedir = os.path.dirname(os.path.abspath(fname)... | Create HTML page `fname` and additional files in a directory derived
from `fname`. |
def select_balanced_subset(items, select_count, categories, select_count_values=None, seed=None):
"""
Select items so the summed category weights are balanced.
Each item has a dictionary containing the category weights.
Items are selected until ``select_count`` is reached.
The value that is added to... | Select items so the summed category weights are balanced.
Each item has a dictionary containing the category weights.
Items are selected until ``select_count`` is reached.
The value that is added to ``select_count`` for an item can be defined in the dictionary ``select_count_values``.
If this is not def... |
def _init_from_csc(self, csc):
"""
Initialize data from a CSC matrix.
"""
if len(csc.indices) != len(csc.data):
raise ValueError('length mismatch: {} vs {}'.format(len(csc.indices), len(csc.data)))
handle = ctypes.c_void_p()
_check_call(_LIB.XGDMatrixCreateFro... | Initialize data from a CSC matrix. |
def voxelwise_diff(img_spec1=None,
img_spec2=None,
abs_value=True,
cmap='gray',
overlay_image=False,
overlay_alpha=0.8,
num_rows=2,
num_cols=6,
rescale_method='global',... | Voxel-wise difference map.
Parameters
----------
img_spec1 : str or nibabel image-like object
MR image (or path to one) to be visualized
img_spec2 : str or nibabel image-like object
MR image (or path to one) to be visualized
abs_value : bool
Flag indicating whether to take... |
def options(argv=[]):
"""
A helper function that returns a dictionary of the default key-values pairs
"""
parser = HendrixOptionParser
parsed_args = parser.parse_args(argv)
return vars(parsed_args[0]) | A helper function that returns a dictionary of the default key-values pairs |
def _set_vrrpv3e(self, v, load=False):
"""
Setter method for vrrpv3e, mapped from YANG variable /routing_system/interface/ve/ipv6/vrrpv3e (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_vrrpv3e is considered as a private
method. Backends looking to populate th... | Setter method for vrrpv3e, mapped from YANG variable /routing_system/interface/ve/ipv6/vrrpv3e (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_vrrpv3e is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._s... |
def create_option_vip(self):
"""Get an instance of option_vip services facade."""
return OptionVIP(
self.networkapi_url,
self.user,
self.password,
self.user_ldap) | Get an instance of option_vip services facade. |
def _results_accumulator(self, filename):
"""
:type filename: str
:param filename: name of file, used as a key to store in self.data
:yields: (dict, detect_secrets.plugins.base.BasePlugin)
Caller is responsible for updating the dictionary with
results o... | :type filename: str
:param filename: name of file, used as a key to store in self.data
:yields: (dict, detect_secrets.plugins.base.BasePlugin)
Caller is responsible for updating the dictionary with
results of plugin analysis. |
def color_args(args, *indexes):
"""
Color a list of arguments on particular indexes
>>> c = color_args([None,'blue'], 1)
>>> c.next()
None
>>> c.next()
'0000FF'
"""
for i,arg in enumerate(args):
if i in indexes:
yield lookup_color(arg)
else:
... | Color a list of arguments on particular indexes
>>> c = color_args([None,'blue'], 1)
>>> c.next()
None
>>> c.next()
'0000FF' |
def get_discrete_task_agent(generators, market, nStates, nOffer, markups,
withholds, maxSteps, learner, Pd0=None, Pd_min=0.0):
""" Returns a tuple of task and agent for the given learner.
"""
env = pyreto.discrete.MarketEnvironment(generators, market,
numS... | Returns a tuple of task and agent for the given learner. |
def from_code(cls, code: int) -> 'ColorCode':
""" Return a ColorCode from a terminal code. """
c = cls()
c._init_code(code)
return c | Return a ColorCode from a terminal code. |
def get_datetext(year, month, day):
"""year=2005, month=11, day=16 => '2005-11-16 00:00:00'"""
input_format = "%Y-%m-%d"
try:
datestruct = time.strptime("%i-%i-%i" % (year, month, day),
input_format)
return strftime(datetext_format, datestruct)
except:
... | year=2005, month=11, day=16 => '2005-11-16 00:00:00 |
def add(self, value):
"""
Add a value to the buffer.
"""
ind = int(self._ind % self.shape)
self._pos = self._ind % self.shape
self._values[ind] = value
if self._ind < self.shape:
self._ind += 1 # fast fill
else:
self._ind += self._... | Add a value to the buffer. |
def _worker_thread_disk(self):
# type: (Downloader) -> None
"""Worker thread for disk
:param Downloader self: this
"""
while not self.termination_check:
try:
dd, offsets, data = self._disk_queue.get(
block=False, timeout=0.1)
... | Worker thread for disk
:param Downloader self: this |
def update(self, resource, rid, updates):
"""
Updates the resource with id 'rid' with the given updates dictionary.
"""
if resource[-1] != '/':
resource += '/'
resource += str(rid)
return self.put(resource, data=updates) | Updates the resource with id 'rid' with the given updates dictionary. |
def dls(self)->List[DeviceDataLoader]:
"Returns a list of all DeviceDataLoaders. If you need a specific DeviceDataLoader, access via the relevant property (`train_dl`, `valid_dl`, etc) as the index of DLs in this list is not guaranteed to remain constant."
res = [self.train_dl, self.fix_dl, self.single_... | Returns a list of all DeviceDataLoaders. If you need a specific DeviceDataLoader, access via the relevant property (`train_dl`, `valid_dl`, etc) as the index of DLs in this list is not guaranteed to remain constant. |
def format(self, vertices):
"""Format instance to dump
vertices is dict of name to Vertex
"""
buf = io.StringIO()
buf.write(self.name + '\n')
buf.write('{\n')
buf.write(' type {};\n'.format(self.type_))
buf.write(' faces\n')
buf.write(' (... | Format instance to dump
vertices is dict of name to Vertex |
def dump(
self, stream, progress=None, lower=None, upper=None,
incremental=False, deltas=False
):
"""Dump the repository to a dumpfile stream.
:param stream: A file stream to which the dumpfile is written
:param progress: A file stream to which progress is written
:p... | Dump the repository to a dumpfile stream.
:param stream: A file stream to which the dumpfile is written
:param progress: A file stream to which progress is written
:param lower: Must be a numeric version number
:param upper: Must be a numeric version number
See ``svnadmin help ... |
def decimate(self, fraction=0.5, N=None, boundaries=False, verbose=True):
"""
Downsample the number of vertices in a mesh.
:param float fraction: the desired target of reduction.
:param int N: the desired number of final points (**fraction** is recalculated based on it).
:param ... | Downsample the number of vertices in a mesh.
:param float fraction: the desired target of reduction.
:param int N: the desired number of final points (**fraction** is recalculated based on it).
:param bool boundaries: (True), decide whether to leave boundaries untouched or not.
.. note... |
def do_forget(self, repo):
    """Drop the definition of a repository.

    Usage: forget REPO
    """
    # Bail out early (via the shared guard) if the repo is unknown.
    self.abort_on_nonexisting_repo(repo, 'forget')
    # Remove the repo definition from the network registry.
    self.network.forget(repo)
forget REPO |
def ParseNumericOption(self, options, name, base=10, default_value=None):
"""Parses a numeric option.
If the option is not set the default value is returned.
Args:
options (argparse.Namespace): command line arguments.
name (str): name of the numeric option.
base (Optional[int]): base of ... | Parses a numeric option.
If the option is not set the default value is returned.
Args:
options (argparse.Namespace): command line arguments.
name (str): name of the numeric option.
base (Optional[int]): base of the numeric value.
default_value (Optional[object]): default value.
Re... |
def per_from_id(flavors=chat_flavors+inline_flavors):
"""
:param flavors:
``all`` or a list of flavors
:return:
a seeder function that returns the from id only if the message flavor is
in ``flavors``.
"""
return _wrap_none(lambda msg:
msg['from']['i... | :param flavors:
``all`` or a list of flavors
:return:
a seeder function that returns the from id only if the message flavor is
in ``flavors``. |
def collapse_nodes(graph, survivor_mapping: Mapping[BaseEntity, Set[BaseEntity]]) -> None:
"""Collapse all nodes in values to the key nodes, in place.
:param pybel.BELGraph graph: A BEL graph
:param survivor_mapping: A dictionary with survivors as their keys, and iterables of the corresponding victims as
... | Collapse all nodes in values to the key nodes, in place.
:param pybel.BELGraph graph: A BEL graph
:param survivor_mapping: A dictionary with survivors as their keys, and iterables of the corresponding victims as
values. |
def get_body_text(self):
""" Parse the body html and returns the body text using bs4
:return: body text
:rtype: str
"""
if self.body_type != 'HTML':
return self.body
try:
soup = bs(self.body, 'html.parser')
except RuntimeError:
... | Parse the body html and returns the body text using bs4
:return: body text
:rtype: str |
def get_branches(self, local=True, remote_branches=True):
"""Returns a list of local and remote branches."""
if not self.repo.remotes:
remote_branches = False
branches = []
if remote_branches:
# Remote refs.
try:
for b in self.remot... | Returns a list of local and remote branches. |
def decorate(self, function_or_name):
'''Decorate a function to time the execution
The method can be called with or without a name. If no name is given
the function defaults to the name of the function.
:keyword function_or_name: The name to post to or the function to wrap
>>>... | Decorate a function to time the execution
The method can be called with or without a name. If no name is given
the function defaults to the name of the function.
:keyword function_or_name: The name to post to or the function to wrap
>>> from statsd import Timer
>>> timer = Tim... |
def connect(self):
''' activates the connection object '''
if not HAVE_ZMQ:
raise errors.AnsibleError("zmq is not installed")
# this is rough/temporary and will likely be optimized later ...
self.context = zmq.Context()
socket = self.context.socket(zmq.REQ)
... | activates the connection object |
def normalize_weight(self, samples):
"""normalize weight
Parameters
----------
samples: list
a collection of sample, it's a (NUM_OF_INSTANCE * NUM_OF_FUNCTIONS) matrix,
representing{{w11, w12, ..., w1k}, {w21, w22, ... w2k}, ...{wk1, wk2,..., wkk}}
... | normalize weight
Parameters
----------
samples: list
a collection of sample, it's a (NUM_OF_INSTANCE * NUM_OF_FUNCTIONS) matrix,
representing{{w11, w12, ..., w1k}, {w21, w22, ... w2k}, ...{wk1, wk2,..., wkk}}
Returns
-------
list
... |
async def api_call(self, verb, action, params=None, add_authorization_token=True, retry=False):
"""Send api call."""
if add_authorization_token and not self.token:
await self.refresh_token()
try:
return await self._api_call_impl(verb, action, params, add_authorization_to... | Send api call. |
def body(self) -> Union[bytes, str, List[Any], Dict[Any, Any], RawIOBase, None]:
    """Return the stored body payload.

    The payload may be raw bytes, text, a decoded JSON structure
    (list/dict), a readable raw stream, or ``None`` when unset.
    """
    return self._body
def get_eci_assignment_number(encoding):
"""\
Returns the ECI number for the provided encoding.
:param str encoding: A encoding name
:return str: The ECI number.
"""
try:
return consts.ECI_ASSIGNMENT_NUM[codecs.lookup(encoding).name]
except KeyError:
raise QRCodeError('Unkno... | \
Returns the ECI number for the provided encoding.
:param str encoding: A encoding name
:return str: The ECI number. |
def MSTORE8(self, address, value):
    """Store the low-order byte of ``value`` into memory at ``address``."""
    # Propagate any taint labels attached to the current program counter
    # onto the value before it is written.
    if istainted(self.pc):
        for label in get_taints(self.pc):
            value = taint_with(value, label)
    # Reserve one byte at the target address, then write the extracted
    # lowest 8 bits of the value there.
    self._allocate(address, 1)
    low_byte = Operators.EXTRACT(value, 0, 8)
    self._store(address, low_byte, 1)
def fix_nls(self, in_, out_):
"""Fixes submitted translations by filtering carriage returns and pairing
newlines at the begging and end of the translated string with the original
"""
if 0 == len(in_) or 0 == len(out_):
return out_
if "\r" in out_ and "\r" not in in_:... | Fixes submitted translations by filtering carriage returns and pairing
newlines at the begging and end of the translated string with the original |
def parse(self):
""" Parse the options. """
# Run the parser
opt, arg = self.parser.parse_known_args(self.arguments)
self.opt = opt
self.arg = arg
self.check()
# Enable --all if no particular stat or group selected
opt.all = not any([
getattr(... | Parse the options. |
def _write_branch_and_tag_to_meta_yaml(self):
"""
Write branch and tag to meta.yaml by editing in place
"""
## set the branch to pull source from
with open(self.meta_yaml.replace("meta", "template"), 'r') as infile:
dat = infile.read()
newdat = dat.format(... | Write branch and tag to meta.yaml by editing in place |
def initialize(self, training_info, model, environment, device):
""" Initialize policy gradient from reinforcer settings """
self.target_model = self.model_factory.instantiate(action_space=environment.action_space).to(device)
self.target_model.load_state_dict(model.state_dict())
self.tar... | Initialize policy gradient from reinforcer settings |
def to_rest_models(models, includes=None):
""" Convert the models into a dict for serialization
models should be an array of single model objects that
will each be serialized.
:return: dict
"""
props = {}
props['data'] = []
for model in models:
props['data'].append(_to_rest(m... | Convert the models into a dict for serialization
models should be an array of single model objects that
will each be serialized.
:return: dict |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.