code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def non_blocking(func):
"""Decorator to run a function in a different thread.
It can be used to execute a command in a non-blocking way
like this::
@non_blocking
def add_one(n):
print 'starting'
import time
time.sleep(2)
print 'ending'
... | Decorator to run a function in a different thread.
It can be used to execute a command in a non-blocking way
like this::
@non_blocking
def add_one(n):
print 'starting'
import time
time.sleep(2)
print 'ending'
return n+1
thread... |
def verify_rsa_sha1(request, rsa_public_key):
"""Verify a RSASSA-PKCS #1 v1.5 base64 encoded signature.
Per `section 3.4.3`_ of the spec.
Note this method requires the jwt and cryptography libraries.
.. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3
To satisfy `RFC2616 secti... | Verify a RSASSA-PKCS #1 v1.5 base64 encoded signature.
Per `section 3.4.3`_ of the spec.
Note this method requires the jwt and cryptography libraries.
.. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3
To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri
attri... |
def parse_message(message, nodata=False):
"""Parse df message from bytearray.
@message - message data
@nodata - do not load data
@return - [binary header, metadata, binary data]
"""
header = read_machine_header(message)
h_len = __get_machine_header_length(header)
meta_raw = message[h_l... | Parse df message from bytearray.
@message - message data
@nodata - do not load data
@return - [binary header, metadata, binary data] |
def _get_missing_trees(self, path, root_tree):
"""
Creates missing ``Tree`` objects for the given path.
:param path: path given as a string. It may be a path to a file node
(i.e. ``foo/bar/baz.txt``) or directory path - in that case it must
end with slash (i.e. ``foo/bar/``)... | Creates missing ``Tree`` objects for the given path.
:param path: path given as a string. It may be a path to a file node
(i.e. ``foo/bar/baz.txt``) or directory path - in that case it must
end with slash (i.e. ``foo/bar/``).
:param root_tree: ``dulwich.objects.Tree`` object from wh... |
def compile_datetime(rule):
"""
Compiler helper method: attempt to compile constant into object representing
datetime object to enable relations and thus simple comparisons using Python
operators.
"""
if isinstance(rule.value, datetime.datetime):
return rule
try:
# Try numeri... | Compiler helper method: attempt to compile constant into object representing
datetime object to enable relations and thus simple comparisons using Python
operators. |
def get_stp_mst_detail_output_msti_port_link_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_stp_mst_detail = ET.Element("get_stp_mst_detail")
config = get_stp_mst_detail
output = ET.SubElement(get_stp_mst_detail, "output")
msti ... | Auto Generated Code |
def pixel_array_to_image(self, width, height, channels, undefined_on_failure=True, allow_rounding=False):
"""
Create a new SArray with all the values cast to :py:class:`turicreate.image.Image`
of uniform size.
Parameters
----------
width: int
The width of the... | Create a new SArray with all the values cast to :py:class:`turicreate.image.Image`
of uniform size.
Parameters
----------
width: int
The width of the new images.
height: int
The height of the new images.
channels: int.
Number of chan... |
def pytype_to_ctype(t):
""" Python -> pythonic type binding. """
if isinstance(t, List):
return 'pythonic::types::list<{0}>'.format(
pytype_to_ctype(t.__args__[0])
)
elif isinstance(t, Set):
return 'pythonic::types::set<{0}>'.format(
pytype_to_ctype(t.__args__... | Python -> pythonic type binding. |
def load_XAML(file_obj, *args, **kwargs):
"""
Load a 3D XAML file.
Parameters
----------
file_obj : file object
Open, containing XAML file
Returns
----------
result : dict
kwargs for a trimesh constructor, including:
vertices: (n,3)... | Load a 3D XAML file.
Parameters
----------
file_obj : file object
Open, containing XAML file
Returns
----------
result : dict
kwargs for a trimesh constructor, including:
vertices: (n,3) np.float64, points in space
faces: ... |
def _do_perform_delete_on_model(self):
"""
Perform the actual delete query on this model instance.
"""
if self._force_deleting:
return self.with_trashed().where(self.get_key_name(), self.get_key()).force_delete()
return self._run_soft_delete() | Perform the actual delete query on this model instance. |
def get_trace(self, project_id, trace_id):
"""
Gets a single trace by its ID.
Args:
trace_id (str): ID of the trace to return.
project_id (str): Required. ID of the Cloud project where the trace
data is stored.
Returns:
A Trace dict.
... | Gets a single trace by its ID.
Args:
trace_id (str): ID of the trace to return.
project_id (str): Required. ID of the Cloud project where the trace
data is stored.
Returns:
A Trace dict. |
def applyKeyMapping(self, mapping):
"""
Used as the second half of the key reassignment algorithm.
Loops over each row in the table, replacing references to
old row keys with the new values from the mapping.
"""
for coltype, colname in zip(self.columntypes, self.columnnames):
if coltype in ligolwtypes.ID... | Used as the second half of the key reassignment algorithm.
Loops over each row in the table, replacing references to
old row keys with the new values from the mapping. |
def thanksgiving(year, country='usa'):
'''USA: last Thurs. of November, Canada: 2nd Mon. of October'''
if country == 'usa':
if year in [1940, 1941]:
return nth_day_of_month(3, THU, NOV, year)
elif year == 1939:
return nth_day_of_month(4, THU, NOV, year)
else:
... | USA: last Thurs. of November, Canada: 2nd Mon. of October |
def download(client, target_dir):
"""Download inappproducts from play store."""
print('')
print("download inappproducts")
print('---------------------')
products = client.list_inappproducts()
for product in products:
path = os.path.join(target_dir, 'products')
del product['packa... | Download inappproducts from play store. |
def supported(cls, stream=sys.stdout):
"""
A class method that returns True if the current platform supports
coloring terminal output using this method. Returns False otherwise.
"""
if not stream.isatty():
return False # auto color only on TTYs
try:
... | A class method that returns True if the current platform supports
coloring terminal output using this method. Returns False otherwise. |
def get_tournament(self, tag: crtag, **params: keys):
"""Get a tournament information
Parameters
----------
tag: str
A valid tournament tag. Minimum length: 3
Valid characters: 0289PYLQGRJCUV
\*\*keys: Optional[list] = None
Filter which keys s... | Get a tournament information
Parameters
----------
tag: str
A valid tournament tag. Minimum length: 3
Valid characters: 0289PYLQGRJCUV
\*\*keys: Optional[list] = None
Filter which keys should be included in the
response
\*\*exclude... |
def loading(self):
"""Context manager for when you need to instantiate entities upon unpacking"""
if getattr(self, '_initialized', False):
raise ValueError("Already loading")
self._initialized = False
yield
self._initialized = True | Context manager for when you need to instantiate entities upon unpacking |
def ast_to_code(ast, indent=0):
# type: (Any, int) -> str
"""
Converts an ast into a python code representation of the AST.
"""
code = []
def append(line):
# type: (str) -> None
code.append((" " * indent) + line)
if isinstance(ast, Node):
append("ast.{}(".format(... | Converts an ast into a python code representation of the AST. |
def list_all(self):
"""Return all equipments in database
:return: Dictionary with the following structure:
::
{'equipaments': {'name' :< name_equipament >}, {... demais equipamentos ...} }
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLE... | Return all equipments in database
:return: Dictionary with the following structure:
::
{'equipaments': {'name' :< name_equipament >}, {... demais equipamentos ...} }
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to gene... |
def calc_time_step(self):
"""
Set the time step during time domain simulations
Parameters
----------
convergence: bool
truth value of the convergence of the last step
niter: int
current iteration count
t: float
current simulati... | Set the time step during time domain simulations
Parameters
----------
convergence: bool
truth value of the convergence of the last step
niter: int
current iteration count
t: float
current simulation time
Returns
-------
... |
def get_script_args(dist, executable=sys_executable, wininst=False):
"""Yield write_script() argument tuples for a distribution's entrypoints"""
spec = str(dist.as_requirement())
header = get_script_header("", executable, wininst)
for group in 'console_scripts', 'gui_scripts':
for name, ep in di... | Yield write_script() argument tuples for a distribution's entrypoints |
def coupling_matrix_2j(j1, j2):
ur"""For angular momenta $j_1, j_2$ the unitary transformation from the \
uncoupled basis into the $j = j_1 \oplus j_2$ coupled basis.
>>> from sympy import Integer, pprint
>>> L = 0
>>> S = 1/Integer(2)
>>> pprint(coupling_matrix_2j(L, S))
β‘1 0β€
β’ β₯
... | ur"""For angular momenta $j_1, j_2$ the unitary transformation from the \
uncoupled basis into the $j = j_1 \oplus j_2$ coupled basis.
>>> from sympy import Integer, pprint
>>> L = 0
>>> S = 1/Integer(2)
>>> pprint(coupling_matrix_2j(L, S))
β‘1 0β€
β’ β₯
β£0 1β¦
>>> L = 1
>>> S ... |
def upload_file(self, file_or_path, obj_name=None, content_type=None,
etag=None, return_none=False, content_encoding=None, ttl=None,
content_length=None, headers=None):
"""
Uploads the specified file to this container. If no name is supplied,
the file's name will be used.... | Uploads the specified file to this container. If no name is supplied,
the file's name will be used. Either a file path or an open file-like
object may be supplied. A StorageObject reference to the uploaded file
will be returned, unless 'return_none' is set to True.
You may optionally se... |
def unit(self):
""" Returns the unit attribute of the underlying ncdf variable.
If the units has a length (e.g is a list) and has precisely one element per field,
the unit for this field is returned.
"""
unit = ncVarUnit(self._ncVar)
fieldNames = self._ncVar.dtyp... | Returns the unit attribute of the underlying ncdf variable.
If the units has a length (e.g is a list) and has precisely one element per field,
the unit for this field is returned. |
def detect_protocol(cls, message):
'''Attempt to detect the protocol from the message.'''
main = cls._message_to_payload(message)
def protocol_for_payload(payload):
if not isinstance(payload, dict):
return JSONRPCLoose # Will error
# Obey an explicit "j... | Attempt to detect the protocol from the message. |
def create_archive(archive, filenames, verbosity=0, program=None, interactive=True):
"""Create given archive with given files."""
util.check_new_filename(archive)
util.check_archive_filelist(filenames)
if verbosity >= 0:
util.log_info("Creating %s ..." % archive)
res = _create_archive(archiv... | Create given archive with given files. |
def is_web_url(string):
"""Check to see if string is an validly-formatted web url."""
assert isinstance(string, basestring)
parsed_url = urllib.parse.urlparse(string)
return (
(
parsed_url.scheme.lower() == 'http'
or parsed_url.scheme.lower() == 'https'
)
... | Check to see if string is an validly-formatted web url. |
def scan_config_argument(ctx, param, value, config_dir=None):
"""Validate / translate config name/path values for click config arg.
Wrapper on top of :func:`cli.scan_config`."""
if callable(config_dir):
config_dir = config_dir()
if not config:
click.echo("Enter at least one CONFIG")
... | Validate / translate config name/path values for click config arg.
Wrapper on top of :func:`cli.scan_config`. |
def compare(jaide, commands):
""" Perform a show | compare with some set commands.
@param jaide: The jaide connection to the device.
@type jaide: jaide.Jaide object
@param commands: The set commands to send to the device to compare with.
@type commands: str or list
@returns: The output from th... | Perform a show | compare with some set commands.
@param jaide: The jaide connection to the device.
@type jaide: jaide.Jaide object
@param commands: The set commands to send to the device to compare with.
@type commands: str or list
@returns: The output from the device.
@rtype str |
def offset(self):
"""int: offset of the key within the Windows Registry file or None."""
if not self._registry_key and self._registry:
self._GetKeyFromRegistry()
if not self._registry_key:
return None
return self._registry_key.offset | int: offset of the key within the Windows Registry file or None. |
def collect_results(self) -> Optional[Tuple[int, Dict[str, float]]]:
"""
Returns the decoded checkpoint and the decoder metrics or None if the queue is empty.
"""
self.wait_to_finish()
if self.decoder_metric_queue.empty():
if self._results_pending:
sel... | Returns the decoded checkpoint and the decoder metrics or None if the queue is empty. |
def currentDateTime(self):
"""
Returns the current date time for this widget.
:return <datetime.datetime>
"""
view = self.uiGanttVIEW
scene = view.scene()
point = view.mapToScene(0, 0)
return scene.datetimeAt(point.x()) | Returns the current date time for this widget.
:return <datetime.datetime> |
def _create_ret_object(self, status=SUCCESS, data=None, error=False,
error_message=None, error_cause=None):
"""
Create generic reponse objects.
:param str status: The SUCCESS or FAILURE of the request
:param obj data: The data to return
:param bool err... | Create generic reponse objects.
:param str status: The SUCCESS or FAILURE of the request
:param obj data: The data to return
:param bool error: Set to True to add Error response
:param str error_message: The generic error message
:param str error_cause: The cause of the error
... |
def __configure_interior(self, *args):
"""
Private function to configure the interior Frame.
:param args: Tkinter event
"""
# Resize the canvas scrollregion to fit the entire frame
(size_x, size_y) = (self.interior.winfo_reqwidth(), self.interior.winfo_reqheight(... | Private function to configure the interior Frame.
:param args: Tkinter event |
def get_connection(self):
"""Get a connection to this Database. Connections are retrieved from a
pool.
"""
if not self.open:
raise exc.ResourceClosedError('Database closed.')
return Connection(self._engine.connect()) | Get a connection to this Database. Connections are retrieved from a
pool. |
def add(self, nb = 1, name = None, xid = None):
"""
Create one or many workers.
"""
for x in xrange(nb):
self.count_lock.acquire()
if self.workers >= self.max_workers:
self.count_lock.release()
continue
self.workers += 1... | Create one or many workers. |
def fetch(self, wait=0):
"""
get the task result objects.
:param int wait: how many milliseconds to wait for a result
:return: an unsorted list of task objects
"""
if self.started:
return fetch(self.id, wait=wait, cached=self.cached) | get the task result objects.
:param int wait: how many milliseconds to wait for a result
:return: an unsorted list of task objects |
def extend(validator, validators=(), version=None, type_checker=None):
"""
Create a new validator class by extending an existing one.
Arguments:
validator (jsonschema.IValidator):
an existing validator class
validators (collections.Mapping):
a mapping of new vali... | Create a new validator class by extending an existing one.
Arguments:
validator (jsonschema.IValidator):
an existing validator class
validators (collections.Mapping):
a mapping of new validator callables to extend with, whose
structure is as in `create`.
... |
def list_same_dimensions(self, unit_object):
"""
Return a list of base unit names that this registry knows about that
are of equivalent dimensions to *unit_object*.
"""
equiv = [k for k, v in self.lut.items() if v[1] is unit_object.dimensions]
equiv = list(sorted(set(equi... | Return a list of base unit names that this registry knows about that
are of equivalent dimensions to *unit_object*. |
def MeshLines(*inputobj, **options):
"""
Build the line segments between two lists of points `startPoints` and `endPoints`.
`startPoints` can be also passed in the form ``[[point1, point2], ...]``.
A dolfin ``Mesh`` that was deformed/modified by a function can be
passed together as inputs.
:pa... | Build the line segments between two lists of points `startPoints` and `endPoints`.
`startPoints` can be also passed in the form ``[[point1, point2], ...]``.
A dolfin ``Mesh`` that was deformed/modified by a function can be
passed together as inputs.
:param float scale: apply a rescaling factor to the ... |
def dt(self, start_node=None):
"""main method to create an RSTTree from the output of get_rs3_data().
TODO: add proper documentation
"""
if start_node is None:
return self.root2tree(start_node=start_node)
elem_id = start_node
if elem_id not in self.elem_dict... | main method to create an RSTTree from the output of get_rs3_data().
TODO: add proper documentation |
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, **kw):
"""
Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic type... | Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
... |
def shared_databases(self):
"""
Retrieves a list containing the names of databases shared
with this account.
:returns: List of database names
"""
endpoint = '/'.join((
self.server_url, '_api', 'v2', 'user', 'shared_databases'))
resp = self.r_session.g... | Retrieves a list containing the names of databases shared
with this account.
:returns: List of database names |
def _file_in_patch(self, filename, patch, ignore):
""" Checks if a backup file of the filename in the current patch
exists """
file = self.quilt_pc + File(os.path.join(patch.get_name(), filename))
if file.exists():
if ignore:
return True
else:
... | Checks if a backup file of the filename in the current patch
exists |
def _get_magnitude_term(self, C, mag):
"""
Returns the magnitude scaling term provided in Equation (5)
"""
dmag = mag - 8.0
return C["c0"] + C["c3"] * dmag + C["c4"] * (dmag ** 2.) | Returns the magnitude scaling term provided in Equation (5) |
def matches_to_marker_results(df):
"""Perfect BLAST matches to marker results dict
Parse perfect BLAST matches to marker results dict.
Args:
df (pandas.DataFrame): DataFrame of perfect BLAST matches
Returns:
dict: cgMLST330 marker names to matching allele numbers
"""
assert i... | Perfect BLAST matches to marker results dict
Parse perfect BLAST matches to marker results dict.
Args:
df (pandas.DataFrame): DataFrame of perfect BLAST matches
Returns:
dict: cgMLST330 marker names to matching allele numbers |
async def release(
self, *, comment: str = None, erase: bool = None,
secure_erase: bool = None, quick_erase: bool = None,
wait: bool = False, wait_interval: int = 5):
"""
Release the machine.
:param comment: Reason machine was released.
:type comment:... | Release the machine.
:param comment: Reason machine was released.
:type comment: `str`
:param erase: Erase the disk when release.
:type erase: `bool`
:param secure_erase: Use the drive's secure erase feature if available.
:type secure_erase: `bool`
:param quick_e... |
def balance(self):
"""Check this transaction for correctness"""
self.check()
if not sum(map(lambda x: x.amount, self.src)) == -self.amount:
raise XnBalanceError("Sum of source amounts "
"not equal to transaction amount")
if not sum(map(lambda ... | Check this transaction for correctness |
def wv45(msg):
"""Wake vortex.
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
int: Wake vortex level. 0=NIL, 1=Light, 2=Moderate, 3=Severe
"""
d = hex2bin(data(msg))
if d[12] == '0':
return None
ws = bin2int(d[13:15])
return ws | Wake vortex.
Args:
msg (String): 28 bytes hexadecimal message string
Returns:
int: Wake vortex level. 0=NIL, 1=Light, 2=Moderate, 3=Severe |
def _full_name(self, record_name):
"""Returns full domain name of a sub-domain name"""
# Handle None and empty strings
if not record_name:
return self.domain
return super(Provider, self)._full_name(record_name) | Returns full domain name of a sub-domain name |
def _format_finite(negative, digits, dot_pos):
"""Given a (possibly empty) string of digits and an integer
dot_pos indicating the position of the decimal point relative to
the start of that string, output a formatted numeric string with
the same value and same implicit exponent."""
# strip leading ... | Given a (possibly empty) string of digits and an integer
dot_pos indicating the position of the decimal point relative to
the start of that string, output a formatted numeric string with
the same value and same implicit exponent. |
def get_product(id=None, name=None):
"""
Get a specific Product by name or ID
"""
content = get_product_raw(id, name)
if content:
return utils.format_json(content) | Get a specific Product by name or ID |
def wirevector_subset(self, cls=None, exclude=tuple()):
"""Return set of wirevectors, filtered by the type or tuple of types provided as cls.
If no cls is specified, the full set of wirevectors associated with the Block are
returned. If cls is a single type, or a tuple of types, only those wir... | Return set of wirevectors, filtered by the type or tuple of types provided as cls.
If no cls is specified, the full set of wirevectors associated with the Block are
returned. If cls is a single type, or a tuple of types, only those wirevectors of
the matching types will be returned. This is h... |
def create(url, filename):
"""Create new fMRI for given experiment by uploading local file.
Expects an tar-archive.
Parameters
----------
url : string
Url to POST fMRI create request
filename : string
Path to tar-archive on local disk
Ret... | Create new fMRI for given experiment by uploading local file.
Expects an tar-archive.
Parameters
----------
url : string
Url to POST fMRI create request
filename : string
Path to tar-archive on local disk
Returns
-------
string
... |
def one_hot_encoding(input_tensor, num_labels):
""" One-hot encode labels from input """
xview = input_tensor.view(-1, 1).to(torch.long)
onehot = torch.zeros(xview.size(0), num_labels, device=input_tensor.device, dtype=torch.float)
onehot.scatter_(1, xview, 1)
return onehot.view(list(input_tensor.s... | One-hot encode labels from input |
def tf_action_exploration(self, action, exploration, action_spec):
"""
Applies optional exploration to the action (post-processor for action outputs).
Args:
action (tf.Tensor): The original output action tensor (to be post-processed).
exploration (Exploration): The Exp... | Applies optional exploration to the action (post-processor for action outputs).
Args:
action (tf.Tensor): The original output action tensor (to be post-processed).
exploration (Exploration): The Exploration object to use.
action_spec (dict): Dict specifying the action spa... |
def _element_keywords(cls, backend, elements=None):
"Returns a dictionary of element names to allowed keywords"
if backend not in Store.loaded_backends():
return {}
mapping = {}
backend_options = Store.options(backend)
elements = elements if elements is not None else... | Returns a dictionary of element names to allowed keywords |
def reset(self):
"""
Called when open/close a project. Cleanup internal stuff
"""
self._allocated_node_names = set()
self._nodes = {}
self._links = {}
self._drawings = {}
self._snapshots = {}
# List the available snapshots
snapshot_dir = o... | Called when open/close a project. Cleanup internal stuff |
def eth_getStorageAt(self, address, position=0, block=BLOCK_TAG_LATEST):
"""https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getstorageat
:param address: Storage address
:type address: str
:param position: Position in storage (optional)
:type position: int
:param blo... | https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getstorageat
:param address: Storage address
:type address: str
:param position: Position in storage (optional)
:type position: int
:param block: Block tag or number (optional)
:type block: int or BLOCK_TAGS
:... |
def read_memory(self, addr, transfer_size=32, now=True):
"""! @brief Read a memory location.
By default, a word will be read.
"""
assert transfer_size in (8, 16, 32)
if transfer_size == 32:
result = conversion.byte_list_to_u32le_list(self._link.read_mem32(add... | ! @brief Read a memory location.
By default, a word will be read. |
def print_matrix(X, decimals=1):
"""Pretty printing for numpy matrix X"""
for row in np.round(X, decimals=decimals):
print(row) | Pretty printing for numpy matrix X |
def diam_swamee(FlowRate, HeadLossFric, Length, Nu, PipeRough):
"""Return the inner diameter of a pipe.
The Swamee Jain equation is dimensionally correct and returns the
inner diameter of a pipe given the flow rate and the head loss due
to shear on the pipe walls. The Swamee Jain equation does NOT take... | Return the inner diameter of a pipe.
The Swamee Jain equation is dimensionally correct and returns the
inner diameter of a pipe given the flow rate and the head loss due
to shear on the pipe walls. The Swamee Jain equation does NOT take
minor losses into account. This equation ONLY applies to turbulent... |
def save_config(
self,
cmd="copy running-configuration startup-configuration",
confirm=False,
confirm_response="",
):
"""Saves Config"""
return super(DellForce10SSH, self).save_config(
cmd=cmd, confirm=confirm, confirm_response=confirm_response
) | Saves Config |
def drop_duplicates(self, subset=None, keep='min'):
"""Return DataFrame with duplicate rows (excluding index) removed,
optionally only considering subset columns.
Note that the row order is NOT maintained due to hashing.
Parameters
----------
subset : list of str, optio... | Return DataFrame with duplicate rows (excluding index) removed,
optionally only considering subset columns.
Note that the row order is NOT maintained due to hashing.
Parameters
----------
subset : list of str, optional
Which columns to consider
keep : {'+', ... |
def _pycall_path_simple(
x1: int, y1: int, x2: int, y2: int, handle: Any
) -> float:
"""Does less and should run faster, just calls the handle function."""
return ffi.from_handle(handle)(x1, y1, x2, y2) | Does less and should run faster, just calls the handle function. |
def annotate_op(self, op):
"""
Takes a bytecode operation (:class:`Op`) and annotates it using the
data contained in this code object.
Arguments:
op(Op): An :class:`Op` instance.
Returns:
AnnotatedOp: An annotated bytecode operation.
"""
... | Takes a bytecode operation (:class:`Op`) and annotates it using the
data contained in this code object.
Arguments:
op(Op): An :class:`Op` instance.
Returns:
AnnotatedOp: An annotated bytecode operation. |
def _set_config(self, config=None):
"""Set this component's initial configuration"""
if not config:
config = {}
try:
# pprint(self.configschema)
self.config = self.componentmodel(config)
# self.log("Config schema:", lvl=critical)
# ppr... | Set this component's initial configuration |
def wrap_penalty(p, fit_linear, linear_penalty=0.):
"""
tool to account for unity penalty on the linear term of any feature.
example:
p = wrap_penalty(derivative, fit_linear=True)(n, coef)
Parameters
----------
p : callable.
penalty-matrix-generating function.
fit_linear : ... | tool to account for unity penalty on the linear term of any feature.
example:
p = wrap_penalty(derivative, fit_linear=True)(n, coef)
Parameters
----------
p : callable.
penalty-matrix-generating function.
fit_linear : boolean.
whether the current feature has a linear term o... |
def f2p(phrase, max_word_size=15, cutoff=3):
"""Convert a Finglish phrase to the most probable Persian phrase.
"""
results = f2p_list(phrase, max_word_size, cutoff)
return ' '.join(i[0][0] for i in results) | Convert a Finglish phrase to the most probable Persian phrase. |
def _unpack_zipfile(filename, extract_dir):
"""Unpack zip `filename` to `extract_dir`
"""
try:
import zipfile
except ImportError:
raise ReadError('zlib not supported, cannot unpack this archive.')
if not zipfile.is_zipfile(filename):
raise ReadError("%s is not a zip file" % ... | Unpack zip `filename` to `extract_dir` |
def Decrypt(self, encrypted_data):
"""Decrypts the encrypted data.
Args:
encrypted_data (bytes): encrypted data.
Returns:
tuple[bytes, bytes]: decrypted data and remaining encrypted data.
"""
index_split = -(len(encrypted_data) % AES.block_size)
if index_split:
remaining_encr... | Decrypts the encrypted data.
Args:
encrypted_data (bytes): encrypted data.
Returns:
tuple[bytes, bytes]: decrypted data and remaining encrypted data. |
def allocate_objects(self, eps = 0.01, noise_size = 1):
"""!
@brief Allocates object segments.
@param[in] eps (double): Tolerance level that define maximal difference between phases of oscillators in one segment.
@param[in] noise_size (uint): Threshold that defines noise - ... | !
@brief Allocates object segments.
@param[in] eps (double): Tolerance level that define maximal difference between phases of oscillators in one segment.
@param[in] noise_size (uint): Threshold that defines noise - segments size (in pixels) that is less then the threshold is conside... |
def verify_certificate_issuer(self, certificate_issuer_id, **kwargs): # noqa: E501
"""Verify certificate issuer. # noqa: E501
A utility API that can be used to validate the user configuration before activating a certificate issuer. Verifies that the certificate issuer is accessible and can be used to... | Verify certificate issuer. # noqa: E501
A utility API that can be used to validate the user configuration before activating a certificate issuer. Verifies that the certificate issuer is accessible and can be used to generate certificates by Device Management. <br> **Note:** The API requests the 3rd party CA t... |
def evaluate(self):
"""Evaluate functional value of previous iteration."""
X = mp_Z_Y
Xf = mp_Zf
Df = mp_Df
Sf = mp_Sf
Ef = sl.inner(Df[np.newaxis, ...], Xf,
axis=self.xstep.cri.axisM+1) - Sf
Ef = np.swapaxes(Ef, 0, self.xstep.cri.axisK+1)[... | Evaluate functional value of previous iteration. |
def elapsed(self):
"""
Get elapsed time is seconds (float)
"""
# Clock stops running when total is reached
if self.count == self.total:
elapsed = self.last_update - self.start
else:
elapsed = time.time() - self.start
return elapsed | Get elapsed time is seconds (float) |
def set_editor(self, editor):
"""
Sets the associated editor, when the editor's offset calculator mode
emit the signal pic_infos_available, the table is automatically
refreshed.
You can also refresh manually by calling :meth:`update_pic_infos`.
"""
if self._edito... | Sets the associated editor, when the editor's offset calculator mode
emit the signal pic_infos_available, the table is automatically
refreshed.
You can also refresh manually by calling :meth:`update_pic_infos`. |
def parse_model_specifier(specifier):
'''
Parses a string that specifies either a model or a field.
The string should look like ``app.model.[field]``.
>>> print parse_model_specifier('tests.TestModel')
(<class 'tests.models.TestModel'>, None)
>>> print parse_model_specifier('tests.TestModel.ima... | Parses a string that specifies either a model or a field.
The string should look like ``app.model.[field]``.
>>> print parse_model_specifier('tests.TestModel')
(<class 'tests.models.TestModel'>, None)
>>> print parse_model_specifier('tests.TestModel.image')
(<class 'tests.models.TestModel'>, 'image... |
def addSynapse(self, srcCellCol, srcCellIdx, perm):
    """Add a new synapse.

    :param srcCellCol: source cell column
    :param srcCellIdx: source cell index within the column
    :param perm: initial permanence
    """
    # Store as [column, index, permanence]; indices are coerced to int
    # and the permanence to a 32-bit float.
    synapse = [int(srcCellCol), int(srcCellIdx), numpy.float32(perm)]
    self.syns.append(synapse)
:param srcCellCol source cell column
:param srcCellIdx source cell index within the column
:param perm initial permanence |
def setdefault(self, k, d=None):
    """Title-case the key, then defer to ``dict.setdefault``.

    Keeps key normalisation consistent with the other mapping overrides
    on this class.
    """
    key = k.title()
    return super(HeaderDict, self).setdefault(key, d)
def _define_output_buffers(self):
"""
Prepare a dictionary so we know what buffers have to be update with the the output of every step.
"""
# First define buffers that need input data
self.target_buffers = {
None: [(step, self.buffers[step]) for step in self._get_inp... | Prepare a dictionary so we know what buffers have to be update with the the output of every step. |
def parse_for(control_line):
"""Returns name of loop control variable(s), iteration type (in/word_in) and
expression to iterate on.
For example:
- given "for $i in $foo", returns (['i'], '$foo')
- given "for ${i} in $(ls $foo)", returns (['i'], '$(ls $foo)')
- given "for $k, $v in $foo", return... | Returns name of loop control variable(s), iteration type (in/word_in) and
expression to iterate on.
For example:
- given "for $i in $foo", returns (['i'], '$foo')
- given "for ${i} in $(ls $foo)", returns (['i'], '$(ls $foo)')
- given "for $k, $v in $foo", returns (['k', 'v'], '$foo') |
def global_var(self, name):
    """Insert a new static (global) variable definition.

    Emits a label named *name*, followed by a directive reserving one
    word of storage for it.
    """
    directive = "WORD\t1"
    self.newline_label(name, False, True)
    self.newline_text(directive, True)
def crop(self, extent, copy=False):
"""
Crop to a new depth range.
Args:
extent (tuple): The new start and stop depth. Must be 'inside'
existing striplog.
copy (bool): Whether to operate in place or make a copy.
Returns:
Operates in p... | Crop to a new depth range.
Args:
extent (tuple): The new start and stop depth. Must be 'inside'
existing striplog.
copy (bool): Whether to operate in place or make a copy.
Returns:
Operates in place by deault; if copy is True, returns a striplog. |
def OnInsertCols(self, event):
"""Inserts the maximum of 1 and the number of selected columns"""
bbox = self.grid.selection.get_bbox()
if bbox is None or bbox[1][1] is None:
# Insert rows at cursor
ins_point = self.grid.actions.cursor[1] - 1
no_cols = 1
... | Inserts the maximum of 1 and the number of selected columns |
def get_issns_for_journal(nlm_id):
"""Get a list of the ISSN numbers for a journal given its NLM ID.
Information on NLM XML DTDs is available at
https://www.nlm.nih.gov/databases/dtd/
"""
params = {'db': 'nlmcatalog',
'retmode': 'xml',
'id': nlm_id}
tree = send_reque... | Get a list of the ISSN numbers for a journal given its NLM ID.
Information on NLM XML DTDs is available at
https://www.nlm.nih.gov/databases/dtd/ |
def towgs84(E, N, pkm=False, presentation=None):
"""
Convert coordintes from TWD97 to WGS84
The east and north coordinates should be in meters and in float
pkm true for Penghu, Kinmen and Matsu area
You can specify one of the following presentations of the returned values:
dms - A tuple wit... | Convert coordintes from TWD97 to WGS84
The east and north coordinates should be in meters and in float
pkm true for Penghu, Kinmen and Matsu area
You can specify one of the following presentations of the returned values:
dms - A tuple with degrees (int), minutes (int) and seconds (float)
dm... |
def state_get(self):
"""Return the internal state of the DataFrame in a dictionary
Example:
>>> import vaex
>>> df = vaex.from_scalars(x=1, y=2)
>>> df['r'] = (df.x**2 + df.y**2)**0.5
>>> df.state_get()
{'active_range': [0, 1],
'column_names': ['x', 'y',... | Return the internal state of the DataFrame in a dictionary
Example:
>>> import vaex
>>> df = vaex.from_scalars(x=1, y=2)
>>> df['r'] = (df.x**2 + df.y**2)**0.5
>>> df.state_get()
{'active_range': [0, 1],
'column_names': ['x', 'y', 'r'],
'description': No... |
def new(self, request):
"""Render a form to create a new object."""
form = (self.form or generate_form(self.model))()
return self._render(
request = request,
template = 'new',
context = {
'form': form
},
status = 200
... | Render a form to create a new object. |
def connect_cloudfront(self):
    """Connect to CloudFront; called automatically for you when needed.

    Stores the resulting connection on ``self.conn_cloudfront``.
    """
    # Delegates to the module-level connect_cloudfront helper using the
    # credentials and debug level configured on this instance.
    self.conn_cloudfront = connect_cloudfront(
        self.AWS_ACCESS_KEY_ID,
        self.AWS_SECRET_ACCESS_KEY,
        debug=self.S3UTILS_DEBUG_LEVEL,
    )
def filter_paragraphs(paragraphs, contains=None):
"""Filter paragraphs to only those containing one of a list of strings
Parameters
----------
paragraphs : list of str
List of plaintext paragraphs from an article
contains : str or list of str
Exclude paragraphs not containing this ... | Filter paragraphs to only those containing one of a list of strings
Parameters
----------
paragraphs : list of str
List of plaintext paragraphs from an article
contains : str or list of str
Exclude paragraphs not containing this string as a token, or
at least one of the strings... |
def begin_transaction(self, transaction_type, trace_parent=None):
    """Register the start of a transaction on the client.

    Delegates to the underlying tracer and returns whatever it produces.
    """
    tracer = self.tracer
    return tracer.begin_transaction(transaction_type, trace_parent=trace_parent)
def resolve_freezer(freezer):
"""
Locate the appropriate freezer given FREEZER or string input from the programmer.
:param freezer: FREEZER constant or string for the freezer that is requested. (None = FREEZER.DEFAULT)
:return:
"""
# Set default freezer if there was none
if not freezer:
... | Locate the appropriate freezer given FREEZER or string input from the programmer.
:param freezer: FREEZER constant or string for the freezer that is requested. (None = FREEZER.DEFAULT)
:return: |
def parse_rst(text: str) -> docutils.nodes.document:
"""Parse text assuming it's an RST markup."""
parser = docutils.parsers.rst.Parser()
components = (docutils.parsers.rst.Parser,)
settings = docutils.frontend.OptionParser(components=components).get_default_values()
document = docutils.utils.new_do... | Parse text assuming it's an RST markup. |
def use(parser, token):
'''
Counterpart to `macro`, lets you render any block/macro in place.
'''
args, kwargs = parser.parse_args(token)
assert isinstance(args[0], ast.Str), \
'First argument to "include" tag must be a string'
name = args[0].s
action = ast.YieldFrom(
value... | Counterpart to `macro`, lets you render any block/macro in place. |
def read_binary(self, num, item_type='B'):
"""Parse the current buffer offset as the specified code."""
if 'B' in item_type:
return self.read(num)
if item_type[0] in ('@', '=', '<', '>', '!'):
order = item_type[0]
item_type = item_type[1:]
else:
... | Parse the current buffer offset as the specified code. |
def setup(app):
"""Setup sphinx-gallery sphinx extension"""
app.add_config_value('plot_gallery', True, 'html')
app.add_config_value('abort_on_example_error', False, 'html')
app.add_config_value('sphinx_gallery_conf', gallery_conf, 'html')
app.add_stylesheet('gallery.css')
app.connect('builder-i... | Setup sphinx-gallery sphinx extension |
def search_stack_for_var(varname, verbose=util_arg.NOT_QUIET):
"""
Finds a varable (local or global) somewhere in the stack and returns the value
Args:
varname (str): variable name
Returns:
None if varname is not found else its value
"""
curr_frame = inspect.currentframe()
... | Finds a varable (local or global) somewhere in the stack and returns the value
Args:
varname (str): variable name
Returns:
None if varname is not found else its value |
def init_storage(self):
    """Set current term to zero upon initialization & voted_for to None.

    The term is only seeded when persistent storage has never recorded
    one; the vote is always cleared on start-up.
    """
    storage = self.storage
    if not storage.exists('term'):
        storage.update({'term': 0})
    storage.update({'voted_for': None})
def to_dot(self, path: str, title: Optional[str] = None):
"""
Print the automaton to a dot file
:param path: the path where to save the file.
:param title:
:return:
"""
g = graphviz.Digraph(format='svg')
g.node('fake', style='invisible')
for state... | Print the automaton to a dot file
:param path: the path where to save the file.
:param title:
:return: |
def connected(self, node_id):
    """Return True iff the node_id is connected."""
    # An unknown node_id short-circuits to False; otherwise report
    # the connection object's own status.
    conn = self._conns.get(node_id)
    return conn is not None and conn.connected()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.