code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def resync_package(ctx, opts, owner, repo, slug, skip_errors):
"""Resynchronise a package."""
click.echo(
"Resynchonising the %(slug)s package ... "
% {"slug": click.style(slug, bold=True)},
nl=False,
)
context_msg = "Failed to resynchronise package!"
with handle_api_excepti... | Resynchronise a package. |
def exit(self):
"""Terminate gdb process
Returns: None"""
if self.gdb_process:
self.gdb_process.terminate()
self.gdb_process.communicate()
self.gdb_process = None
return None | Terminate gdb process
Returns: None |
def _open_ftp(self):
# type: () -> FTP
"""Open an ftp object for the file."""
ftp = self.fs._open_ftp()
ftp.voidcmd(str("TYPE I"))
return ftp | Open an ftp object for the file. |
def _modify_new_lines(code_to_modify, offset, code_to_insert):
"""
Update new lines: the bytecode inserted should be the last instruction of the previous line.
:return: bytes sequence of code with updated lines offsets
"""
# There's a nice overview of co_lnotab in
# https://github.com/python/cpy... | Update new lines: the bytecode inserted should be the last instruction of the previous line.
:return: bytes sequence of code with updated lines offsets |
def _cast_dict(self, data_dict):
"""Internal method that makes sure any dictionary elements
are properly cast into the correct types, instead of
just treating everything like a string from the csv file.
Args:
data_dict: dictionary containing bro log data.
Returns:
... | Internal method that makes sure any dictionary elements
are properly cast into the correct types, instead of
just treating everything like a string from the csv file.
Args:
data_dict: dictionary containing bro log data.
Returns:
Cleaned Data dict. |
def renderHTTP(self, context):
"""
Render the wrapped resource if HTTPS is already being used, otherwise
invoke a helper which may generate a redirect.
"""
request = IRequest(context)
if request.isSecure():
renderer = self.wrappedResource
else:
... | Render the wrapped resource if HTTPS is already being used, otherwise
invoke a helper which may generate a redirect. |
def update(self, data, key):
"""Update a key's value's in a JSON file."""
og_data = self.read()
og_data[key] = data
self.write(og_data) | Update a key's value's in a JSON file. |
def populate_parallel_text(extract_dir: str,
file_sets: List[Tuple[str, str, str]],
dest_prefix: str,
keep_separate: bool,
head_n: int = 0):
"""
Create raw parallel train, dev, or test files with a given ... | Create raw parallel train, dev, or test files with a given prefix.
:param extract_dir: Directory where raw files (inputs) are extracted.
:param file_sets: Sets of files to use.
:param dest_prefix: Prefix for output files.
:param keep_separate: True if each file set (source-target pair) should have
... |
def show(self):
"""Print models sorted by metric."""
hyper_combos = itertools.product(*list(self.hyper_params.values()))
if not self.models:
c_values = [[idx + 1, list(val)] for idx, val in enumerate(hyper_combos)]
print(H2OTwoDimTable(
col_header=['Model'... | Print models sorted by metric. |
def jplace_split(self, original_jplace, cluster_dict):
'''
To make GraftM more efficient, reads are dereplicated and merged into
one file prior to placement using pplacer. This function separates the
single jplace file produced by this process into the separate jplace
files, one ... | To make GraftM more efficient, reads are dereplicated and merged into
one file prior to placement using pplacer. This function separates the
single jplace file produced by this process into the separate jplace
files, one per input file (if multiple were provided) and backfills
abundance ... |
def bytes_from_readable_size(C, size, suffix='B'):
"""given a readable_size (as produced by File.readable_size()), return the number of bytes."""
s = re.split("^([0-9\.]+)\s*([%s]?)%s?" % (''.join(C.SIZE_UNITS), suffix), size, flags=re.I)
bytes, unit = round(float(s[1])), s[2].upper()
wh... | given a readable_size (as produced by File.readable_size()), return the number of bytes. |
def count_variants_barplot(data):
""" Return HTML for the Variant Counts barplot """
keys = OrderedDict()
keys['snps'] = {'name': 'SNPs'}
keys['mnps'] = {'name': 'MNPs'}
keys['insertions'] = {'name': 'Insertions'}
keys['deletions'] = {'name': 'Deletions'}
keys['complex'] = {'name': 'Complex'... | Return HTML for the Variant Counts barplot |
def unescape(b, encoding):
'''Unescape all string and unicode literals in bytes.'''
return string_literal_re.sub(
lambda m: unescape_string_literal(m.group(), encoding),
b
) | Unescape all string and unicode literals in bytes. |
def _rescale(self, bands):
""" Rescale bands """
self.output("Rescaling", normal=True, arrow=True)
for key, band in enumerate(bands):
self.output("band %s" % self.bands[key], normal=True, color='green', indent=1)
bands[key] = sktransform.rescale(band, 2)
band... | Rescale bands |
def execute(self):
"""
Execute the actions necessary to perform a `molecule init scenario` and
returns None.
:return: None
"""
scenario_name = self._command_args['scenario_name']
role_name = os.getcwd().split(os.sep)[-1]
role_directory = util.abs_path(os.... | Execute the actions necessary to perform a `molecule init scenario` and
returns None.
:return: None |
def run_doxygen(folder):
"""Run the doxygen make command in the designated folder."""
try:
retcode = subprocess.call("cd %s; make doxygen" % folder, shell=True)
if retcode < 0:
sys.stderr.write("doxygen terminated by signal %s" % (-retcode))
except OSError as e:
sys.stderr.write("doxygen executi... | Run the doxygen make command in the designated folder. |
def l2norm_squared(a):
"""
L2 normalize squared
"""
value = 0
for i in xrange(a.shape[1]):
value += np.dot(a[:,i],a[:,i])
return value | L2 normalize squared |
def close_right(self):
"""
Closes every editors tabs on the left of the current one.
"""
current_widget = self.widget(self.tab_under_menu())
index = self.indexOf(current_widget)
if self._try_close_dirty_tabs(tab_range=range(index + 1, self.count())):
while Tru... | Closes every editors tabs on the left of the current one. |
def h(self):
r"""
Returns the step size to be used in numerical differentiation with
respect to the model parameters.
The step size is given as a vector with length ``n_modelparams`` so
that each model parameter can be weighted independently.
"""
if np.... | r"""
Returns the step size to be used in numerical differentiation with
respect to the model parameters.
The step size is given as a vector with length ``n_modelparams`` so
that each model parameter can be weighted independently. |
def persist_database(metamodel, path, mode='w'):
'''
Persist all instances, class definitions and association definitions in a
*metamodel* by serializing them and saving to a *path* on disk.
'''
with open(path, mode) as f:
for kind in sorted(metamodel.metaclasses.keys()):
metacla... | Persist all instances, class definitions and association definitions in a
*metamodel* by serializing them and saving to a *path* on disk. |
def crop(img, center, sz, mode='constant'):
"""
crop sz from ij as center
:param img:
:param center: ij
:param sz:
:param mode:
:return:
"""
center = np.array(center)
sz = np.array(sz)
istart = (center - sz / 2.).astype('int32')
iend = istart + sz
imsz = img.shape[:2]... | crop sz from ij as center
:param img:
:param center: ij
:param sz:
:param mode:
:return: |
def _notify_exit_thread(self, event):
"""
Notify the termination of a thread.
This is done automatically by the L{Debug} class, you shouldn't need
to call it yourself.
@type event: L{ExitThreadEvent}
@param event: Exit thread event.
@rtype: bool
@retu... | Notify the termination of a thread.
This is done automatically by the L{Debug} class, you shouldn't need
to call it yourself.
@type event: L{ExitThreadEvent}
@param event: Exit thread event.
@rtype: bool
@return: C{True} to call the user-defined handle, C{False} othe... |
def availableRoles(self):
'''
Returns the set of roles for this event. Since roles are not always custom specified for
event, this looks for the set of available roles in multiple places. If no roles are found,
then the method returns an empty list, in which case it can be assumed that... | Returns the set of roles for this event. Since roles are not always custom specified for
event, this looks for the set of available roles in multiple places. If no roles are found,
then the method returns an empty list, in which case it can be assumed that the event's registration
is not role-... |
def create_thumbnail(uuid, thumbnail_width):
"""Create the thumbnail for an image."""
# size = '!' + thumbnail_width + ','
size = thumbnail_width + ',' # flask_iiif doesn't support ! at the moment
thumbnail = IIIFImageAPI.get('v2', uuid, size, 0, 'default', 'jpg') | Create the thumbnail for an image. |
def size(args):
"""
%prog size fastqfile
Find the total base pairs in a list of fastq files
"""
p = OptionParser(size.__doc__)
opts, args = p.parse_args(args)
if len(args) < 1:
sys.exit(not p.print_help())
total_size = total_numrecords = 0
for f in args:
cur_size =... | %prog size fastqfile
Find the total base pairs in a list of fastq files |
def chk_col_numbers(line_num, num_cols, tax_id_col, id_col, symbol_col):
"""
Check that none of the input column numbers is out of range.
(Instead of defining this function, we could depend on Python's built-in
IndexError exception for this issue, but the IndexError exception wouldn't
include line n... | Check that none of the input column numbers is out of range.
(Instead of defining this function, we could depend on Python's built-in
IndexError exception for this issue, but the IndexError exception wouldn't
include line number information, which is helpful for users to find exactly
which line is the c... |
def get_common_password_hash(self, salt):
"""x = H(s | H(I | ":" | P))
:param int salt:
:rtype: int
"""
password = self._password
if password is None:
raise SRPException('User password should be in context for this scenario.')
return self.hash(salt, ... | x = H(s | H(I | ":" | P))
:param int salt:
:rtype: int |
def guess_version_by_running_live_package(
pkg_key, default="?"
): # type: (str,str) -> Any
"""Guess the version of a pkg when pip doesn't provide it.
:param str pkg_key: key of the package
:param str default: default version to return if unable to find
:returns: version
:rtype: string
""... | Guess the version of a pkg when pip doesn't provide it.
:param str pkg_key: key of the package
:param str default: default version to return if unable to find
:returns: version
:rtype: string |
def list(region=None, key=None, keyid=None, profile=None):
'''
List all buckets owned by the authenticated sender of the request.
Returns list of buckets
CLI Example:
.. code-block:: yaml
Owner: {...}
Buckets:
- {...}
- {...}
'''
try:
conn = _... | List all buckets owned by the authenticated sender of the request.
Returns list of buckets
CLI Example:
.. code-block:: yaml
Owner: {...}
Buckets:
- {...}
- {...} |
def manage_request_types_view(request):
''' Manage requests. Display a list of request types with links to edit them.
Also display a link to add a new request type. Restricted to presidents and superadmins.
'''
request_types = RequestType.objects.all()
return render_to_response('manage_request_typ... | Manage requests. Display a list of request types with links to edit them.
Also display a link to add a new request type. Restricted to presidents and superadmins. |
def pip_search(self, search_string=None):
"""Search for pip packages in PyPI matching `search_string`."""
extra_args = ['search', search_string]
return self._call_pip(name='root', extra_args=extra_args,
callback=self._pip_search) | Search for pip packages in PyPI matching `search_string`. |
def first_rec(ofile, Rec, file_type):
"""
opens the file ofile as a magic template file with headers as the keys to Rec
"""
keylist = []
opened = False
# sometimes Windows needs a little extra time to open a file
# or else it throws an error
while not opened:
try:
pma... | opens the file ofile as a magic template file with headers as the keys to Rec |
def draw_on_image(self, image, color=(0, 255, 0), alpha=1.0, size=3,
copy=True, raise_if_out_of_image=False):
"""
Draw the keypoint onto a given image.
The keypoint is drawn as a square.
Parameters
----------
image : (H,W,3) ndarray
The... | Draw the keypoint onto a given image.
The keypoint is drawn as a square.
Parameters
----------
image : (H,W,3) ndarray
The image onto which to draw the keypoint.
color : int or list of int or tuple of int or (3,) ndarray, optional
The RGB color of the k... |
def push_notification_devices_destroy_many(self, data, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/push_notification_devices#bulk-unregister-push-notification-devices"
api_path = "/api/v2/push_notification_devices/destroy_many.json"
return self.call(api_path, method="POST", data... | https://developer.zendesk.com/rest_api/docs/core/push_notification_devices#bulk-unregister-push-notification-devices |
def _set_redist_rip(self, v, load=False):
"""
Setter method for redist_rip, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v6/redist_rip (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_redist_rip is considered as a private
meth... | Setter method for redist_rip, mapped from YANG variable /isis_state/router_isis_config/is_address_family_v6/redist_rip (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_redist_rip is considered as a private
method. Backends looking to populate this variable should
... |
def clean_inputs(data):
"""Clean BED input files to avoid overlapping segments that cause downstream issues.
Per-merges inputs to avoid needing to call multiple times during later parallel steps.
"""
if not utils.get_in(data, ("config", "algorithm", "variant_regions_orig")):
data["config"]["alg... | Clean BED input files to avoid overlapping segments that cause downstream issues.
Per-merges inputs to avoid needing to call multiple times during later parallel steps. |
def Decompress(self, compressed_data):
"""Decompresses the compressed data.
Args:
compressed_data (bytes): compressed data.
Returns:
tuple(bytes, bytes): uncompressed data and remaining compressed data.
Raises:
BackEndError: if the XZ compressed stream cannot be decompressed.
""... | Decompresses the compressed data.
Args:
compressed_data (bytes): compressed data.
Returns:
tuple(bytes, bytes): uncompressed data and remaining compressed data.
Raises:
BackEndError: if the XZ compressed stream cannot be decompressed. |
def GetDatabaseAccount(self, url_connection=None):
"""Gets database account info.
:return:
The Database Account.
:rtype:
documents.DatabaseAccount
"""
if url_connection is None:
url_connection = self.url_connection
initial_headers = ... | Gets database account info.
:return:
The Database Account.
:rtype:
documents.DatabaseAccount |
def count_leaves(x):
"""
Return the number of non-sequence items in a given recursive sequence.
"""
if hasattr(x, 'keys'):
x = list(x.values())
if hasattr(x, '__getitem__'):
return sum(map(count_leaves, x))
return 1 | Return the number of non-sequence items in a given recursive sequence. |
def runCLI():
"""
The starting point for the execution of the Scrapple command line tool.
runCLI uses the docstring as the usage description for the scrapple command. \
The class for the required command is selected by a dynamic dispatch, and the \
command is executed through the execute_command() ... | The starting point for the execution of the Scrapple command line tool.
runCLI uses the docstring as the usage description for the scrapple command. \
The class for the required command is selected by a dynamic dispatch, and the \
command is executed through the execute_command() method of the command clas... |
def delete_namespaced_deployment(self, name, namespace, **kwargs): # noqa: E501
"""delete_namespaced_deployment # noqa: E501
delete a Deployment # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
... | delete_namespaced_deployment # noqa: E501
delete a Deployment # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_deployment(name, namespace, async_req=True)
>... |
def wrap_handler(self, handler, context_switcher):
"""Enable/Disable handler."""
context_switcher.add_context_in(lambda: LOGGER.addHandler(self.handler))
context_switcher.add_context_out(lambda: LOGGER.removeHandler(self.handler)) | Enable/Disable handler. |
def convert_parameters(self, request, *args, **kwargs):
'''
Iterates the urlparams and converts them according to the
type hints in the current view function. This is the primary
function of the class.
'''
args = list(args)
urlparam_i = 0
parameters = se... | Iterates the urlparams and converts them according to the
type hints in the current view function. This is the primary
function of the class. |
def metadata_to_buffers(metadata):
"""
Transform a dict of metadata into a sequence of buffers.
:param metadata: The metadata, as a dict.
:returns: A list of buffers.
"""
results = []
for key, value in metadata.items():
assert len(key) < 256
assert len(value) < 2 ** 32
... | Transform a dict of metadata into a sequence of buffers.
:param metadata: The metadata, as a dict.
:returns: A list of buffers. |
def fit_overlays(self, text, run_matchers=None, **kw):
"""
First all matchers will run and then I will try to combine
them. Use run_matchers to force running(True) or not
running(False) the matchers.
See ListMatcher for arguments.
"""
self._maybe_run_matchers(tex... | First all matchers will run and then I will try to combine
them. Use run_matchers to force running(True) or not
running(False) the matchers.
See ListMatcher for arguments. |
def groupfinder(userid, request):
"""
Default groupfinder implementaion for pyramid applications
:param userid:
:param request:
:return:
"""
if userid and hasattr(request, "user") and request.user:
groups = ["group:%s" % g.id for g in request.user.groups]
return groups
r... | Default groupfinder implementaion for pyramid applications
:param userid:
:param request:
:return: |
def update(self, pbar):
'Updates the widget with the current SI prefixed speed.'
if pbar.seconds_elapsed < 2e-6 or pbar.currval < 2e-6: # =~ 0
scaled = power = 0
else:
speed = pbar.currval / pbar.seconds_elapsed
power = int(math.log(speed, 1000))
... | Updates the widget with the current SI prefixed speed. |
def uniform_random_global_network(loc=2000, scale=250, n=100):
"""
Returns an array of `n` uniformally randomly distributed `shapely.geometry.Point` objects.
"""
arr = (np.random.normal(loc, scale, n)).astype(int)
return pd.DataFrame(data={'mock_variable': arr,
'from': ... | Returns an array of `n` uniformally randomly distributed `shapely.geometry.Point` objects. |
def search_profiles(
self,
parent,
request_metadata,
profile_query=None,
page_size=None,
offset=None,
disable_spell_check=None,
order_by=None,
case_sensitive_sort=None,
histogram_queries=None,
retry=google.api_core.gapic_v1.method.D... | Searches for profiles within a tenant.
For example, search by raw queries "software engineer in Mountain View"
or search by structured filters (location filter, education filter,
etc.).
See ``SearchProfilesRequest`` for more information.
Example:
>>> from google.cl... |
def fail(self, message, status=500, **kw):
"""Set a JSON error object and a status to the response
"""
self.request.response.setStatus(status)
result = {"success": False, "errors": message, "status": status}
result.update(kw)
return result | Set a JSON error object and a status to the response |
def load_structure_from_file(context: InstaloaderContext, filename: str) -> JsonExportable:
"""Loads a :class:`Post`, :class:`Profile` or :class:`StoryItem` from a '.json' or '.json.xz' file that
has been saved by :func:`save_structure_to_file`.
:param context: :attr:`Instaloader.context` linked to the new... | Loads a :class:`Post`, :class:`Profile` or :class:`StoryItem` from a '.json' or '.json.xz' file that
has been saved by :func:`save_structure_to_file`.
:param context: :attr:`Instaloader.context` linked to the new object, used for additional queries if neccessary.
:param filename: Filename, ends in '.json' ... |
def parse(cls, value, default=_no_default):
"""Parses a flag integer or string into a Flags instance.
Accepts the following types:
- Members of this enum class. These are returned directly.
- Integers. These are converted directly into a Flags instance with the given name.
- Str... | Parses a flag integer or string into a Flags instance.
Accepts the following types:
- Members of this enum class. These are returned directly.
- Integers. These are converted directly into a Flags instance with the given name.
- Strings. The function accepts a comma-delimited list of fl... |
def usedoc(other):
'''
Decorator which copies __doc__ of given object into decorated one.
Usage:
>>> def fnc1():
... """docstring"""
... pass
>>> @usedoc(fnc1)
... def fnc2():
... pass
>>> fnc2.__doc__
'docstring'collections.abc.D
:param other: anything wit... | Decorator which copies __doc__ of given object into decorated one.
Usage:
>>> def fnc1():
... """docstring"""
... pass
>>> @usedoc(fnc1)
... def fnc2():
... pass
>>> fnc2.__doc__
'docstring'collections.abc.D
:param other: anything with a __doc__ attribute
:type... |
def get(name, function=None):
"""Get a setting.
`name` should be the name of the setting to look for. If the
optional argument `function` is passed, this will look for a
value local to the function before retrieving the global
value.
"""
if function is not None:... | Get a setting.
`name` should be the name of the setting to look for. If the
optional argument `function` is passed, this will look for a
value local to the function before retrieving the global
value. |
def verify_fft_options(opt, parser):
"""Parses the FFT options and verifies that they are
reasonable.
Parameters
----------
opt : object
Result of parsing the CLI with OptionParser, or any object with the
required attributes.
parser : object
OptionParser instance.
... | Parses the FFT options and verifies that they are
reasonable.
Parameters
----------
opt : object
Result of parsing the CLI with OptionParser, or any object with the
required attributes.
parser : object
OptionParser instance. |
def get_closed_indices(self):
"""
Get all closed indices.
"""
state = self.conn.cluster.state()
status = self.status()
indices_metadata = set(state['metadata']['indices'].keys())
indices_status = set(status['indices'].keys())
return indices_metadata.diff... | Get all closed indices. |
def _get_gs_path():
"""Guess where the Ghostscript executable is
and return its absolute path name."""
path = os.environ.get("PATH", os.defpath)
for dir in path.split(os.pathsep):
for name in ("gs", "gs.exe", "gswin32c.exe"):
g = os.path.join(dir, name)
if os.path.exists... | Guess where the Ghostscript executable is
and return its absolute path name. |
def update_where(self, res, depth=0, since=None, **kwargs):
"Like update() but uses WHERE-style args"
fetch = lambda: self._fetcher.fetch_all_latest(res, 0, kwargs, since=since)
self._update(res, fetch, depth) | Like update() but uses WHERE-style args |
def subst_path(self, path, target=None, source=None):
"""Substitute a path list, turning EntryProxies into Nodes
and leaving Nodes (and other objects) as-is."""
if not SCons.Util.is_List(path):
path = [path]
def s(obj):
"""This is the "string conversion" routine... | Substitute a path list, turning EntryProxies into Nodes
and leaving Nodes (and other objects) as-is. |
def thermal_conductivity_Magomedov(T, P, ws, CASRNs, k_w=None):
r'''Calculate the thermal conductivity of an aqueous mixture of
electrolytes using the form proposed by Magomedov [1]_.
Parameters are loaded by the function as needed. Function will fail if an
electrolyte is not in the database.
.. ma... | r'''Calculate the thermal conductivity of an aqueous mixture of
electrolytes using the form proposed by Magomedov [1]_.
Parameters are loaded by the function as needed. Function will fail if an
electrolyte is not in the database.
.. math::
\lambda = \lambda_w\left[ 1 - \sum_{i=1}^n A_i (w_i + 2... |
def all_pairs(seq1, seq2=None):
"""Yields all pairs drawn from ``seq1`` and ``seq2``.
If ``seq2`` is ``None``, ``seq2 = seq1``.
>>> stop_at.ed(all_pairs(xrange(100000), xrange(100000)), 8)
((0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (0, 7))
"""
if seq2 is None: seq2 = seq1... | Yields all pairs drawn from ``seq1`` and ``seq2``.
If ``seq2`` is ``None``, ``seq2 = seq1``.
>>> stop_at.ed(all_pairs(xrange(100000), xrange(100000)), 8)
((0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (0, 7)) |
def set(self, prefix, url, obj):
""" Add an object into the cache """
if not self.cache_dir:
return
filename = self._get_cache_file(prefix, url)
try:
os.makedirs(os.path.join(self.cache_dir, prefix))
except OSError:
pass
with open(fi... | Add an object into the cache |
def widget_from_django_field(cls, f, default=widgets.Widget):
"""
Returns the widget that would likely be associated with each
Django type.
Includes mapping of Postgres Array and JSON fields. In the case that
psycopg2 is not installed, we consume the error and process the field
... | Returns the widget that would likely be associated with each
Django type.
Includes mapping of Postgres Array and JSON fields. In the case that
psycopg2 is not installed, we consume the error and process the field
regardless. |
def floating_ip_disassociate(self, server_name, floating_ip):
'''
Disassociate a floating IP from server
.. versionadded:: 2016.3.0
'''
nt_ks = self.compute_conn
server_ = self.server_by_name(server_name)
server = nt_ks.servers.get(server_.__dict__['id'])
... | Disassociate a floating IP from server
.. versionadded:: 2016.3.0 |
def arr_astype(arr_type): # function factory
'''Change dtype of array.
Parameters:
arr_type : str, np.dtype
Character codes (e.g. 'b', '>H'), type strings (e.g. 'i4', 'f8'), Python types (e.g. float, int) and numpy dtypes (e.g. np.uint32) are allowed.
Returns:
array : np.array
... | Change dtype of array.
Parameters:
arr_type : str, np.dtype
Character codes (e.g. 'b', '>H'), type strings (e.g. 'i4', 'f8'), Python types (e.g. float, int) and numpy dtypes (e.g. np.uint32) are allowed.
Returns:
array : np.array |
def _init_display(self):
"""!
\~english
Initialize the SSD1306 display chip
\~chinese
初始化SSD1306显示芯片
"""
self._command([
# 0xAE
self.CMD_SSD1306_DISPLAY_OFF,
#Stop Scroll
self.CMD_SSD1306_SET_SCROLL_DEACTIVE,
... | !
\~english
Initialize the SSD1306 display chip
\~chinese
初始化SSD1306显示芯片 |
def _discovery(self):
"""
Find other servers asking nodes to given server
"""
data = self.cluster_nodes()
self.cluster_name = data["cluster_name"]
for _, nodedata in list(data["nodes"].items()):
server = nodedata['http_address'].replace("]", "").replace("inet[... | Find other servers asking nodes to given server |
def get(self, key, default=NoDefault):
"""Retrieve a value from its key.
Retrieval steps are:
1) Normalize the key
2) For each option group:
a) Retrieve the value at that key
b) If no value exists, continue
c) If the value is an instance of 'Default', co... | Retrieve a value from its key.
Retrieval steps are:
1) Normalize the key
2) For each option group:
a) Retrieve the value at that key
b) If no value exists, continue
c) If the value is an instance of 'Default', continue
d) Otherwise, return the value
... |
def drilldown_tree(self, session=None, json=False, json_fields=None):
""" This method generate a branch from a tree, begining with current
node.
For example:
node7.drilldown_tree()
.. code::
level Nested sets example
1 ... | This method generate a branch from a tree, begining with current
node.
For example:
node7.drilldown_tree()
.. code::
level Nested sets example
1 1(1)22 ---------------------
___________... |
def authenticate(self):
"""Attempts to authenticate the user if a token was provided."""
if request.headers.get('Authorization', '').startswith('Negotiate '):
in_token = base64.b64decode(request.headers['Authorization'][10:])
try:
creds = current_app.extensions['... | Attempts to authenticate the user if a token was provided. |
def make_gui(self):
""" Setups the general structure of the gui, the first function called """
self.option_window = Toplevel()
self.option_window.protocol("WM_DELETE_WINDOW", self.on_exit)
self.canvas_frame = tk.Frame(self, height=500)
self.option_frame = tk.Frame(self.option_win... | Setups the general structure of the gui, the first function called |
def check_if_ready(self):
"""Check for and fetch the results if ready."""
try:
results = self.manager.check(self.results_id)
except exceptions.ResultsNotReady as e:
self._is_ready = False
self._not_ready_exception = e
except exceptions.ResultsExpired a... | Check for and fetch the results if ready. |
def update_distant_reference(self, ref):
"""Validate and update the reference in Zotero.
Existing fields not present will be left unmodified.
"""
self.validate_reference_data(ref["data"])
self._zotero_lib.update_item(ref) | Validate and update the reference in Zotero.
Existing fields not present will be left unmodified. |
def show_tracebacks(self):
""" Show tracebacks """
if self.broker.tracebacks:
print(file=self.stream)
print("Tracebacks:", file=self.stream)
for t in self.broker.tracebacks.values():
print(t, file=self.stream) | Show tracebacks |
def get_release_environment(self, project, release_id, environment_id):
"""GetReleaseEnvironment.
[Preview API] Get a release environment.
:param str project: Project ID or project name
:param int release_id: Id of the release.
:param int environment_id: Id of the release environ... | GetReleaseEnvironment.
[Preview API] Get a release environment.
:param str project: Project ID or project name
:param int release_id: Id of the release.
:param int environment_id: Id of the release environment.
:rtype: :class:`<ReleaseEnvironment> <azure.devops.v5_1.release.model... |
def get_filter_func(patterns, prefix):
"""
Provides a filter function that can be used as filter argument on ``tarfile.add``. Generates the filter based on
the patterns and prefix provided. Patterns should be a list of tuples. Each tuple consists of a compiled RegEx
pattern and a boolean, indicating if ... | Provides a filter function that can be used as filter argument on ``tarfile.add``. Generates the filter based on
the patterns and prefix provided. Patterns should be a list of tuples. Each tuple consists of a compiled RegEx
pattern and a boolean, indicating if it is an ignore entry or a negative exclusion (i.e.... |
def add_header_info(data_api, struct_inflator):
""" Add ancilliary header information to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object
"""
struct_inflator.set_header_info(data_api.r_free,
... | Add ancilliary header information to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object |
def cmdloop(self, intro=None):
''' Override the command loop to handle Ctrl-C. '''
self.preloop()
# Set up completion with readline.
if self.use_rawinput and self.completekey:
try:
import readline
self.old_completer = readline.get_completer()
... | Override the command loop to handle Ctrl-C. |
def accessibles(self, roles=None):
"""
Returns the list of *slugs* for which the accounts are accessibles
by ``request.user`` filtered by ``roles`` if present.
"""
return [org['slug']
for org in self.get_accessibles(self.request, roles=roles)] | Returns the list of *slugs* for which the accounts are accessibles
by ``request.user`` filtered by ``roles`` if present. |
def update_firewall_rule(firewall_rule,
protocol=None,
action=None,
name=None,
description=None,
ip_version=None,
source_ip_address=None,
destina... | Update a firewall rule
CLI Example:
.. code-block:: bash
salt '*' neutron.update_firewall_rule firewall_rule protocol=PROTOCOL action=ACTION
name=NAME description=DESCRIPTION ip_version=IP_VERSION
source_ip_address=SOURCE_IP_ADDRESS destination_ip_address=DESTINATION_I... |
def get_version(brain_or_object):
"""Get the version of the current object
:param brain_or_object: A single catalog brain or content object
:type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
:returns: The current version of the object, or None if not available
:rtype: int or Non... | Get the version of the current object
:param brain_or_object: A single catalog brain or content object
:type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
:returns: The current version of the object, or None if not available
:rtype: int or None |
def removeSessionWithKey(self, key):
    """
    Delete the persistent session identified by C{key}, if one exists.
    @type key: L{bytes}
    @param key: The persistent session identifier.
    """
    # Querying by sessionKey and deleting matches is a no-op when no
    # session with that key is stored.
    matching = self.store.query(
        PersistentSession, PersistentSession.sessionKey == key)
    matching.deleteFromStore()  # | Remove a persistent session, if it exists.
@type key: L{bytes}
@param key: The persistent session identifier. |
def _method_complete(self, result):
"""Called after an extention method with the result."""
if isinstance(result, PrettyTensor):
self._head = result
return self
elif isinstance(result, Loss):
return result
elif isinstance(result, PrettyTensorTupleMixin):
self._head = result[0]
... | Called after an extention method with the result. |
def _load_actor_from_local(self, driver_id, function_descriptor):
"""Load actor class from local code."""
module_name, class_name = (function_descriptor.module_name,
function_descriptor.class_name)
try:
module = importlib.import_module(module_name)
... | Load actor class from local code. |
def locked_get(self):
"""Retrieve Credential from file.
Returns:
oauth2client.client.Credentials
Raises:
IOError if the file is a symbolic link.
"""
credentials = None
_helpers.validate_file(self._filename)
try:
f = open(self.... | Retrieve Credential from file.
Returns:
oauth2client.client.Credentials
Raises:
IOError if the file is a symbolic link. |
def _wraptext(self, text, indent=0, width=0):
"""Shorthand for '\n'.join(self._wrap(par, indent, width) for par in text)."""
return '\n'.join(self._wrap(par, indent, width) for par in text) | Shorthand for '\n'.join(self._wrap(par, indent, width) for par in text). |
def customFilter(self, filterFunc):
'''
customFilter - Apply a custom filter to elements and return a QueryableList of matches
@param filterFunc <lambda/function< - A lambda/function that is passed an item, and
returns True if the item matches (will be returned), otherwis... | customFilter - Apply a custom filter to elements and return a QueryableList of matches
@param filterFunc <lambda/function< - A lambda/function that is passed an item, and
returns True if the item matches (will be returned), otherwise False.
@return - A QueryableList object of th... |
def clear_processes(self):
    """
    Empty this snapshot: clears every L{Process} (together with its
    L{Thread} and L{Module} objects) and resets the process map.
    """
    for process in self.iter_processes():
        process.clear()
    self.__processDict = dict()  # | Removes all L{Process}, L{Thread} and L{Module} objects in this snapshot.
def populate_field_list(self, excluded_fields=None):
"""Helper to add field of the layer to the list.
:param excluded_fields: List of field that want to be excluded.
:type excluded_fields: list
"""
# Populate fields list
if excluded_fields is None:
excluded_f... | Helper to add field of the layer to the list.
:param excluded_fields: List of field that want to be excluded.
:type excluded_fields: list |
def runif(self, seed=None):
"""
Generate a column of random numbers drawn from a uniform distribution [0,1) and
having the same data layout as the source frame.
:param int seed: seed for the random number generator.
:returns: Single-column H2OFrame filled with doubles sampled u... | Generate a column of random numbers drawn from a uniform distribution [0,1) and
having the same data layout as the source frame.
:param int seed: seed for the random number generator.
:returns: Single-column H2OFrame filled with doubles sampled uniformly from [0,1). |
def collect_ansible_classes():
"""Run playbook and collect classes of ansible that are run."""
def trace_calls(frame, event, arg): # pylint: disable=W0613
"""Trace function calls to collect ansible classes.
Trace functions and check if they have self as an arg. If so, get their class if the
... | Run playbook and collect classes of ansible that are run. |
def logspace(self,bins=None,units=None,conversion_function=convert_time,resolution=None,end_at_end=True):
""" bins overwrites resolution """
if type(bins) in [list, np.ndarray]:
return bins
min = conversion_function(self.min,from_units=self.units,to_units=units)
max = convers... | bins overwrites resolution |
def send_await(self, msg, deadline=None):
"""
Like :meth:`send_async`, but expect a single reply (`persist=False`)
delivered within `deadline` seconds.
:param mitogen.core.Message msg:
The message.
:param float deadline:
If not :data:`None`, seconds befor... | Like :meth:`send_async`, but expect a single reply (`persist=False`)
delivered within `deadline` seconds.
:param mitogen.core.Message msg:
The message.
:param float deadline:
If not :data:`None`, seconds before timing out waiting for a reply.
:returns:
... |
def _head_object(s3_conn, bucket, key):
"""Retrieve information about an object in S3 if it exists.
Args:
s3_conn (botocore.client.S3): S3 connection to use for operations.
bucket (str): name of the bucket containing the key.
key (str): name of the key to lookup.
Returns:
d... | Retrieve information about an object in S3 if it exists.
Args:
s3_conn (botocore.client.S3): S3 connection to use for operations.
bucket (str): name of the bucket containing the key.
key (str): name of the key to lookup.
Returns:
dict: S3 object information, or None if the obje... |
def getAllData(self, temp = True, accel = True, gyro = True):
"""!
Get all the available data.
@param temp: True - Allow to return Temperature data
@param accel: True - Allow to return Accelerometer data
@param gyro: True - Allow to return Gyroscope data
@return a dicti... | !
Get all the available data.
@param temp: True - Allow to return Temperature data
@param accel: True - Allow to return Accelerometer data
@param gyro: True - Allow to return Gyroscope data
@return a dictionary data
@retval {} Did not read any data
@retv... |
def refresh(self):
'''Refetch instance data from the API.
'''
response = requests.get('%s/categories/%s' % (API_BASE_URL, self.name))
attributes = response.json()
self.ancestors = [Category(name) for name in attributes['ancestors']]
self.contents = WikiText(attributes['content... | Refetch instance data from the API. |
def _raise_unrecoverable_error_payplug(self, exception):
"""
Raises an exceptions.ClientError with a message telling that the error probably comes from PayPlug.
:param exception: Exception that caused the ClientError.
:type exception: Exception
:raise exceptions.ClientError
... | Raises an exceptions.ClientError with a message telling that the error probably comes from PayPlug.
:param exception: Exception that caused the ClientError.
:type exception: Exception
:raise exceptions.ClientError |
def GetLastKey(self, voice=1):
"""key as in musical key, not index"""
voice_obj = self.GetChild(voice)
if voice_obj is not None:
key = BackwardSearch(KeyNode, voice_obj, 1)
if key is not None:
return key
else:
if hasattr(self, ... | key as in musical key, not index |
def receive_message(self, message, data): # noqa: E501 pylint: disable=too-many-return-statements
""" Called when a multizone message is received. """
if data[MESSAGE_TYPE] == TYPE_DEVICE_ADDED:
uuid = data['device']['deviceId']
name = data['device']['name']
self._ad... | Called when a multizone message is received. |
def format_hexdump(arg):
"""Convert the bytes object to a hexdump.
The output format will be:
<offset, 4-byte> <16-bytes of output separated by 1 space> <16 ascii characters>
"""
line = ''
for i in range(0, len(arg), 16):
if i > 0:
line += '\n'
chunk = arg[i:i +... | Convert the bytes object to a hexdump.
The output format will be:
<offset, 4-byte> <16-bytes of output separated by 1 space> <16 ascii characters> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.