code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def numpymat2df(mat):
"""
Sometimes (though not very often) it is useful to convert a numpy matrix
which has no column names to a Pandas dataframe for use of the Pandas
functions. This method converts a 2D numpy matrix to Pandas dataframe
with default column headers.
Parameters
----------
... | Sometimes (though not very often) it is useful to convert a numpy matrix
which has no column names to a Pandas dataframe for use of the Pandas
functions. This method converts a 2D numpy matrix to Pandas dataframe
with default column headers.
Parameters
----------
mat : The numpy matrix
Re... |
def set_settings_env(executable_folder=None):
"""
Add all application folders
:param executable_folder: the folder that contains local and external_app_repos
:return:
"""
executable_folder = executable_folder or get_executable_folder()
# print "!!!!!!!!!!!!!! exec... | Add all application folders
:param executable_folder: the folder that contains local and external_app_repos
:return: |
def build(self):
"""
Builds this object into the desired output information.
"""
signed = bool(self.options() & Builder.Options.Signed)
# remove previous build information
buildpath = self.buildPath()
if not buildpath:
raise errors.InvalidBuildPath(bu... | Builds this object into the desired output information. |
def generate_key_data_from_nonce(server_nonce, new_nonce):
"""Generates the key data corresponding to the given nonce"""
server_nonce = server_nonce.to_bytes(16, 'little', signed=True)
new_nonce = new_nonce.to_bytes(32, 'little', signed=True)
hash1 = sha1(new_nonce + server_nonce).digest()
hash2 = s... | Generates the key data corresponding to the given nonce |
def set_parent_on_new(self, parentrefobj):
"""Contextmanager that on close will get all new
unwrapped refobjects, and for every refobject with no parent
sets is to the given one.
:returns: None
:rtype: None
:raises: None
"""
refobjinter = self.get_refobji... | Contextmanager that on close will get all new
unwrapped refobjects, and for every refobject with no parent
sets is to the given one.
:returns: None
:rtype: None
:raises: None |
def oggvorbis(s):
"""
This is taken from the ogg vorbis spec
(http://xiph.org/vorbis/doc/Vorbis_I_spec.html)
:param s: the total length of the window, in samples
"""
try:
s = np.arange(s)
except TypeError:
s = np.arange(s[0])
i = np.sin((s + .5) / len(s) * np.pi) ** 2
... | This is taken from the ogg vorbis spec
(http://xiph.org/vorbis/doc/Vorbis_I_spec.html)
:param s: the total length of the window, in samples |
def start(self, io_loop):
"""
Run the ``before_run`` callbacks and queue to ``on_start`` callbacks.
:param tornado.ioloop.IOLoop io_loop: loop to start the app on.
"""
for callback in self.before_run_callbacks:
try:
callback(self.tornado_application,... | Run the ``before_run`` callbacks and queue to ``on_start`` callbacks.
:param tornado.ioloop.IOLoop io_loop: loop to start the app on. |
def update(self, list_id, subscriber_hash, data):
"""
Update tags for a specific subscriber.
The documentation lists only the tags request body parameter so it is
being documented and error-checked as if it were required based on the
description of the method.
The data ... | Update tags for a specific subscriber.
The documentation lists only the tags request body parameter so it is
being documented and error-checked as if it were required based on the
description of the method.
The data list needs to include a "status" key. This determines if the
t... |
def get_host_port_names(self, host_name):
""" return a list of the port names of XIV host """
port_names = list()
host = self.get_hosts_by_name(host_name)
fc_ports = host.fc_ports
iscsi_ports = host.iscsi_ports
port_names.extend(fc_ports.split(',') if fc_ports != ''... | return a list of the port names of XIV host |
def parse(chord):
""" Parse a string to get chord component
:param str chord: str expression of a chord
:rtype: (str, pychord.Quality, str, str)
:return: (root, quality, appended, on)
"""
if len(chord) > 1 and chord[1] in ("b", "#"):
root = chord[:2]
rest = chord[2:]
else:
... | Parse a string to get chord component
:param str chord: str expression of a chord
:rtype: (str, pychord.Quality, str, str)
:return: (root, quality, appended, on) |
def row_sparse_array(arg1, shape=None, ctx=None, dtype=None):
"""Creates a `RowSparseNDArray`, a multidimensional row sparse array with a set of \
tensor slices at given indices.
The RowSparseNDArray can be instantiated in several ways:
- row_sparse_array(D):
to construct a RowSparseNDArray wi... | Creates a `RowSparseNDArray`, a multidimensional row sparse array with a set of \
tensor slices at given indices.
The RowSparseNDArray can be instantiated in several ways:
- row_sparse_array(D):
to construct a RowSparseNDArray with a dense ndarray ``D``
- **D** (*array_like*) - An object ... |
def storage_at_hvmv_substation(mv_grid, parameters, mode=None):
"""
Place storage at HV/MV substation bus bar.
Parameters
----------
mv_grid : :class:`~.grid.grids.MVGrid`
MV grid instance
parameters : :obj:`dict`
Dictionary with storage parameters. Must at least contain
... | Place storage at HV/MV substation bus bar.
Parameters
----------
mv_grid : :class:`~.grid.grids.MVGrid`
MV grid instance
parameters : :obj:`dict`
Dictionary with storage parameters. Must at least contain
'nominal_power'. See :class:`~.grid.network.StorageControl` for more
... |
def dict(self):
"""A dict that holds key/values for all of the properties in the
object.
:return:
"""
SKIP_KEYS = ('_source_table', '_dest_table', 'd_vid', 't_vid', 'st_id',
'dataset', 'hash', 'process_records')
return OrderedDict([(k, getattr(self,... | A dict that holds key/values for all of the properties in the
object.
:return: |
def save(self, *args, **kwargs):
"""
**uid**: :code:`{office.uid}_{cycle.uid}_race`
"""
self.uid = '{}_{}_race'.format(
self.office.uid,
self.cycle.uid
)
name_label = '{0} {1}'.format(
self.cycle.name,
self.office.label
... | **uid**: :code:`{office.uid}_{cycle.uid}_race` |
def apply_inverse(self, y):
"""
Self-consistently apply the inverse of the computed kernel matrix to
some vector or matrix of samples. This method subtracts the mean,
sorts the samples, then returns the samples in the correct (unsorted)
order.
:param y: ``(nsamples, )`` ... | Self-consistently apply the inverse of the computed kernel matrix to
some vector or matrix of samples. This method subtracts the mean,
sorts the samples, then returns the samples in the correct (unsorted)
order.
:param y: ``(nsamples, )`` or ``(nsamples, K)``
The vector (or ... |
def draw_graph(matrix, clusters, **kwargs):
"""
Visualize the clustering
:param matrix: The unprocessed adjacency matrix
:param clusters: list of tuples containing clusters as returned
by 'get_clusters'
:param kwargs: Additional keyword arguments to be passed to
... | Visualize the clustering
:param matrix: The unprocessed adjacency matrix
:param clusters: list of tuples containing clusters as returned
by 'get_clusters'
:param kwargs: Additional keyword arguments to be passed to
networkx.draw_networkx |
def makedirs(path, mode=0o777, exist_ok=False):
    """Thin wrapper around :func:`os.makedirs` with the same defaults."""
    os.makedirs(path, mode=mode, exist_ok=exist_ok)
def create_run(cls, *args, **kwargs):
"""
:return:
a delegator function that calls the ``cls`` constructor whose arguments being
a seed tuple followed by supplied ``*args`` and ``**kwargs``, then returns
the object's ``run`` method. By default, a thread wrapping that ``run`` method
... | :return:
a delegator function that calls the ``cls`` constructor whose arguments being
a seed tuple followed by supplied ``*args`` and ``**kwargs``, then returns
the object's ``run`` method. By default, a thread wrapping that ``run`` method
is spawned. |
def set_pump_status(self, status):
    """Store the new pump status on this instance and log the change."""
    self.pump_status = status
    # Log the partition so the message can be correlated with its lease.
    _logger.info("%r partition %r", status, self.lease.partition_id)
def feed_data(self, data: bytes) -> None:
    """Forward *data* to the underlying parser, if one is attached."""
    parser = self._parser
    if parser is None:
        return
    parser.feed_data(data)
def expectedLabelPosition(peptide, labelStateInfo, sequence=None,
modPositions=None):
"""Returns a modification description of a certain label state of a peptide.
:param peptide: Peptide sequence used to calculat the expected label state
modifications
:param labelStateInfo... | Returns a modification description of a certain label state of a peptide.
:param peptide: Peptide sequence used to calculat the expected label state
modifications
:param labelStateInfo: An entry of :attr:`LabelDescriptor.labels` that
describes a label state
:param sequence: unmodified amino... |
def ask_confirmation():
"""Ask for confirmation to the user. Return true if the user confirmed
the execution, false otherwise.
:returns: bool
"""
while True:
print("Do you want to restart these brokers? ", end="")
choice = input().lower()
if choice in ['yes', 'y']:
... | Ask for confirmation to the user. Return true if the user confirmed
the execution, false otherwise.
:returns: bool |
def purge(self):
""" Delete PARTIAL data files and remove torrent from client.
"""
def partial_file(item):
"Filter out partial files"
#print "???", repr(item)
return item.completed_chunks < item.size_chunks
self.cull(file_filter=partial_file, attrs=["... | Delete PARTIAL data files and remove torrent from client. |
def QueueResponse(self, response, timestamp=None):
    """Queues the message on the flow's state.

    A missing timestamp defaults to the flow's frozen timestamp.
    """
    effective_ts = self.frozen_timestamp if timestamp is None else timestamp
    self.response_queue.append((response, effective_ts))
def _connect_to_ec2(region, credentials):
"""
:param region: The region of AWS to connect to.
:param EC2Credentials credentials: The credentials to use to authenticate
with EC2.
:return: a connection object to AWS EC2
"""
conn = boto.ec2.connect_to_region(
region,
aws_ac... | :param region: The region of AWS to connect to.
:param EC2Credentials credentials: The credentials to use to authenticate
with EC2.
:return: a connection object to AWS EC2 |
def is_continuous(docgraph, dominating_node):
"""return True, if the tokens dominated by the given node are all adjacent"""
first_onset, last_offset = get_span_offsets(docgraph, dominating_node)
span_range = xrange(first_onset, last_offset+1)
token_offsets = (docgraph.get_offsets(tok)
... | return True, if the tokens dominated by the given node are all adjacent |
def load(self, id=None):
"""Load from database. Old values will be discarded."""
if id is not None:
# We are asked to change our ID to something else
self.reset()
self._setID(id)
if not self._new and self._validID():
self._loadDB()
self._up... | Load from database. Old values will be discarded. |
def as_array(self, transpose=False, items=None):
"""Convert the blockmanager data into an numpy array.
Parameters
----------
transpose : boolean, default False
If True, transpose the return array
items : list of strings or None
Names of block items that w... | Convert the blockmanager data into an numpy array.
Parameters
----------
transpose : boolean, default False
If True, transpose the return array
items : list of strings or None
Names of block items that will be included in the returned
array. ``None`` ... |
def build_authorization_endpoint(self, request, disable_sso=None):
"""
This function returns the ADFS authorization URL.
Args:
request(django.http.request.HttpRequest): A django Request object
disable_sso(bool): Whether to disable single sign-on and force the ADFS server... | This function returns the ADFS authorization URL.
Args:
request(django.http.request.HttpRequest): A django Request object
disable_sso(bool): Whether to disable single sign-on and force the ADFS server to show a login prompt.
Returns:
str: The redirect URI |
def rotate(self, count=1, with_pane_before_only=False, with_pane_after_only=False):
"""
Rotate panes.
When `with_pane_before_only` or `with_pane_after_only` is True, only rotate
with the pane before/after the active pane.
"""
# Create (split, index, pane, weight) tuples.
... | Rotate panes.
When `with_pane_before_only` or `with_pane_after_only` is True, only rotate
with the pane before/after the active pane. |
def db_from_dataframes(
db_filename,
dataframes,
primary_keys={},
indices={},
subdir=None,
overwrite=False,
version=1):
"""
Create a sqlite3 database from a collection of DataFrame objects
Parameters
----------
db_filename : str
Name o... | Create a sqlite3 database from a collection of DataFrame objects
Parameters
----------
db_filename : str
Name of database file to create
dataframes : dict
Dictionary from table names to DataFrame objects
primary_keys : dict, optional
Name of primary key column for each tab... |
def print_yielded(func):
"""
Convert a generator into a function that prints all yielded elements
>>> @print_yielded
... def x():
... yield 3; yield None
>>> x()
3
None
"""
print_all = functools.partial(map, print)
print_results = compose(more_itertools.recipes.consume, print_all, func)
return functool... | Convert a generator into a function that prints all yielded elements
>>> @print_yielded
... def x():
... yield 3; yield None
>>> x()
3
None |
def add_permission(self):
"""Add permission to Lambda for the API Trigger."""
statement_id = '{}_api_{}'.format(self.app_name, self.trigger_settings['api_name'])
principal = 'apigateway.amazonaws.com'
lambda_alias_arn = get_lambda_alias_arn(self.app_name, self.env, self.region)
l... | Add permission to Lambda for the API Trigger. |
def _storage_attach(self, params):
    """
    Change storage medium in this VM.

    :param params: parameter string passed to the ``storageattach`` sub-command
    """
    extra_args = shlex.split(params)
    # Delegate to the manager, targeting this VM by name.
    yield from self.manager.execute("storageattach", [self._vmname] + extra_args)
def pool_define(name,
ptype,
target=None,
permissions=None,
source_devices=None,
source_dir=None,
source_adapter=None,
source_hosts=None,
source_auth=None,
source_name=None,
... | Create libvirt pool.
:param name: Pool name
:param ptype: Pool type. See `libvirt documentation
<https://libvirt.org/storage.html>`_ for the possible values.
:param target: Pool full path target
:param permissions:
Permissions to set on the target folder. This is mostly used for
... |
def dump(self, indentation=0):
"""Returns a string representation of the structure."""
dump = []
dump.append('[{0}]'.format(self.name))
printable_bytes = [ord(i) for i in string.printable if i not in string.whitespace]
# Refer to the __set_format__ method for an explanation
... | Returns a string representation of the structure. |
def getData(self,exten=None):
""" Return just the data array from the specified extension
fileutil is used instead of fits to account for non-
FITS input images. openImage returns a fits object.
"""
if exten.lower().find('sci') > -1:
# For SCI extensions, the ... | Return just the data array from the specified extension
fileutil is used instead of fits to account for non-
FITS input images. openImage returns a fits object. |
def next_retrieve_group_item(self, last_item=None, entry=None):
"""Return the item to start from in next reviews group."""
next_item = None
gerrit_version = self.version
if gerrit_version[0] == 2 and gerrit_version[1] > 9:
if last_item is None:
next_item = ... | Return the item to start from in next reviews group. |
def GetData(fitsfile, EPIC, campaign, clobber=False,
saturation_tolerance=-0.1,
bad_bits=[1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 16, 17],
get_hires=False, get_nearby=False,
aperture=None, **kwargs):
'''
Returns a :py:obj:`DataContainer` instance with the
r... | Returns a :py:obj:`DataContainer` instance with the
raw data for the target.
:param str fitsfile: The full raw target pixel file path
:param bool clobber: Overwrite existing files? Default :py:obj:`False`
:param float saturation_tolerance: Target is considered saturated \
if flux is within t... |
def direct_perms_for_user(cls, instance, user, db_session=None):
"""
returns permissions that given user has for this resource
without ones inherited from groups that user belongs to
:param instance:
:param user:
:param db_session:
:return:
"""
... | returns permissions that given user has for this resource
without ones inherited from groups that user belongs to
:param instance:
:param user:
:param db_session:
:return: |
def get_decimal_time(self):
'''
Returns the time of the catalogue as a decimal
'''
return decimal_time(self.data['year'],
self.data['month'],
self.data['day'],
self.data['hour'],
... | Returns the time of the catalogue as a decimal |
def is_location(v) -> (bool, str):
"""
Boolean function for checking if v is a location format
Args:
v:
Returns: bool
"""
def convert2float(value):
try:
float_num = float(value)
return float_num
except... | Boolean function for checking if v is a location format
Args:
v:
Returns: bool |
def winapi(context, names):
"""Query Win32 API declarations.
Windows database must be prepared before using this.
"""
logging.info(_('Entering winapi mode'))
sense = context.obj['sense']
none = True
for name in names:
code = sense.query_args(name)
if code:
none =... | Query Win32 API declarations.
Windows database must be prepared before using this. |
def _uniform_dist(self, spread, total):
""" Produce a uniform distribution of `total` across a list of
`spread` size. The result is non-random and uniform. """
fraction, fixed_increment = math.modf(total / spread)
fixed_increment = int(fixed_increment)
balance = 0
dist = ... | Produce a uniform distribution of `total` across a list of
`spread` size. The result is non-random and uniform. |
def maybe_dotted(module, throw=True):
""" If ``module`` is a dotted string pointing to the module,
imports and returns the module object.
"""
try:
return Configurator().maybe_dotted(module)
except ImportError as e:
err = '%s not found. %s' % (module, e)
if throw:
... | If ``module`` is a dotted string pointing to the module,
imports and returns the module object. |
def state_size(self):
    """State size of the cell: an LSTMStateTuple when tuple state is used,
    otherwise a flat width of twice the unit count."""
    if self._state_is_tuple:
        return LSTMStateTuple(self._num_units, self._num_units)
    return 2 * self._num_units
def blpop(self, keys, timeout=0):
"""
LPOP a value off of the first non-empty list
named in the ``keys`` list.
If none of the lists in ``keys`` has a value to LPOP, then block
for ``timeout`` seconds, or until a value gets pushed on to one
of the lists.
If timeo... | LPOP a value off of the first non-empty list
named in the ``keys`` list.
If none of the lists in ``keys`` has a value to LPOP, then block
for ``timeout`` seconds, or until a value gets pushed on to one
of the lists.
If timeout is 0, then block indefinitely. |
def putout(ofile, keylist, Rec):
"""
writes out a magic format record to ofile
"""
pmag_out = open(ofile, 'a')
outstring = ""
for key in keylist:
try:
outstring = outstring + '\t' + str(Rec[key]).strip()
except:
print(key, Rec[key])
# raw_input... | writes out a magic format record to ofile |
def cmd(self, cmd, verbose=False):
"""Executes the specified command on the remote host.
The cmd must be format safe, this means { and } must be doubled, thusly:
echo /var/local/maildir/{{cur,new}}
the cmd can include the format word 'maildir' to be replaced
by self.director... | Executes the specified command on the remote host.
The cmd must be format safe, this means { and } must be doubled, thusly:
echo /var/local/maildir/{{cur,new}}
the cmd can include the format word 'maildir' to be replaced
by self.directory. eg:
echo {maildir}/{{cur,new}} |
def coords2px(y, x):
""" Transforming coordinates to pixels.
Arguments:
y : np array
vector in which (y[0], y[1]) and (y[2], y[3]) are the
the corners of a bounding box.
x : image
an image
Returns:
Y : image
of shape x.shape
"""
... | Transforming coordinates to pixels.
Arguments:
y : np array
vector in which (y[0], y[1]) and (y[2], y[3]) are the
the corners of a bounding box.
x : image
an image
Returns:
Y : image
of shape x.shape |
def exclusive_match(self, field, value):
"""Match exactly the given value(s), with no other data in the field.
Arguments:
field (str): The field to check for the value.
The field must be namespaced according to Elasticsearch rules
using the dot syntax... | Match exactly the given value(s), with no other data in the field.
Arguments:
field (str): The field to check for the value.
The field must be namespaced according to Elasticsearch rules
using the dot syntax.
For example, ``"mdf.source_nam... |
def tags(self):
'''Display tag information for all samples in database'''
tags = self.workbench.get_all_tags()
if not tags:
return
tag_df = pd.DataFrame(tags)
tag_df = self.vectorize(tag_df, 'tags')
print '\n%sSamples in Database%s' % (color.LightPurple, color... | Display tag information for all samples in database |
def get_local_version(sigdir, sig):
"""Get the local version of a signature"""
version = None
filename = os.path.join(sigdir, '%s.cvd' % sig)
if os.path.exists(filename):
cmd = ['sigtool', '-i', filename]
sigtool = Popen(cmd, stdout=PIPE, stderr=PIPE)
while True:
line... | Get the local version of a signature |
def _netinfo_freebsd_netbsd():
'''
Get process information for network connections using sockstat
'''
ret = {}
# NetBSD requires '-n' to disable port-to-service resolution
out = __salt__['cmd.run'](
'sockstat -46 {0} | tail -n+2'.format(
'-n' if __grains__['kernel'] == 'NetBS... | Get process information for network connections using sockstat |
def write_tsv(self, path, encoding='UTF-8'):
"""Write expression matrix to a tab-delimited text file.
Parameters
----------
path: str
The path of the output file.
encoding: str, optional
The file encoding. ("UTF-8")
Returns
-------
... | Write expression matrix to a tab-delimited text file.
Parameters
----------
path: str
The path of the output file.
encoding: str, optional
The file encoding. ("UTF-8")
Returns
-------
None |
def handle_cmd_options():
    """Parse the command line and return the (options, args) pair."""
    opt_parser = OptionParser()
    opt_parser.add_option(
        "-s", "--silent",
        action="store_true",
        dest="silent",
        default=False,
        help="print any warnings",
    )
    return opt_parser.parse_args()
def visibility_changed(self, enable):
"""
Dock widget visibility has changed.
"""
if self.dockwidget is None:
return
if enable:
self.dockwidget.raise_()
widget = self.get_focus_widget()
if widget is not None and self.undocked_window... | Dock widget visibility has changed. |
def FromTXOutputsConfirmed(outputs):
"""
Get unspent outputs from a list of transaction outputs.
Args:
outputs (list): of neo.Core.TX.Transaction.TransactionOutput items.
Returns:
UnspentCoinState:
"""
uns = UnspentCoinState()
uns.Items =... | Get unspent outputs from a list of transaction outputs.
Args:
outputs (list): of neo.Core.TX.Transaction.TransactionOutput items.
Returns:
UnspentCoinState: |
def _get_best_prediction(self, record, train=True):
"""
Gets the prediction from the tree with the lowest mean absolute error.
"""
if not self.trees:
return
best = (+1e999999, None)
for tree in self.trees:
best = min(best, (tree.mae.mean, tree))
... | Gets the prediction from the tree with the lowest mean absolute error. |
def finish(self):
    """Wait for checker threads to finish."""
    if not self.urlqueue.empty():
        # A non-empty queue here means all checker threads died; abort.
        self.cancel()
    for thread in self.threads:
        thread.stop()
def get_formatter(name):
"""Return the named formatter function. See the function
"set_formatter" for details.
"""
if name in ('self', 'instance', 'this'):
return af_self
elif name == 'class':
return af_class
elif name in ('named', 'param', 'parameter'):
return af_named
... | Return the named formatter function. See the function
"set_formatter" for details. |
def get_info(df, group, info=('mean', 'std')):
    """
    Aggregate statistics (mean and std by default) of *df* grouped by *group*.

    :param df: pandas DataFrame to aggregate
    :param group: column name (or list of names) to group by
    :param info: aggregation names passed to ``DataFrame.agg``; the default
        is a tuple rather than a list to avoid the mutable-default-argument
        pitfall (the original used ``['mean', 'std']``)
    :return: DataFrame indexed by the group keys, one column per aggregation
    """
    # Tuples are converted to a list because pandas interprets a plain tuple
    # in ``agg`` differently from a list of function names.
    agg_spec = list(info) if isinstance(info, tuple) else info
    agg = df.groupby(group).agg(agg_spec)
    # agg has a (value-column, stat) MultiIndex on columns; keep the stat level.
    agg.columns = agg.columns.droplevel(0)
    return agg
def get_option(env_name, section, opt_name, default=None):
"""Return a configuration setting from environment var or .pyftpsyncrc"""
val = os.environ.get(env_name)
if val is None:
try:
val = _pyftpsyncrc_parser.get(section, opt_name)
except (compat.configparser.NoSectionError, co... | Return a configuration setting from environment var or .pyftpsyncrc |
def remove_project(self, path):
"""
Removes a project.
:param path: Project path.
:type path: unicode
:return: Method success.
:rtype: bool
"""
project_node = foundations.common.get_first_item(self.__model.get_project_nodes(path))
if not project_... | Removes a project.
:param path: Project path.
:type path: unicode
:return: Method success.
:rtype: bool |
def image_load_time(self):
    """
    Returns aggregate image load time for all pages.
    """
    times = self.get_load_times('image')
    # Average across pages, rounded to the configured precision.
    return round(mean(times), self.decimal_precision)
def load(self,
source_list: Iterable[List[str]],
target_sentences: Iterable[List[Any]],
num_samples_per_bucket: List[int]) -> 'ParallelDataSet':
"""
Creates a parallel dataset base on source list of strings and target sentences.
Returns a `sockeye.data_io.P... | Creates a parallel dataset base on source list of strings and target sentences.
Returns a `sockeye.data_io.ParallelDataSet`.
:param source_list: Source list of strings (e.g., filenames).
:param target_sentences: Target sentences used to do bucketing.
:param num_samples_per_bucket: Numbe... |
def __construct_list(self, list_value):
    """Parse every element of a list/set/tuple and return the results as a list."""
    return [self.__iterate_value(element) for element in list_value]
def datetime_to_numeric(array, offset=None, datetime_unit=None, dtype=float):
"""Convert an array containing datetime-like data to an array of floats.
Parameters
----------
da : np.array
Input data
offset: Scalar with the same type of array or None
If None, subtract minimum values t... | Convert an array containing datetime-like data to an array of floats.
Parameters
----------
da : np.array
Input data
offset: Scalar with the same type of array or None
If None, subtract minimum values to reduce round off error
datetime_unit: None or any of {'Y', 'M', 'W', 'D', 'h', ... |
def _field_value_html(self, field):
    """Return the HTML representation of the given field's value.

    Known fields are rendered as text; anything else is resolved through the
    timemachine instance's HTML name.
    """
    # NOTE(review): ``unicode`` implies this module targets Python 2.
    if field in self.fields:
        return unicode(self.get(field))
    return self.get_timemachine_instance(field)._object_name_html()
def tlg_plaintext_cleanup(text, rm_punctuation=False, rm_periods=False):
"""Remove and substitute post-processing for Greek TLG text.
TODO: Surely more junk to pull out. Please submit bugs!
TODO: {.+?}|\(.+?\) working?
TODO: This is a rather slow now, help in speeding up welcome.
"""
remove_comp... | Remove and substitute post-processing for Greek TLG text.
TODO: Surely more junk to pull out. Please submit bugs!
TODO: {.+?}|\(.+?\) working?
TODO: This is a rather slow now, help in speeding up welcome. |
def has_space(self, length=1, offset=0):
    """Return True if ``pos + length + offset`` stays within the working string."""
    last_index = self.pos + length + offset - 1
    return last_index < self.length
def assert_all_of_selectors(self, selector, *locators, **kwargs):
"""
Asserts that all of the provided selectors are present on the given page or descendants of
the current node. If options are provided, the assertion will check that each locator is
present with those options as well (ot... | Asserts that all of the provided selectors are present on the given page or descendants of
the current node. If options are provided, the assertion will check that each locator is
present with those options as well (other than ``wait``). ::
page.assert_all_of_selectors("custom", "Tom", "Joe... |
def delete(self, id):
"""
Delete the specified label
:param id: the label's ID
:type id: str
:raises: This will raise a
:class:`ServerException<logentries_api.exceptions.ServerException>`
if there is an error from Logentries
"""
return se... | Delete the specified label
:param id: the label's ID
:type id: str
:raises: This will raise a
:class:`ServerException<logentries_api.exceptions.ServerException>`
if there is an error from Logentries |
def transform(self, X):
    """Return *X* with noise from ``self._noise_func`` added.

    :X: numpy ndarray

    Also records the relative noise size on ``self.relative_noise_size_``.
    """
    noise = self._noise_func(*self._args, size=X.shape)
    noisy = X + noise
    self.relative_noise_size_ = self.relative_noise_size(X, noisy)
    return noisy
def get_expected_bindings(self):
"""Query the neutron DB for SG->switch interface bindings
Bindings are returned as a dict of bindings for each switch:
{<switch1>: set([(intf1, acl_name, direction),
(intf2, acl_name, direction)]),
<switch2>: set([(intf1, acl_na... | Query the neutron DB for SG->switch interface bindings
Bindings are returned as a dict of bindings for each switch:
{<switch1>: set([(intf1, acl_name, direction),
(intf2, acl_name, direction)]),
<switch2>: set([(intf1, acl_name, direction)]),
...,
} |
def Log(self, format_str, *args):
"""Logs the message using the flow's standard logging.
Args:
format_str: Format string
*args: arguments to the format string
"""
log_entry = rdf_flow_objects.FlowLogEntry(
client_id=self.rdf_flow.client_id,
flow_id=self.rdf_flow.flow_id,
... | Logs the message using the flow's standard logging.
Args:
format_str: Format string
*args: arguments to the format string |
def _private_packages_allowed():
"""
Checks if the current user is allowed to create private packages.
In the public cloud, the user needs to be on a paid plan.
There are no restrictions in other deployments.
"""
if not HAVE_PAYMENTS or TEAM_ID:
return True
customer = _get_or_creat... | Checks if the current user is allowed to create private packages.
In the public cloud, the user needs to be on a paid plan.
There are no restrictions in other deployments. |
def _get_provider_manager(self, osid, local=False):
"""Gets the most appropriate provider manager depending on config."""
return get_provider_manager(osid,
runtime=self._runtime,
proxy=getattr(self, '_proxy', None),
... | Gets the most appropriate provider manager depending on config. |
def stationary_distribution_sensitivity(T, j):
r"""Calculate the sensitivity matrix for entry j the stationary
distribution vector given transition matrix T.
Parameters
----------
T : numpy.ndarray shape = (n, n)
Transition matrix
j : int
entry of stationary distribution for whi... | r"""Calculate the sensitivity matrix for entry j the stationary
distribution vector given transition matrix T.
Parameters
----------
T : numpy.ndarray shape = (n, n)
Transition matrix
j : int
entry of stationary distribution for which the sensitivity is to be computed
Returns
... |
def load(source, semi=None):
"""
Read a variable-property mapping from *source* and return the VPM.
Args:
source: a filename or file-like object containing the VPM
definitions
semi (:class:`~delphin.mrs.semi.SemI`, optional): if provided,
it is passed to the VPM cons... | Read a variable-property mapping from *source* and return the VPM.
Args:
source: a filename or file-like object containing the VPM
definitions
semi (:class:`~delphin.mrs.semi.SemI`, optional): if provided,
it is passed to the VPM constructor
Returns:
a :class:`VP... |
def convert(self, obj):
"""Takes a dict corresponding to the honeybadgerfish JSON blob of the 1.2.* type and
converts it to DIRECT_HONEY_BADGERFISH version. The object is modified in place
and returned.
"""
if self.pristine_if_invalid:
raise NotImplementedError('prist... | Takes a dict corresponding to the honeybadgerfish JSON blob of the 1.2.* type and
converts it to DIRECT_HONEY_BADGERFISH version. The object is modified in place
and returned. |
def get_best(self):
"""Finds the optimal number of features
:return: optimal number of features and ranking
"""
svc = SVC(kernel="linear")
rfecv = RFECV(
estimator=svc,
step=1,
cv=StratifiedKFold(self.y_train, 2),
scoring="log_loss"... | Finds the optimal number of features
:return: optimal number of features and ranking |
def consume(exchange, queue_name, routing_key, callback, app_name):
"""Consume messages from an AMQP queue using a Python callback."""
# The configuration validates these are not null and contain all required keys
# when it is loaded.
bindings = config.conf["bindings"]
queues = config.conf["queues"... | Consume messages from an AMQP queue using a Python callback. |
def ensure_caches_alive(max_retries: int = 100,
retry_timeout: int = 5,
exit_on_failure: bool = True) -> bool:
"""
Checks every cache backend alias in ``settings.CACHES`` until it becomes available. After ``max_retries``
attempts to reach any backend are faile... | Checks every cache backend alias in ``settings.CACHES`` until it becomes available. After ``max_retries``
attempts to reach any backend are failed it returns ``False``. If ``exit_on_failure`` is set it shuts down with
``exit(1)``.
It sets the ``django-docker-helpers:available-check`` key for every cache ba... |
def plot_results(fout_img, goea_results, **kws):
"""Given a list of GOEA results, plot result GOs up to top."""
if "{NS}" not in fout_img:
plt_goea_results(fout_img, goea_results, **kws)
else:
# Plot separately by NS: BP, MF, CC
ns2goea_results = cx.defaultdict(list)
for rec ... | Given a list of GOEA results, plot result GOs up to top. |
def get_attachment_content(self, request, queryset):
"""
Returns the generated file content.
:param request: The request being processed.
:param queryset: The model class being processed.
:return: The report content (usually expressed in raw bytes but could be unicode as well).
... | Returns the generated file content.
:param request: The request being processed.
:param queryset: The model class being processed.
:return: The report content (usually expressed in raw bytes but could be unicode as well). |
def findFileParam(self, comp):
"""Finds the filename auto-parameter that component *comp* is
in, and returns all the filenames for that parameter. Notes this
assumes that *comp* will only be in a single filename auto-parameter.
:param comp: Component to search parameter membership for
... | Finds the filename auto-parameter that component *comp* is
in, and returns all the filenames for that parameter. Notes this
assumes that *comp* will only be in a single filename auto-parameter.
:param comp: Component to search parameter membership for
:type comp: :class:`AbstractStimulu... |
def _round(self, number):
"""
Helper function for rounding-as-taught-in-school (X.5 rounds to X+1 if positive).
Python 3 now rounds 0.5 to whichever side is even (i.e. 2.5 rounds to 2).
:param int number: a float to round.
:return: closest integer to number, rounding tie... | Helper function for rounding-as-taught-in-school (X.5 rounds to X+1 if positive).
Python 3 now rounds 0.5 to whichever side is even (i.e. 2.5 rounds to 2).
:param int number: a float to round.
:return: closest integer to number, rounding ties away from 0. |
def get_sdf(identifier, namespace='cid', domain='compound',operation=None, searchtype=None, **kwargs):
"""Request wrapper that automatically parses SDF response and supresses NotFoundError."""
try:
return get(identifier, namespace, domain, operation, 'SDF', searchtype, **kwargs).decode()
except NotF... | Request wrapper that automatically parses SDF response and supresses NotFoundError. |
def rs(data, n, unbiased=True):
"""
Calculates an individual R/S value in the rescaled range approach for
a given n.
Note: This is just a helper function for hurst_rs and should not be called
directly.
Args:
data (array-like of float):
time series
n (float):
size of the subseries in wh... | Calculates an individual R/S value in the rescaled range approach for
a given n.
Note: This is just a helper function for hurst_rs and should not be called
directly.
Args:
data (array-like of float):
time series
n (float):
size of the subseries in which data should be split
Kwargs:
... |
def page(request, slug, template=u"pages/page.html", extra_context=None):
"""
Select a template for a page and render it. The request
object should have a ``page`` attribute that's added via
``yacms.pages.middleware.PageMiddleware``. The page is loaded
earlier via middleware to perform various other... | Select a template for a page and render it. The request
object should have a ``page`` attribute that's added via
``yacms.pages.middleware.PageMiddleware``. The page is loaded
earlier via middleware to perform various other functions.
The urlpattern that maps to this view is a catch-all pattern, in
w... |
def do_request(self, line):
"""request <peer> <method> <params>
send a msgpack-rpc request and print a response.
<params> is a python code snippet, it should be eval'ed to a list.
"""
def f(p, method, params):
result = p.call(method, params)
print("RESULT... | request <peer> <method> <params>
send a msgpack-rpc request and print a response.
<params> is a python code snippet, it should be eval'ed to a list. |
def draw_text(self, video_name, out, start, end, x, y, text,
color='0xFFFFFF', show_background=0,
background_color='0x000000', size=16):
"""
Draws text over a video
@param video_name : name of video input file
@param out : name of video output file
... | Draws text over a video
@param video_name : name of video input file
@param out : name of video output file
@param start : start timecode to draw text hh:mm:ss
@param end : end timecode to draw text hh:mm:ss
@param x : x position of text (px)
@param y : y position of text... |
def is_ancestor(self, ancestor_rev, rev):
"""Check if a commit is an ancestor of another
:param ancestor_rev: Rev which should be an ancestor
:param rev: Rev to test against ancestor_rev
:return: ``True``, ancestor_rev is an accestor to rev.
"""
try:
self.gi... | Check if a commit is an ancestor of another
:param ancestor_rev: Rev which should be an ancestor
:param rev: Rev to test against ancestor_rev
:return: ``True``, ancestor_rev is an accestor to rev. |
def get_conf(cls, builder, doctree=None):
"""Return a dictionary of slide configuration for this doctree."""
# set up the default conf
result = {
'theme': builder.config.slide_theme,
'autoslides': builder.config.autoslides,
'slide_classes': [],
}
... | Return a dictionary of slide configuration for this doctree. |
def _get_error_page_callback(self):
"""Return an error page for the current response status."""
if self.response.status in self._error_handlers:
return self._error_handlers[self.response.status]
elif None in self._error_handlers:
return self._error_handlers[None]
... | Return an error page for the current response status. |
def l2traceroute_result_output_l2traceroutedone(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
l2traceroute_result = ET.Element("l2traceroute_result")
config = l2traceroute_result
output = ET.SubElement(l2traceroute_result, "output")
l2t... | Auto Generated Code |
def _registerPickleType(name, typedef):
'''
Register a type with the specified name. After registration, NamedStruct with this type
(and any sub-types) can be successfully pickled and transfered.
'''
NamedStruct._pickleNames[typedef] = name
NamedStruct._pickleTypes[name] ... | Register a type with the specified name. After registration, NamedStruct with this type
(and any sub-types) can be successfully pickled and transfered. |
def rename(self, oldkey, newkey):
"""
Change a keyname to another, without changing position in sequence.
Implemented so that transformations can be made on keys,
as well as on values. (used by encode and decode)
Also renames comments.
"""
if oldkey in self.scal... | Change a keyname to another, without changing position in sequence.
Implemented so that transformations can be made on keys,
as well as on values. (used by encode and decode)
Also renames comments. |
def reload_input_standby(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
reload = ET.Element("reload")
config = reload
input = ET.SubElement(reload, "input")
standby = ET.SubElement(input, "standby")
callback = kwargs.pop('callba... | Auto Generated Code |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.