code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def get_income_statement(self, **kwargs):
"""Income Statement
Pulls income statement data. Available quarterly (4 quarters) or
annually (4 years).
Reference: https://iexcloud.io/docs/api/#income-statement
Data Weighting: ``1000`` per symbol per period
.. war... | Income Statement
Pulls income statement data. Available quarterly (4 quarters) or
annually (4 years).
Reference: https://iexcloud.io/docs/api/#income-statement
Data Weighting: ``1000`` per symbol per period
.. warning:: This endpoint is only available using IEX Cloud... |
def add_validation_patch(self, patch):
"""
Extracts ground truth and classification results from the EOPatch and
aggregates the results.
"""
# 2. Convert 8-bit mask
self._transform_truth(patch)
# 3. Count truth labeled pixels
self._count_truth_pixels()
... | Extracts ground truth and classification results from the EOPatch and
aggregates the results. |
def list_proxy(root_package = 'vlcp'):
'''
Walk through all the sub modules, find subclasses of vlcp.server.module._ProxyModule,
list their default values
'''
proxy_dict = OrderedDict()
pkg = __import__(root_package, fromlist=['_'])
for imp, module, _ in walk_packages(pkg.__path__, root_pack... | Walk through all the sub modules, find subclasses of vlcp.server.module._ProxyModule,
list their default values |
def _cmp(self, other):
"""
Compare two Project Haystack version strings, then return
-1 if self < other,
0 if self == other
or 1 if self > other.
"""
if not isinstance(other, Version):
other = Version(other)
num1 = self.version_num... | Compare two Project Haystack version strings, then return
-1 if self < other,
0 if self == other
or 1 if self > other. |
def generate_data(self, data_dir, tmp_dir, task_id=-1):
"""Generates training/dev data.
Args:
data_dir: a string
tmp_dir: a string
task_id: an optional integer
Returns:
shard or shards for which data was generated.
"""
tf.logging.info("generate_data task_id=%s" % task_id)
... | Generates training/dev data.
Args:
data_dir: a string
tmp_dir: a string
task_id: an optional integer
Returns:
shard or shards for which data was generated. |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'gateways') and self.gateways is not None:
_dict['gateways'] = [x._to_dict() for x in self.gateways]
return _dict | Return a json dictionary representing this model. |
def build_cpp(build_context, target, compiler_config, workspace_dir):
"""Compile and link a C++ binary for `target`."""
rmtree(workspace_dir)
binary = join(*split(target.name))
objects = link_cpp_artifacts(build_context, target, workspace_dir, True)
buildenv_workspace = build_context.conf.host_to_bu... | Compile and link a C++ binary for `target`. |
def merge_perchrom_vcfs(job, perchrom_vcfs, tool_name, univ_options):
"""
Merge per-chromosome vcf files into a single genome level vcf.
:param dict perchrom_vcfs: Dictionary with chromosome name as key and fsID of the corresponding
vcf as value
:param str tool_name: Name of the tool that ge... | Merge per-chromosome vcf files into a single genome level vcf.
:param dict perchrom_vcfs: Dictionary with chromosome name as key and fsID of the corresponding
vcf as value
:param str tool_name: Name of the tool that generated the vcfs
:returns: fsID for the merged vcf
:rtype: toil.fileStore.... |
def create(host, port, result_converter=None, testcase_converter=None, args=None):
"""
Function which is called by Icetea to create an instance of the cloud client. This function
must exists.
This function myust not return None. Either return an instance of Client or raise.
"""
return SampleClie... | Function which is called by Icetea to create an instance of the cloud client. This function
must exists.
This function myust not return None. Either return an instance of Client or raise. |
def get_dashboards(self):
'''**Description**
Return the list of dashboards available under the given user account. This includes the dashboards created by the user and the ones shared with her by other users.
**Success Return Value**
A dictionary containing the list of available... | **Description**
Return the list of dashboards available under the given user account. This includes the dashboards created by the user and the ones shared with her by other users.
**Success Return Value**
A dictionary containing the list of available sampling intervals.
**Examp... |
def extractColumns(TableName,SourceParameterName,ParameterFormats,ParameterNames=None,FixCol=False):
"""
INPUT PARAMETERS:
TableName: name of source table (required)
SourceParameterName: name of source column to process (required)
ParameterFormats: c for... | INPUT PARAMETERS:
TableName: name of source table (required)
SourceParameterName: name of source column to process (required)
ParameterFormats: c formats of unpacked parameters (required)
ParameterNames: list of resulting parameter names (optiona... |
def get_areas(self, area_id=None, **kwargs):
"""
Alias for get_elements() but filter the result by Area
:param area_id: The Id of the area
:type area_id: Integer
:return: List of elements
"""
return self.get_elements(Area, elem_id=area_id, **kwargs) | Alias for get_elements() but filter the result by Area
:param area_id: The Id of the area
:type area_id: Integer
:return: List of elements |
async def send_script(self, conn_id, data):
"""Send a a script to a device.
See :meth:`AbstractDeviceAdapter.send_script`.
"""
progress_callback = functools.partial(_on_progress, self, 'script', conn_id)
resp = await self._execute(self._adapter.send_script_sync, conn_id, data,... | Send a a script to a device.
See :meth:`AbstractDeviceAdapter.send_script`. |
def report_hit_filename(zipfilename: str, contentsfilename: str,
show_inner_file: bool) -> None:
"""
For "hits": prints either the ``.zip`` filename, or the ``.zip`` filename
and the inner filename.
Args:
zipfilename: filename of the ``.zip`` file
contentsfilenam... | For "hits": prints either the ``.zip`` filename, or the ``.zip`` filename
and the inner filename.
Args:
zipfilename: filename of the ``.zip`` file
contentsfilename: filename of the inner file
show_inner_file: if ``True``, show both; if ``False``, show just the
``.zip`` filen... |
def bam2fastq(job, bamfile, univ_options):
"""
split an input bam to paired fastqs.
ARGUMENTS
1. bamfile: Path to a bam file
2. univ_options: Dict of universal arguments used by almost all tools
univ_options
|- 'dockerhub': <dockerhub to use>
+- 'java_Xmx': ... | split an input bam to paired fastqs.
ARGUMENTS
1. bamfile: Path to a bam file
2. univ_options: Dict of universal arguments used by almost all tools
univ_options
|- 'dockerhub': <dockerhub to use>
+- 'java_Xmx': value for max heap passed to java |
def combinations(l):
"""Pure-Python implementation of itertools.combinations(l, 2)."""
result = []
for x in xrange(len(l) - 1):
ls = l[x + 1:]
for y in ls:
result.append((l[x], y))
return result | Pure-Python implementation of itertools.combinations(l, 2). |
def find_worst(rho, pval, m=1, rlim=.10, plim=.35):
"""Find the N "worst", i.e. insignificant/random and low, correlations
Parameters
----------
rho : ndarray, list
1D array with correlation coefficients
pval : ndarray, list
1D array with p-values
m : int
The desired n... | Find the N "worst", i.e. insignificant/random and low, correlations
Parameters
----------
rho : ndarray, list
1D array with correlation coefficients
pval : ndarray, list
1D array with p-values
m : int
The desired number of indicies to return
(How many "worst" corre... |
def error_count(self):
"""Returns the total number of validation errors for this row."""
count = 0
for error_list in self.error_dict.values():
count += len(error_list)
return count | Returns the total number of validation errors for this row. |
def _setStartSegment(self, segmentIndex, **kwargs):
"""
Subclasses may override this method.
"""
segments = self.segments
oldStart = segments[-1]
oldLast = segments[0]
# If the contour ends with a curve on top of a move,
# delete the move.
if oldLa... | Subclasses may override this method. |
def is_dsub_operation(op):
"""Determine if a pipelines operation is a dsub request.
We don't have a rigorous way to identify an operation as being submitted
by dsub. Our best option is to check for certain fields that have always
been part of dsub operations.
- labels: job-id, job-name, and user-id have alw... | Determine if a pipelines operation is a dsub request.
We don't have a rigorous way to identify an operation as being submitted
by dsub. Our best option is to check for certain fields that have always
been part of dsub operations.
- labels: job-id, job-name, and user-id have always existed. The dsub-version
... |
def get_rotations(self):
"""Return all rotations, including inversions for
centrosymmetric crystals."""
if self.centrosymmetric:
return np.vstack((self.rotations, -self.rotations))
else:
return self.rotations | Return all rotations, including inversions for
centrosymmetric crystals. |
def wrap_url(s, l):
"""Wrap a URL string"""
parts = s.split('/')
if len(parts) == 1:
return parts[0]
else:
i = 0
lines = []
for j in range(i, len(parts) + 1):
tv = '/'.join(parts[i:j])
nv = '/'.join(parts[i:j + 1])
if len(nv) > l or n... | Wrap a URL string |
def report(self, item_id, report_format="json"):
"""Retrieves the specified report for the analyzed item, referenced by item_id.
Available formats include: json, html, all, dropped, package_files.
:type item_id: int
:param item_id: Task ID number
:type report_form... | Retrieves the specified report for the analyzed item, referenced by item_id.
Available formats include: json, html, all, dropped, package_files.
:type item_id: int
:param item_id: Task ID number
:type report_format: str
:param report_format: Return format
... |
def _group(self, element):
"""Parses the XML element as a group of [unknown] number of lines."""
for v in _get_xml_version(element):
if "name" in element.attrib:
g = TemplateGroup(element, self.versions[v].comment)
self.versions[v].entries[g.identifier] = g
... | Parses the XML element as a group of [unknown] number of lines. |
def process(self):
""" Calls the external cleanser scripts to (optionally) purge the meta data and then
send the contents of the dropbox via email.
"""
if self.num_attachments > 0:
self.status = u'100 processor running'
fs_dirty_archive = self._create_backup(... | Calls the external cleanser scripts to (optionally) purge the meta data and then
send the contents of the dropbox via email. |
def from_folder(cls, path:PathOrStr, train:str='train', valid:str='valid', test:Optional[str]=None,
classes:Collection[Any]=None, tokenizer:Tokenizer=None, vocab:Vocab=None, chunksize:int=10000, max_vocab:int=60000,
min_freq:int=2, mark_fields:bool=False, include_bos:bool=True, i... | Create a `TextDataBunch` from text files in folders. |
def _setup_profiles(self, conversion_profiles):
'''
Add given conversion profiles checking for invalid profiles
'''
# Check for invalid profiles
for key, path in conversion_profiles.items():
if isinstance(path, str):
path = (path, )
for lef... | Add given conversion profiles checking for invalid profiles |
def set_title(self,s, panel='top'):
"set plot title"
panel = self.get_panel(panel)
panel.set_title(s) | set plot title |
def get_child_bank_ids(self, bank_id):
"""Gets the child ``Ids`` of the given bank.
arg: bank_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the bank
raise: NotFound - ``bank_id`` is not found
raise: NullArgument - ``bank_id`` is ``null``
... | Gets the child ``Ids`` of the given bank.
arg: bank_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the bank
raise: NotFound - ``bank_id`` is not found
raise: NullArgument - ``bank_id`` is ``null``
raise: OperationFailed - unable to complete... |
def on_channel_closed(self, channel, reply_code, reply_text):
"""
Invoked by pika when RabbitMQ unexpectedly closes the channel.
Channels are usually closed if you attempt to do something that
violates the protocol, such as re-declare an exchange or queue with
different parameter... | Invoked by pika when RabbitMQ unexpectedly closes the channel.
Channels are usually closed if you attempt to do something that
violates the protocol, such as re-declare an exchange or queue with
different parameters. In this case, we'll close the connection
to shutdown the object.
... |
def run(self):
"""Thread main loop"""
retries = 0
try:
while not self._stopping:
try:
data = self.notifications_api.long_poll_notifications()
except mds.rest.ApiException as e:
# An HTTP 410 can be raised when st... | Thread main loop |
def get_random_name(retry=False):
"""
generates a random name from the list of adjectives and birds in this package
formatted as "adjective_surname". For example 'loving_sugarbird'. If retry is non-zero, a random
integer between 0 and 100 will be added to the end of the name, e.g `loving_sugarbird3`
... | generates a random name from the list of adjectives and birds in this package
formatted as "adjective_surname". For example 'loving_sugarbird'. If retry is non-zero, a random
integer between 0 and 100 will be added to the end of the name, e.g `loving_sugarbird3` |
def new(params, event_shape=(), validate_args=False, name=None):
"""Create the distribution instance from a `params` vector."""
with tf.compat.v1.name_scope(name, 'IndependentLogistic',
[params, event_shape]):
params = tf.convert_to_tensor(value=params, name='params')
... | Create the distribution instance from a `params` vector. |
def gen_data_files(src_dir):
"""
generates a list of files contained in the given directory (and its
subdirectories) in the format required by the ``package_data`` parameter
of the ``setuptools.setup`` function.
Parameters
----------
src_dir : str
(relative) path to the directory st... | generates a list of files contained in the given directory (and its
subdirectories) in the format required by the ``package_data`` parameter
of the ``setuptools.setup`` function.
Parameters
----------
src_dir : str
(relative) path to the directory structure containing the files to
b... |
def safe_display_name(numobj, lang, script=None, region=None):
"""Gets the name of the carrier for the given PhoneNumber object only when
it is 'safe' to display to users. A carrier name is onsidered safe if the
number is valid and for a region that doesn't support mobile number
portability (http://en.... | Gets the name of the carrier for the given PhoneNumber object only when
it is 'safe' to display to users. A carrier name is onsidered safe if the
number is valid and for a region that doesn't support mobile number
portability (http://en.wikipedia.org/wiki/Mobile_number_portability).
This function exp... |
def reorder_categories(self, new_categories, ordered=None, inplace=False):
"""
Reorder categories as specified in new_categories.
`new_categories` need to include all old categories and no new category
items.
Parameters
----------
new_categories : Index-like
... | Reorder categories as specified in new_categories.
`new_categories` need to include all old categories and no new category
items.
Parameters
----------
new_categories : Index-like
The categories in new order.
ordered : bool, optional
Whether or not... |
def sort_return_tuples(response, **options):
"""
If ``groups`` is specified, return the response as a list of
n-element tuples with n being the value found in options['groups']
"""
if not response or not options.get('groups'):
return response
n = options['groups']
return list(izip(*[... | If ``groups`` is specified, return the response as a list of
n-element tuples with n being the value found in options['groups'] |
def _key(self, username, frozen=False):
"""Translate a username into a key for Redis."""
if frozen:
return self.frozen + username
return self.prefix + username | Translate a username into a key for Redis. |
def get_app_logger_color(appname, app_log_level=logging.INFO, log_level=logging.WARN, logfile=None):
""" Configure the logging for an app using reliure (it log's both the app and reliure lib)
:param appname: the name of the application to log
:parap app_log_level: log level for the app
:param log_level... | Configure the logging for an app using reliure (it log's both the app and reliure lib)
:param appname: the name of the application to log
:parap app_log_level: log level for the app
:param log_level: log level for the reliure
:param logfile: file that store the log, time rotating file (by day), no if N... |
def plot_di_mean_ellipse(dictionary, fignum=1, color='k', marker='o', markersize=20, label='', legend='no'):
"""
Plot a mean direction (declination, inclination) confidence ellipse.
Parameters
-----------
dictionary : a dictionary generated by the pmag.dobingham or pmag.dokent funcitons
"""
... | Plot a mean direction (declination, inclination) confidence ellipse.
Parameters
-----------
dictionary : a dictionary generated by the pmag.dobingham or pmag.dokent funcitons |
def Xor(bytestr, key):
"""Returns a `bytes` object where each byte has been xored with key."""
# TODO(hanuszczak): Remove this import when string migration is done.
# pytype: disable=import-error
from builtins import bytes # pylint: disable=redefined-builtin, g-import-not-at-top
# pytype: enable=import-error... | Returns a `bytes` object where each byte has been xored with key. |
def _run_check(self, check_method, ds, max_level):
"""
Runs a check and appends a result to the values list.
@param bound method check_method: a given check method
@param netCDF4 dataset ds
@param int max_level: check level
@return list: list of Result objects
"""... | Runs a check and appends a result to the values list.
@param bound method check_method: a given check method
@param netCDF4 dataset ds
@param int max_level: check level
@return list: list of Result objects |
def update_query(self, *args, **kwargs):
"""Return a new URL with query part updated."""
s = self._get_str_query(*args, **kwargs)
new_query = MultiDict(parse_qsl(s, keep_blank_values=True))
query = MultiDict(self.query)
query.update(new_query)
return URL(self._val._repla... | Return a new URL with query part updated. |
def determine_master(port=4000):
"""Determine address of master so that workers
can connect to it. If the environment variable
SPARK_LOCAL_IP is set, that address will be used.
:param port: port on which the application runs
:return: Master address
Example usage:
SPARK_LOCAL_IP=127.0.0... | Determine address of master so that workers
can connect to it. If the environment variable
SPARK_LOCAL_IP is set, that address will be used.
:param port: port on which the application runs
:return: Master address
Example usage:
SPARK_LOCAL_IP=127.0.0.1 spark-submit --master \
l... |
def update_course_enrollment(self, email, course_url, purchase_incomplete, mode, unit_cost=None, course_id=None,
currency=None, message_id=None, site_code=None, sku=None):
"""Adds/updates Sailthru when a user adds to cart/purchases/upgrades a course
Args:
email(str): The u... | Adds/updates Sailthru when a user adds to cart/purchases/upgrades a course
Args:
email(str): The user's email address
course_url(str): Course home page url
purchase_incomplete(boolean): True if adding to cart
mode(string): enroll mode (audit, verification, ...)
unit_cost(de... |
def unalias(self, annotationtype, alias):
"""Return the set for an alias (if applicable, raises an exception otherwise)"""
if inspect.isclass(annotationtype): annotationtype = annotationtype.ANNOTATIONTYPE
return self.alias_set[annotationtype][alias] | Return the set for an alias (if applicable, raises an exception otherwise) |
def import_sql_select(connection_url, select_query, username, password, optimize=True,
use_temp_table=None, temp_table_name=None, fetch_mode=None):
"""
Import the SQL table that is the result of the specified SQL query to H2OFrame in memory.
Creates a temporary SQL table from the spe... | Import the SQL table that is the result of the specified SQL query to H2OFrame in memory.
Creates a temporary SQL table from the specified sql_query.
Runs multiple SELECT SQL queries on the temporary table concurrently for parallel ingestion, then drops the table.
Be sure to start the h2o.jar in the termin... |
def makeServiceDocXML(title, collections):
"""
Make an ATOM service doc here. The 'collections' parameter is a list of
dictionaries, with the keys of 'title', 'accept' and 'categories'
being valid
"""
serviceTag = etree.Element("service")
workspaceTag = etree.SubElement(serviceTag, "workspa... | Make an ATOM service doc here. The 'collections' parameter is a list of
dictionaries, with the keys of 'title', 'accept' and 'categories'
being valid |
def start_listener_thread(self, timeout_ms: int = 30000, exception_handler: Callable = None):
"""
Start a listener greenlet to listen for events in the background.
Args:
timeout_ms: How long to poll the Home Server for before retrying.
exception_handler: Optional exceptio... | Start a listener greenlet to listen for events in the background.
Args:
timeout_ms: How long to poll the Home Server for before retrying.
exception_handler: Optional exception handler function which can
be used to handle exceptions in the caller thread. |
def uncloak(request):
"""
Undo a masquerade session and redirect the user back to where they started
cloaking from (or where ever the "next" POST parameter points)
"""
try:
del request.session[SESSION_USER_KEY]
except KeyError:
pass # who cares
# figure out where to redirect... | Undo a masquerade session and redirect the user back to where they started
cloaking from (or where ever the "next" POST parameter points) |
def attribute(element, attribute, default=None):
"""
Returns the value of an attribute, or a default if it's not defined
:param element: The XML Element object
:type element: etree._Element
:param attribute: The name of the attribute to evaluate
:type attribute: basestring
:param default... | Returns the value of an attribute, or a default if it's not defined
:param element: The XML Element object
:type element: etree._Element
:param attribute: The name of the attribute to evaluate
:type attribute: basestring
:param default: The default value to return if the attribute is not defined |
def _pwr_optfcn(df, loc):
'''
Function to find power from ``i_from_v``.
'''
I = _lambertw_i_from_v(df['r_sh'], df['r_s'], # noqa: E741, N806
df['nNsVth'], df[loc], df['i_0'], df['i_l'])
return I * df[loc] | Function to find power from ``i_from_v``. |
def geo_max_distance(left, right):
"""Returns the 2-dimensional maximum distance between two geometries in
projected units. If g1 and g2 is the same geometry the function will
return the distance between the two vertices most far from each other
in that geometry
Parameters
----------
left :... | Returns the 2-dimensional maximum distance between two geometries in
projected units. If g1 and g2 is the same geometry the function will
return the distance between the two vertices most far from each other
in that geometry
Parameters
----------
left : geometry
right : geometry
Return... |
def _isdictclass(obj):
'''Return True for known dict objects.
'''
c = getattr(obj, '__class__', None)
return c and c.__name__ in _dict_classes.get(c.__module__, ()) | Return True for known dict objects. |
def mock(config_or_spec=None, spec=None, strict=OMITTED):
"""Create 'empty' objects ('Mocks').
Will create an empty unconfigured object, that you can pass
around. All interactions (method calls) will be recorded and can be
verified using :func:`verify` et.al.
A plain `mock()` will be not `strict`,... | Create 'empty' objects ('Mocks').
Will create an empty unconfigured object, that you can pass
around. All interactions (method calls) will be recorded and can be
verified using :func:`verify` et.al.
A plain `mock()` will be not `strict`, and thus all methods regardless
of the arguments will return... |
def cubic_bezier(document, coords):
"cubic bezier polyline"
element = document.createElement('path')
points = [(coords[i], coords[i+1]) for i in range(0, len(coords), 2)]
path = ["M%s %s" %points[0]]
for n in xrange(1, len(points), 3):
A, B, C = points[n:n+3]
path.append("C%s,%s %s,%s %s,%s" % (A[0], A[1]... | cubic bezier polyline |
def shot_end_data(shot, role):
"""Return the data for endframe
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the end
:rtype: depending on role
:raises: No... | Return the data for endframe
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the end
:rtype: depending on role
:raises: None |
def array_map2(*referls,**kwargs):
'''
obseleted just for compatible
from elist.elist import *
ol = [1,2,3,4]
refl1 = ['+','+','+','+']
refl2 = [7,7,7,7]
refl3 = ['=','=','=','=']
def map_func(ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix):
s = pref... | obseleted just for compatible
from elist.elist import *
ol = [1,2,3,4]
refl1 = ['+','+','+','+']
refl2 = [7,7,7,7]
refl3 = ['=','=','=','=']
def map_func(ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix):
s = prefix+': ' + str(ele) + str(ref_ele1) + str(ref_ele2) ... |
def do_types_overlap(schema, type_a, type_b):
"""Check whether two types overlap in a given schema.
Provided two composite types, determine if they "overlap". Two composite types
overlap when the Sets of possible concrete types for each intersect.
This is often used to determine if a fragment of a giv... | Check whether two types overlap in a given schema.
Provided two composite types, determine if they "overlap". Two composite types
overlap when the Sets of possible concrete types for each intersect.
This is often used to determine if a fragment of a given type could possibly be
visited in a context of... |
def getRgbdData(self):
'''
Returns last RgbdData.
@return last JdeRobotTypes Rgbd saved
'''
self.lock.acquire()
data = self.data
self.lock.release()
return data | Returns last RgbdData.
@return last JdeRobotTypes Rgbd saved |
def from_geometry(cls, molecule, do_orders=False, scaling=1.0):
"""Construct a MolecularGraph object based on interatomic distances
All short distances are computed with the binning module and compared
with a database of bond lengths. Based on this comparison, bonded
atoms are ... | Construct a MolecularGraph object based on interatomic distances
All short distances are computed with the binning module and compared
with a database of bond lengths. Based on this comparison, bonded
atoms are detected.
Before marking a pair of atoms A and B as bonded, it ... |
def copy(self):
"""
Returns a copy of ClusterGraph.
Returns
-------
ClusterGraph: copy of ClusterGraph
Examples
-------
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> G = ClusterGraph()
>>> G.add_nodes_from([('a', 'b'), ('b', '... | Returns a copy of ClusterGraph.
Returns
-------
ClusterGraph: copy of ClusterGraph
Examples
-------
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> G = ClusterGraph()
>>> G.add_nodes_from([('a', 'b'), ('b', 'c')])
>>> G.add_edge(('a', '... |
def get_flagged_args():
"""get_flagged_args
Collects from the execution statement the arguments provided to this script.
The items are then interpretted and returned. The object expected are the
KvP's:
--os_type - the operating system type to be built
--os_version - the operating system version to be buil... | get_flagged_args
Collects from the execution statement the arguments provided to this script.
The items are then interpretted and returned. The object expected are the
KvP's:
--os_type - the operating system type to be built
--os_version - the operating system version to be built
NOTE: by not using these opti... |
def find_skew(self):
"""Returns a tuple (deskew angle in degrees, confidence value).
Returns (None, None) if no angle is available.
"""
with _LeptonicaErrorTrap():
angle = ffi.new('float *', 0.0)
confidence = ffi.new('float *', 0.0)
result = lept.pixF... | Returns a tuple (deskew angle in degrees, confidence value).
Returns (None, None) if no angle is available. |
def write_json(dictionary, filename):
"""Write dictionary to JSON"""
with open(filename, 'w') as data_file:
json.dump(dictionary, data_file, indent=4, sort_keys=True)
print('--> Wrote ' + os.path.basename(filename)) | Write dictionary to JSON |
def import_pyqt4(version=2):
"""
Import PyQt4
Parameters
----------
version : 1, 2, or None
Which QString/QVariant API to use. Set to None to use the system
default
ImportErrors raised within this function are non-recoverable
"""
# The new-style string API (version=2) autom... | Import PyQt4
Parameters
----------
version : 1, 2, or None
Which QString/QVariant API to use. Set to None to use the system
default
ImportErrors raised within this function are non-recoverable |
def role_create(auth=None, **kwargs):
'''
Create a role
CLI Example:
.. code-block:: bash
salt '*' keystoneng.role_create name=role1
salt '*' keystoneng.role_create name=role1 domain_id=b62e76fbeeff4e8fb77073f591cf211e
'''
cloud = get_operator_cloud(auth)
kwargs = _clean_k... | Create a role
CLI Example:
.. code-block:: bash
salt '*' keystoneng.role_create name=role1
salt '*' keystoneng.role_create name=role1 domain_id=b62e76fbeeff4e8fb77073f591cf211e |
def _deps_only_toggled(self, widget, data=None):
"""
Function deactivate options in case of deps_only and opposite
"""
active = widget.get_active()
self.dir_name.set_sensitive(not active)
self.entry_project_name.set_sensitive(not active)
self.dir_name_browse_btn.s... | Function deactivate options in case of deps_only and opposite |
def _schema_get_docstring(starting_class):
""" Given a class, return its docstring.
If no docstring is present for the class, search base classes in MRO for a
docstring.
"""
for cls in inspect.getmro(starting_class):
if inspect.getdoc(cls):
return inspect.getdoc(cls) | Given a class, return its docstring.
If no docstring is present for the class, search base classes in MRO for a
docstring. |
def _add_timedelta(self, delta):
"""
Add timedelta duration to the instance.
:param delta: The timedelta instance
:type delta: pendulum.Duration or datetime.timedelta
:rtype: Date
"""
if isinstance(delta, pendulum.Duration):
return self.add(
... | Add timedelta duration to the instance.
:param delta: The timedelta instance
:type delta: pendulum.Duration or datetime.timedelta
:rtype: Date |
def update_vip_request(self, vip_request, vip_request_id):
"""
Method to update vip request
param vip_request: vip_request object
param vip_request_id: vip_request id
"""
uri = 'api/v3/vip-request/%s/' % vip_request_id
data = dict()
data['vips'] = list()... | Method to update vip request
param vip_request: vip_request object
param vip_request_id: vip_request id |
def _handle_input_request(self, msg):
"""Save history and add a %plot magic."""
if self._hidden:
raise RuntimeError('Request for raw input during hidden execution.')
# Make sure that all output from the SUB channel has been processed
# before entering readline mode.
... | Save history and add a %plot magic. |
def paragraph(
self,
nb_sentences=3,
variable_nb_sentences=True,
ext_word_list=None):
"""
:returns: A single paragraph. For example: 'Sapiente sunt omnis. Ut
pariatur ad autem ducimus et. Voluptas rem voluptas sint modi dolorem amet.'
... | :returns: A single paragraph. For example: 'Sapiente sunt omnis. Ut
pariatur ad autem ducimus et. Voluptas rem voluptas sint modi dolorem amet.'
Keyword arguments:
:param nb_sentences: around how many sentences the paragraph should contain
:param variable_nb_sentences: set to false ... |
def datetime_from_iso_format(string):
"""
Return a datetime object from an iso 8601 representation.
Return None if string is non conforming.
"""
match = DATE_ISO_REGEX.match(string)
if match:
date = datetime.datetime(year=int(match.group(DATE_ISO_YEAR_GRP)),
... | Return a datetime object from an iso 8601 representation.
Return None if string is non conforming. |
def next_frame_glow_hparams():
"""Hparams for next_frame_glow."""
hparams = glow.glow_hparams()
# Possible modes are conditional and unconditional
hparams.add_hparam("gen_mode", "conditional")
hparams.add_hparam("learn_top_scale", False)
hparams.add_hparam("condition_all_levels", True)
# For each video, s... | Hparams for next_frame_glow. |
def __build_libxml2(target, source, env):
"""
General XSLT builder (HTML/FO), using the libxml2 module.
"""
xsl_style = env.subst('$DOCBOOK_XSL')
styledoc = libxml2.parseFile(xsl_style)
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.readFile(str(source[0]),None,libxml2.XML_PARSE_... | General XSLT builder (HTML/FO), using the libxml2 module. |
def getComment(self, repo_user, repo_name, comment_id):
    """
    GET /repos/:owner/:repo/pulls/comments/:number

    Fetch a single pull-request review comment.

    :param repo_user: Owner of the repository.
    :param repo_name: Name of the repository.
    :param comment_id: The review comment's ID.
    :return: whatever ``self.api.makeRequest`` yields for the endpoint.
    """
    # Build the endpoint path segments, coercing the ID to str.
    path = ['repos', repo_user, repo_name, 'pulls', 'comments', str(comment_id)]
    return self.api.makeRequest(path)
def transform(source):
'''Used to convert the source code, making use of known transformers.
"transformers" are modules which must contain a function
transform_source(source)
which returns a tranformed source.
Some transformers (for example, those found in the standard library
... | Used to convert the source code, making use of known transformers.
"transformers" are modules which must contain a function
transform_source(source)
which returns a tranformed source.
Some transformers (for example, those found in the standard library
module lib2to3) cannot cop... |
def get(self, name, default=None):
"""
Returns the value of the given variable, or the given default
value if the variable is not defined.
:type name: string
:param name: The name of the variable.
:type default: object
:param default: The default value.
... | Returns the value of the given variable, or the given default
value if the variable is not defined.
:type name: string
:param name: The name of the variable.
:type default: object
:param default: The default value.
:rtype: object
:return: The value of the vari... |
def _get_url(self, resource, item, sys_id=None):
"""Takes table and sys_id (if present), and returns a URL
:param resource: API resource
:param item: API resource item
:param sys_id: Record sys_id
:return:
- url string
"""
url_str = '%(base_url)s/%(b... | Takes table and sys_id (if present), and returns a URL
:param resource: API resource
:param item: API resource item
:param sys_id: Record sys_id
:return:
- url string |
def get_resource(self, name=None, store=None, workspace=None):
'''
returns a single resource object.
Will return None if no resource is found.
Will raise an error if more than one resource with the same name is found.
'''
resources = self.get_resources(names=name, ... | returns a single resource object.
Will return None if no resource is found.
Will raise an error if more than one resource with the same name is found. |
def load_csv(ctx, model, path, header=None, header_exclude=None, **fmtparams):
"""Load a CSV from a file path.
:param ctx: Anthem context
:param model: Odoo model name or model klass from env
:param path: absolute or relative path to CSV file.
If a relative path is given you must provide a valu... | Load a CSV from a file path.
:param ctx: Anthem context
:param model: Odoo model name or model klass from env
:param path: absolute or relative path to CSV file.
If a relative path is given you must provide a value for
`ODOO_DATA_PATH` in your environment
or set `--odoo-data-path` o... |
def Vgg19_simple_api(rgb):
"""
Build the VGG 19 Model
Parameters
-----------
rgb : rgb image placeholder [batch, height, width, 3] values scaled [0, 1]
"""
start_time = time.time()
print("build model started")
rgb_scaled = rgb * 255.0
# Convert RGB to BGR
red, green, blue = ... | Build the VGG 19 Model
Parameters
-----------
rgb : rgb image placeholder [batch, height, width, 3] values scaled [0, 1] |
def add_price_entity(self, price: dal.Price):
""" Adds the price """
from decimal import Decimal
# check if the price already exists in db.
repo = self.get_price_repository()
existing = (
repo.query
.filter(dal.Price.namespace == price.namespace)
... | Adds the price |
def update_payload(self, fields=None):
"""Wrap submitted data within an extra dict."""
payload = super(ProvisioningTemplate, self).update_payload(fields)
if 'template_combinations' in payload:
payload['template_combinations_attributes'] = payload.pop(
'template_combin... | Wrap submitted data within an extra dict. |
def insert_sections_some(ol,*secs,**kwargs):
'''
ol = initRange(0,20,1)
ol
loc = 6
rslt = insert_sections_some(ol,['a','a','a'],['c','c','c','c'],index=loc)
rslt
####
'''
if('mode' in kwargs):
mode = kwargs["mode"]
else:
mode = "new"
lo... | ol = initRange(0,20,1)
ol
loc = 6
rslt = insert_sections_some(ol,['a','a','a'],['c','c','c','c'],index=loc)
rslt
#### |
def rmswidth(self, floor=0):
"""Calculate :ref:`pysynphot-formula-rmswidth`.
Parameters
----------
floor : float
Throughput values equal or below this threshold are not
included in the calculation. By default (0), all points
are included.
Ret... | Calculate :ref:`pysynphot-formula-rmswidth`.
Parameters
----------
floor : float
Throughput values equal or below this threshold are not
included in the calculation. By default (0), all points
are included.
Returns
-------
ans : float... |
def collapse( self, direction ):
"""
Collapses this splitter handle before or after other widgets based on \
the inputed CollapseDirection.
:param direction | <XSplitterHandle.CollapseDirection>
:return <bool> | success
"""
if ( self.isC... | Collapses this splitter handle before or after other widgets based on \
the inputed CollapseDirection.
:param direction | <XSplitterHandle.CollapseDirection>
:return <bool> | success |
def tail(
self, line_prefix=None, callback=None, output_callback=None, stop_callback=lambda x: False,
timeout=None
):
"""
This function takes control of an SSH channel and displays line
by line of output as \n is recieved. This function is specifically
made for tail-... | This function takes control of an SSH channel and displays line
by line of output as \n is recieved. This function is specifically
made for tail-like commands.
:param line_prefix: Text to append to the left of each line of output.
This is especially useful if you ar... |
def setParametersFromFile(dna, filename, parameters=None, bp=None):
"""Read a specific parameter from the do_x3dna output file.
It automatically load the input parameter from a file to dna object or HDF5 file.
It automatically decides from input parameter, what will be format of input file.
Parameters
... | Read a specific parameter from the do_x3dna output file.
It automatically load the input parameter from a file to dna object or HDF5 file.
It automatically decides from input parameter, what will be format of input file.
Parameters
----------
dna : :class:`DNA`
Input :class:`DNA` instance.
... |
def load_project_definition(path: str) -> dict:
"""
Load the cauldron.json project definition file for the given path. The
path can be either a source path to the cauldron.json file or the source
directory where a cauldron.json file resides.
:param path:
The source path or directory where t... | Load the cauldron.json project definition file for the given path. The
path can be either a source path to the cauldron.json file or the source
directory where a cauldron.json file resides.
:param path:
The source path or directory where the definition file will be loaded |
def reply_message(self, message_url, body):
"""回复某条站内消息
:param message_url: 该条消息的页面 URL
:param body: 内容(不能超过 1024 个字符)
"""
id = re.findall(r'(\d+)/?$', message_url)[0]
api = 'http://www.shanbay.com/api/v1/message/%s/reply/'
url = api % id
data = {
... | 回复某条站内消息
:param message_url: 该条消息的页面 URL
:param body: 内容(不能超过 1024 个字符) |
def blockvisit(self, nodes, frame):
"""Visit a list of nodes as block in a frame. If the current frame
is no buffer a dummy ``if 0: yield None`` is written automatically
unless the force_generator parameter is set to False.
"""
if frame.buffer is None:
self.writeline... | Visit a list of nodes as block in a frame. If the current frame
is no buffer a dummy ``if 0: yield None`` is written automatically
unless the force_generator parameter is set to False. |
def _check_pillar(kwargs, pillar=None):
'''
Check the pillar for errors, refuse to run the state if there are errors
in the pillar and return the pillar errors
'''
if kwargs.get('force'):
return True
pillar_dict = pillar if pillar is not None else __pillar__
if '_errors' in pillar_di... | Check the pillar for errors, refuse to run the state if there are errors
in the pillar and return the pillar errors |
def _generate_create_dict(self,
hostname=None,
domain=None,
flavor=None,
router=None,
datacenter=None,
hourly=True):
"""Translates a... | Translates args into a dictionary for creating a dedicated host. |
def get_gender(data):
    """Retrieve gender from metadata, codified as male/female/unknown.

    :param data: sample metadata (interrogated via ``dd.get_gender``) —
        presumably a bcbio data dict; confirm against callers.
    :return: one of ``"male"``, ``"female"`` or ``"unknown"``.
    """
    # dd.get_gender may return None or non-string codes; stringify and
    # lowercase exactly once instead of re-converting per branch.
    g = str(dd.get_gender(data)).lower()
    if g in ("male", "m", "1"):
        return "male"
    if g in ("female", "f", "2"):
        return "female"
    return "unknown"
def write_yum_repo(content, filename='ceph.repo'):
    """Add a yum repo file in /etc/yum.repos.d/.

    :param content: repo file contents as ``str`` or ``bytes``.
    :param filename: file name to create under /etc/yum.repos.d.
    """
    repo_path = os.path.join('/etc/yum.repos.d', filename)
    # write_file is given bytes: encode str input once; bytes input is
    # passed through unchanged instead of the original's wasteful
    # decode('utf-8')/encode('utf-8') round-trip, which also raised
    # UnicodeDecodeError for non-UTF-8 bytes for no benefit.
    if isinstance(content, str):
        content = content.encode('utf-8')
    write_file(repo_path, content)
def task_add(self, description, tags=None, **kw):
""" Add a new task.
Takes any of the keywords allowed by taskwarrior like proj or prior.
"""
task = self._stub_task(description, tags, **kw)
# Check if there are annotations, if so remove them from the
# task and add the... | Add a new task.
Takes any of the keywords allowed by taskwarrior like proj or prior. |
def collect_results():
"""Runs all platforms/backends/benchmarks and returns as list of
BenchmarkResults, sorted by benchmark and time taken.
"""
results = []
for exe, backendname in EXE_BACKEND_MATRIX:
results.extend(benchmark_process_and_backend(exe, backendname))
results.extend(benchm... | Runs all platforms/backends/benchmarks and returns as list of
BenchmarkResults, sorted by benchmark and time taken. |
def phases_with(self, **kwargs) -> [PhaseOutput]:
"""
Filters phases. If no arguments are passed all phases are returned. Arguments must be key value pairs, with
phase, data or pipeline as the key.
Parameters
----------
kwargs
Filters, e.g. pipeline=pipeline1... | Filters phases. If no arguments are passed all phases are returned. Arguments must be key value pairs, with
phase, data or pipeline as the key.
Parameters
----------
kwargs
Filters, e.g. pipeline=pipeline1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.