code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def cancel_completion(self):
"""Cancel the completion
should be called when the completer have to be dismissed
This reset internal variable, clearing the temporary buffer
of the console where the completion are shown.
"""
self._consecutive_tab = 0
self._slice_st... | Cancel the completion
should be called when the completer have to be dismissed
This reset internal variable, clearing the temporary buffer
of the console where the completion are shown. |
def commitreturn(self, cursor, qstring, vals=()):
"careful: don't pass cursor (it's from decorator)"
cursor.execute(qstring, vals)
return cursor.fetchall()[0] | careful: don't pass cursor (it's from decorator) |
def _string_to_sign(item, table_name, attribute_actions):
# type: (dynamodb_types.ITEM, Text, AttributeActions) -> bytes
"""Generate the string to sign from an encrypted item and configuration.
:param dict item: Encrypted DynamoDB item
:param str table_name: Table name to use when generating the string... | Generate the string to sign from an encrypted item and configuration.
:param dict item: Encrypted DynamoDB item
:param str table_name: Table name to use when generating the string to sign
:param AttributeActions attribute_actions: Actions to take for item |
def plot_correlation(self, freq=None, title=None,
figsize=(12, 6), **kwargs):
"""
Utility function to plot correlations.
Args:
* freq (str): Pandas data frequency alias string
* title (str): Plot title
* figsize (tuple (x,y)): figure ... | Utility function to plot correlations.
Args:
* freq (str): Pandas data frequency alias string
* title (str): Plot title
* figsize (tuple (x,y)): figure size
* kwargs: passed to Pandas' plot_corr_heatmap function |
def verify(self, key):
"""
Verifies a signature on a certificate request.
:param PKey key: The public key that signature is supposedly from.
:return: ``True`` if the signature is correct.
:rtype: bool
:raises OpenSSL.crypto.Error: If the signature is invalid, or there ... | Verifies a signature on a certificate request.
:param PKey key: The public key that signature is supposedly from.
:return: ``True`` if the signature is correct.
:rtype: bool
:raises OpenSSL.crypto.Error: If the signature is invalid, or there was
a problem verifying the sig... |
def JUMPI(self, dest, cond):
"""Conditionally alter the program counter"""
self.pc = Operators.ITEBV(256, cond != 0, dest, self.pc + self.instruction.size)
#This set ups a check for JMPDEST in the next instruction if cond != 0
self._set_check_jmpdest(cond != 0) | Conditionally alter the program counter |
def deobfuscate(cls, data):
"""
Reverses the obfuscation done by the :meth:`obfuscate` method.
If an identifier arrives without correct base64 padding this
function will append it to the end.
"""
# the str() call is necessary to convert the unicode string
# to an ... | Reverses the obfuscation done by the :meth:`obfuscate` method.
If an identifier arrives without correct base64 padding this
function will append it to the end. |
def write_pdf(pdf_obj, destination):
"""
Write PDF object to file
:param pdf_obj: PDF object to be written to file
:param destination: Desintation path
"""
reader = PdfFileReader(pdf_obj) # Create new PDF object
writer = PdfFileWriter()
page_count = reader.getNumPages()
# add the ... | Write PDF object to file
:param pdf_obj: PDF object to be written to file
:param destination: Desintation path |
def alpha_(self,x):
""" Create a mappable function alpha to apply to each xmin in a list of xmins.
This is essentially the slow version of fplfit/cplfit, though I bet it could
be speeded up with a clever use of parellel_map. Not intended to be used by users."""
def alpha(xmin,x=x):
... | Create a mappable function alpha to apply to each xmin in a list of xmins.
This is essentially the slow version of fplfit/cplfit, though I bet it could
be speeded up with a clever use of parellel_map. Not intended to be used by users. |
def resize(self, new_data_size):
"""Resize the file and update the chunk sizes"""
resize_bytes(
self.__fileobj, self.data_size, new_data_size, self.data_offset)
self._update_size(new_data_size) | Resize the file and update the chunk sizes |
def SdkSetup(self):
"""
Microsoft Windows SDK Setup
"""
if self.vc_ver > 9.0:
return []
return [os.path.join(self.si.WindowsSdkDir, 'Setup')] | Microsoft Windows SDK Setup |
def post_mortem(tb=None, host='', port=5555, patch_stdstreams=False):
"""
Start post-mortem debugging for the provided traceback object
If no traceback is provided the debugger tries to obtain a traceback
for the last unhandled exception.
Example::
try:
# Some error-prone code... | Start post-mortem debugging for the provided traceback object
If no traceback is provided the debugger tries to obtain a traceback
for the last unhandled exception.
Example::
try:
# Some error-prone code
assert ham == spam
except:
web_pdb.post_mortem()
... |
def create(text,score,prompt_string, dump_data=False):
"""
Creates a machine learning model from input text, associated scores, a prompt, and a path to the model
TODO: Remove model path argument, it is needed for now to support legacy code
text - A list of strings containing the text of the essays
s... | Creates a machine learning model from input text, associated scores, a prompt, and a path to the model
TODO: Remove model path argument, it is needed for now to support legacy code
text - A list of strings containing the text of the essays
score - a list of integers containing score values
prompt_string... |
def _expand_json(self, j):
"""Decompress the BLOB portion of the usernotes.
Arguments:
j: the JSON returned from the wiki page (dict)
Returns a Dict with the 'blob' key removed and a 'users' key added
"""
decompressed_json = copy.copy(j)
decompressed_json.po... | Decompress the BLOB portion of the usernotes.
Arguments:
j: the JSON returned from the wiki page (dict)
Returns a Dict with the 'blob' key removed and a 'users' key added |
def resize(self, new_size):
"""
Resizes this disk. The Linode Instance this disk belongs to must have
sufficient space available to accommodate the new size, and must be
offline.
**NOTE** If resizing a disk down, the filesystem on the disk must still
fit on the new disk... | Resizes this disk. The Linode Instance this disk belongs to must have
sufficient space available to accommodate the new size, and must be
offline.
**NOTE** If resizing a disk down, the filesystem on the disk must still
fit on the new disk size. You may need to resize the filesystem on... |
def cli(ctx, config, quiet):
"""AWS ECS Docker Deployment Tool"""
ctx.obj = {}
ctx.obj['config'] = load_config(config.read()) # yaml.load(config.read())
ctx.obj['quiet'] = quiet
log(ctx, ' * ' + rnd_scotty_quote() + ' * ') | AWS ECS Docker Deployment Tool |
def populate(self, blueprint, documents):
"""Populate the database with documents"""
# Finish the documents
documents = self.finish(blueprint, documents)
# Convert the documents to frame instances
frames = []
for document in documents:
# Separate out any met... | Populate the database with documents |
def update(self):
"""Called before the listing renders
"""
super(AnalysisRequestsView, self).update()
self.workflow = api.get_tool("portal_workflow")
self.member = self.mtool.getAuthenticatedMember()
self.roles = self.member.getRoles()
setup = api.get_bika_setup... | Called before the listing renders |
def _create_entry(self, name, values, fbterm=False):
''' Render first values as string and place as first code,
save, and return attr.
'''
if fbterm:
attr = _PaletteEntryFBTerm(self, name.upper(), ';'.join(values))
else:
attr = _PaletteEntry(self, name... | Render first values as string and place as first code,
save, and return attr. |
def welcome_if_new(self, node):
"""
Given a new node, send it all the keys/values it should be storing,
then add it to the routing table.
@param node: A new node that just joined (or that we just found out
about).
Process:
For each key in storage, get k closest ... | Given a new node, send it all the keys/values it should be storing,
then add it to the routing table.
@param node: A new node that just joined (or that we just found out
about).
Process:
For each key in storage, get k closest nodes. If newnode is closer
than the furthe... |
def run(self):
r"""
Overrides the default run() method.
Performs the complete analysis on the model specified during initialisation.
:return: an ODE problem which can be further used in inference and simulation.
:rtype: :class:`~means.core.problems.ODEProblem`
"""... | r"""
Overrides the default run() method.
Performs the complete analysis on the model specified during initialisation.
:return: an ODE problem which can be further used in inference and simulation.
:rtype: :class:`~means.core.problems.ODEProblem` |
def geo_area(arg, use_spheroid=None):
"""
Compute area of a geo spatial data
Parameters
----------
arg : geometry or geography
use_spheroid: default None
Returns
-------
area : double scalar
"""
op = ops.GeoArea(arg, use_spheroid)
return op.to_expr() | Compute area of a geo spatial data
Parameters
----------
arg : geometry or geography
use_spheroid: default None
Returns
-------
area : double scalar |
def random_string_array(max_len=1, min_len=1,
elem_max_len=1, elem_min_len=1,
strings=string.ascii_letters, **kwargs):
"""
:param max_len: max value of len(array)
:param min_len: min value of len(array)
:param elem_max_len: max valu... | :param max_len: max value of len(array)
:param min_len: min value of len(array)
:param elem_max_len: max value of len(array[index])
:param elem_min_len: min value of len(array[index])
:param strings: allowed string characters in each element of array,
or predefined list of string... |
def ParseContainersTable(
self, parser_mediator, database=None, table=None, **unused_kwargs):
"""Parses the Containers table.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
database (Optional[pyesedb.fil... | Parses the Containers table.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
database (Optional[pyesedb.file]): ESE database.
table (Optional[pyesedb.table]): table.
Raises:
ValueError: if the data... |
def clear(self):
"""Clear current state."""
# Adapted from http://stackoverflow.com/a/13103617/1198772
for i in reversed(list(range(self.extra_keywords_layout.count()))):
self.extra_keywords_layout.itemAt(i).widget().setParent(None)
self.widgets_dict = OrderedDict() | Clear current state. |
def delete_key(self, key_to_delete):
"""Deletes the specified key
:param key_to_delete:
:return:
"""
log = logging.getLogger(self.cls_logger + '.delete_key')
log.info('Attempting to delete key: {k}'.format(k=key_to_delete))
try:
self.s3client.delete_... | Deletes the specified key
:param key_to_delete:
:return: |
def query_boost_version(boost_root):
'''
Read in the Boost version from a given boost_root.
'''
boost_version = None
if os.path.exists(os.path.join(boost_root,'Jamroot')):
with codecs.open(os.path.join(boost_root,'Jamroot'), 'r', 'utf-8') as f:
for lin... | Read in the Boost version from a given boost_root. |
def merge_conf(to_hash, other_hash, path=[]):
"merges other_hash into to_hash"
for key in other_hash:
if (key in to_hash and isinstance(to_hash[key], dict)
and isinstance(other_hash[key], dict)):
merge_conf(to_hash[key], other_hash[key], path + [str(key)])
else:
... | merges other_hash into to_hash |
def split_python_text_into_lines(text):
"""
# TODO: make it so this function returns text so one statment is on one
# line that means no splitting up things like function definitions into
# multiple lines
"""
#import jedi
#script = jedi.Script(text, line=1, column=None, path='')
def pare... | # TODO: make it so this function returns text so one statment is on one
# line that means no splitting up things like function definitions into
# multiple lines |
def load_dict(self, source, namespace='', make_namespaces=False):
''' Import values from a dictionary structure. Nesting can be used to
represent namespaces.
>>> ConfigDict().load_dict({'name': {'space': {'key': 'value'}}})
{'name.space.key': 'value'}
'''
sta... | Import values from a dictionary structure. Nesting can be used to
represent namespaces.
>>> ConfigDict().load_dict({'name': {'space': {'key': 'value'}}})
{'name.space.key': 'value'} |
def list_enrollment_claims(self, **kwargs):
"""List"""
kwargs = self._verify_sort_options(kwargs)
kwargs = self._verify_filters(kwargs, EnrollmentClaim)
api = self._get_api(enrollment.PublicAPIApi)
return PaginatedResponse(
api.get_device_enrollments,
lwra... | List |
def to_dict(self, remove_nones=False):
"""Return a dict representation of the `DidlResource`.
Args:
remove_nones (bool, optional): Optionally remove dictionary
elements when their value is `None`.
Returns:
dict: a dict representing the `DidlResource`
... | Return a dict representation of the `DidlResource`.
Args:
remove_nones (bool, optional): Optionally remove dictionary
elements when their value is `None`.
Returns:
dict: a dict representing the `DidlResource` |
def interleave(*args):
'''Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple verte... | Interleaves the elements of the provided arrays.
>>> a = [(0, 0), (1, 0), (2, 0), (3, 0)]
>>> b = [(0, 0), (0, 1), (0, 2), (0, 3)]
>>> interleave(a, b)
[(0, 0, 0, 0), (1, 0, 0, 1), (2, 0, 0, 2), (3, 0, 0, 3)]
This is useful for combining multiple vertex attributes into a single
... |
def wait_port_open(server, port, timeout=None):
""" Wait for network service to appear
@param server: host to connect to (str)
@param port: port (int)
@param timeout: in seconds, if None or 0 wait forever
@return: True of False, if timeout is None may return only True or
... | Wait for network service to appear
@param server: host to connect to (str)
@param port: port (int)
@param timeout: in seconds, if None or 0 wait forever
@return: True of False, if timeout is None may return only True or
throw unhandled network exception |
def get_image(self, size=SIZE_EXTRA_LARGE):
"""
Returns the user's avatar
size can be one of:
SIZE_EXTRA_LARGE
SIZE_LARGE
SIZE_MEDIUM
SIZE_SMALL
"""
doc = self._request(self.ws_prefix + ".getInfo", True)
return _extract_al... | Returns the user's avatar
size can be one of:
SIZE_EXTRA_LARGE
SIZE_LARGE
SIZE_MEDIUM
SIZE_SMALL |
def _handle_pagerange(pagerange):
"""
Yields start and end pages from DfR pagerange field.
Parameters
----------
pagerange : str or unicode
DfR-style pagerange, e.g. "pp. 435-444".
Returns
-------
start : str
Start page.
end : str
End page.
"""
try:... | Yields start and end pages from DfR pagerange field.
Parameters
----------
pagerange : str or unicode
DfR-style pagerange, e.g. "pp. 435-444".
Returns
-------
start : str
Start page.
end : str
End page. |
def add_port_profile(self, profile_name, vlan_id, device_id):
"""Adds a port profile and its vlan_id to the table."""
if not self.get_port_profile_for_vlan(vlan_id, device_id):
port_profile = ucsm_model.PortProfile(profile_id=profile_name,
vl... | Adds a port profile and its vlan_id to the table. |
def ConsultarCertificacionUltNroOrden(self, pto_emision=1):
"Consulta el último No de orden registrado para CG"
ret = self.client.cgConsultarUltimoNroOrden(
auth={
'token': self.Token, 'sign': self.Sign,
'cuit': self.Cuit, },
... | Consulta el último No de orden registrado para CG |
def mbar_log_W_nk(u_kn, N_k, f_k):
"""Calculate the log weight matrix.
Parameters
----------
u_kn : np.ndarray, shape=(n_states, n_samples), dtype='float'
The reduced potential energies, i.e. -log unnormalized probabilities
N_k : np.ndarray, shape=(n_states), dtype='int'
The number ... | Calculate the log weight matrix.
Parameters
----------
u_kn : np.ndarray, shape=(n_states, n_samples), dtype='float'
The reduced potential energies, i.e. -log unnormalized probabilities
N_k : np.ndarray, shape=(n_states), dtype='int'
The number of samples in each state
f_k : np.ndar... |
def _enum_member_error(err, eid, name, value, bitmask):
"""Format enum member error."""
exception, msg = ENUM_ERROR_MAP[err]
enum_name = idaapi.get_enum_name(eid)
return exception(('add_enum_member(enum="{}", member="{}", value={}, bitmask=0x{:08X}) '
'failed: {}').format(
... | Format enum member error. |
def set_next_week_day(val, week_day, iso=False):
"""
Set week day.
New date will be greater or equal than input date.
:param val: datetime or date
:type val: datetime.datetime | datetime.date
:param week_day: Week day to set
:type week_day: int
:param iso: week_day in ISO format, or not
... | Set week day.
New date will be greater or equal than input date.
:param val: datetime or date
:type val: datetime.datetime | datetime.date
:param week_day: Week day to set
:type week_day: int
:param iso: week_day in ISO format, or not
:type iso: bool
:return: datetime.datetime | datetime... |
def _register_bindings(self, data):
"""
connection_handler method which is called when we connect to pusher.
Responsible for binding callbacks to channels before we connect.
:return:
"""
self._register_diff_order_book_channels()
self._register_live_orders_channels... | connection_handler method which is called when we connect to pusher.
Responsible for binding callbacks to channels before we connect.
:return: |
def blend_html_colour_to_white(html_colour, alpha):
"""
:param html_colour: Colour string like FF552B or #334455
:param alpha: Alpha value
:return: Html colour alpha blended onto white
"""
html_colour = html_colour.upper()
has_hash = False
if html_colour[0] == '#':
has_hash = Tru... | :param html_colour: Colour string like FF552B or #334455
:param alpha: Alpha value
:return: Html colour alpha blended onto white |
def classify(self, text=u''):
""" Predicts the Language of a given text.
:param text: Unicode text to be classified.
"""
text = self.lm.normalize(text)
tokenz = LM.tokenize(text, mode='c')
result = self.lm.calculate(doc_terms=tokenz)
#print 'Karbasa:', self.... | Predicts the Language of a given text.
:param text: Unicode text to be classified. |
def update_resources_from_resfile(self, srcpath, types=None, names=None,
languages=None):
"""
Update or add resources from dll/exe file srcpath.
types = a list of resource types to update (None = all)
names = a list of resource names to upd... | Update or add resources from dll/exe file srcpath.
types = a list of resource types to update (None = all)
names = a list of resource names to update (None = all)
languages = a list of resource languages to update (None = all) |
def mount_status_send(self, target_system, target_component, pointing_a, pointing_b, pointing_c, force_mavlink1=False):
'''
Message with some status from APM to GCS about camera or antenna mount
target_system : System ID (uint8_t)
target_compo... | Message with some status from APM to GCS about camera or antenna mount
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
pointing_a : pitch(deg*100) (int32_t)
pointing_b : roll... |
def set(cls, obj, keys, value, fill_list_value=None):
"""
sets the value for the given keys on obj. if any of the given
keys does not exist, create the intermediate containers.
"""
current = obj
keys_list = keys.split(".")
for idx, key in enumerate(keys_list, 1):... | sets the value for the given keys on obj. if any of the given
keys does not exist, create the intermediate containers. |
def first_spark_call():
"""
Return a CallSite representing the first Spark call in the current call stack.
"""
tb = traceback.extract_stack()
if len(tb) == 0:
return None
file, line, module, what = tb[len(tb) - 1]
sparkpath = os.path.dirname(file)
first_spark_frame = len(tb) - 1
... | Return a CallSite representing the first Spark call in the current call stack. |
def is_recording():
"""Get status on recording/not recording.
Returns
-------
Current state of recording.
"""
curr = ctypes.c_bool()
check_call(_LIB.MXAutogradIsRecording(ctypes.byref(curr)))
return curr.value | Get status on recording/not recording.
Returns
-------
Current state of recording. |
def uuids(self):
""" Extract uuid from each item of specified ``seq``.
"""
for f in self._seq:
if isinstance(f, File):
yield f.uuid
elif isinstance(f, six.string_types):
yield f
else:
raise ValueError(
... | Extract uuid from each item of specified ``seq``. |
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
... | Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kw... |
def loop(self, *tags):
""" Iterates over the tags in the entire Sentence,
For example, Sentence.loop(POS, LEMMA) yields tuples of the part-of-speech tags and lemmata.
Possible tags: WORD, LEMMA, POS, CHUNK, PNP, RELATION, ROLE, ANCHOR or a custom word tag.
Any order or combi... | Iterates over the tags in the entire Sentence,
For example, Sentence.loop(POS, LEMMA) yields tuples of the part-of-speech tags and lemmata.
Possible tags: WORD, LEMMA, POS, CHUNK, PNP, RELATION, ROLE, ANCHOR or a custom word tag.
Any order or combination of tags can be supplied. |
def InitSiteCheck(self):
"""
make an interactive grid in which users can edit site names
as well as which location a site belongs to
"""
# propagate average lat/lon info from samples table if
# available in samples and missing in sites
self.contribution.propagate_... | make an interactive grid in which users can edit site names
as well as which location a site belongs to |
def failed_hosts(self) -> Dict[str, "MultiResult"]:
"""
Hosts that failed to complete the task
"""
return {k: v for k, v in self.result.items() if v.failed} | Hosts that failed to complete the task |
def run(command, *args):
""" run command """
# show all clusters
if command == 'clusters':
return clusters.run(command, *args)
# show topologies
elif command == 'topologies':
return topologies.run(command, *args)
# physical plan
elif command == 'containers':
return physicalplan.run_container... | run command |
def inverse_gaussian_gradient(image, alpha=100.0, sigma=5.0):
"""Inverse of gradient magnitude.
Compute the magnitude of the gradients in the image and then inverts the
result in the range [0, 1]. Flat areas are assigned values close to 1,
while areas close to borders are assigned values close to 0.
... | Inverse of gradient magnitude.
Compute the magnitude of the gradients in the image and then inverts the
result in the range [0, 1]. Flat areas are assigned values close to 1,
while areas close to borders are assigned values close to 0.
This function or a similar one defined by the user should be appli... |
def update_ssl_termination(self, securePort=None, enabled=None,
secureTrafficOnly=None):
"""
Updates existing SSL termination information for the load balancer
without affecting the existing certificates/keys.
"""
return self.manager.update_ssl_termination(self, secur... | Updates existing SSL termination information for the load balancer
without affecting the existing certificates/keys. |
def handle_left_double_click(self, info):
"""Whatever we want to do, when the VideoWidget has been double-clicked with the left button
"""
if (self.double_click_focus == False): # turn focus on
print(self.pre, "handle_left_double_click: focus on")
self.cb_focus()
... | Whatever we want to do, when the VideoWidget has been double-clicked with the left button |
def _parse_guild_disband_info(self, info_container):
"""
Parses the guild's disband info, if available.
Parameters
----------
info_container: :class:`bs4.Tag`
The parsed content of the information container.
"""
m = disband_regex.search(info_container... | Parses the guild's disband info, if available.
Parameters
----------
info_container: :class:`bs4.Tag`
The parsed content of the information container. |
def register(self, resource_class, content_type, configuration=None):
"""
Registers a representer factory for the given combination of resource
class and content type.
:param configuration: representer configuration. A default instance
will be created if this is not given.
... | Registers a representer factory for the given combination of resource
class and content type.
:param configuration: representer configuration. A default instance
will be created if this is not given.
:type configuration:
:class:`everest.representers.config.RepresenterConfi... |
def speaker_durations(utterances: List[Utterance]) -> List[Tuple[str, int]]:
""" Takes a list of utterances and itemizes them by speaker, returning a
list of tuples of the form (Speaker Name, duration).
"""
speaker_utters = make_speaker_utters(utterances)
speaker_duration_tuples = [] # type: List[... | Takes a list of utterances and itemizes them by speaker, returning a
list of tuples of the form (Speaker Name, duration). |
def sof(self):
"""
First start of frame (SOFn) marker in this sequence.
"""
for m in self._markers:
if m.marker_code in JPEG_MARKER_CODE.SOF_MARKER_CODES:
return m
raise KeyError('no start of frame (SOFn) marker in image') | First start of frame (SOFn) marker in this sequence. |
def execute(self, env, args):
""" Creates a new task.
`env`
Runtime ``Environment`` instance.
`args`
Arguments object from arg parser.
"""
task_name = args.task_name
clone_task = args.clone_task
if not env.task.create... | Creates a new task.
`env`
Runtime ``Environment`` instance.
`args`
Arguments object from arg parser. |
def swap_twitter_subject(subject, body):
"""If subject starts from 'Tweet from...'
then we need to get first meaning line from the body."""
if subject.startswith('Tweet from'):
lines = body.split('\n')
for idx, line in enumerate(lines):
if re.match(r'.*, ?\d{2}:\d{2}]]', line) i... | If subject starts from 'Tweet from...'
then we need to get first meaning line from the body. |
def get_profile_dir ():
"""Return path where all profiles of current user are stored."""
if os.name == 'nt':
basedir = unicode(os.environ["APPDATA"], nt_filename_encoding)
dirpath = os.path.join(basedir, u"Mozilla", u"Firefox", u"Profiles")
elif os.name == 'posix':
basedir = unicode(... | Return path where all profiles of current user are stored. |
def setItemStyle(self, itemStyle):
"""
Sets the item style that will be used for this widget. If you are
trying to set a style on an item that has children, make sure to turn
off the useGroupStyleWithChildren option, or it will always display as
a group.
... | Sets the item style that will be used for this widget. If you are
trying to set a style on an item that has children, make sure to turn
off the useGroupStyleWithChildren option, or it will always display as
a group.
:param itemStyle | <XGanttWidgetItem.ItemStyle> |
def clamped(self, point_or_rect):
"""
Returns the point or rectangle clamped to this rectangle.
"""
if isinstance(point_or_rect, Rect):
return Rect(np.minimum(self.mins, point_or_rect.mins),
np.maximum(self.maxes, point_or_rect.maxes))
return n... | Returns the point or rectangle clamped to this rectangle. |
def _bracket_exact_exec(self, symbol):
"""Checks builtin, local and global executable collections for the
specified symbol and returns it as soon as it is found."""
if symbol in self.context.module.executables:
return self.context.module.executables[symbol]
if symbol in self... | Checks builtin, local and global executable collections for the
specified symbol and returns it as soon as it is found. |
def save_project(self, project):
""" Called when project is saved/updated. """
pid = project.pid
# project created
# project updated
if project.is_active:
# project is not deleted
logger.debug("project is active")
ds_project = self.get_projec... | Called when project is saved/updated. |
def vline_score(self, x, ymin, ymax):
"""Returns the number of unbroken paths of qubits
>>> [(x,y,1,k) for y in range(ymin,ymax+1)]
for :math:`k = 0,1,\cdots,L-1`. This is precomputed for speed.
"""
return self._vline_score[x, ymin, ymax] | Returns the number of unbroken paths of qubits
>>> [(x,y,1,k) for y in range(ymin,ymax+1)]
for :math:`k = 0,1,\cdots,L-1`. This is precomputed for speed. |
def convert_to(obj, ac_ordered=False, ac_dict=None, **options):
"""
Convert a mapping objects to a dict or object of 'to_type' recursively.
Borrowed basic idea and implementation from bunch.unbunchify. (bunch is
distributed under MIT license same as this.)
:param obj: A mapping objects or other pri... | Convert a mapping objects to a dict or object of 'to_type' recursively.
Borrowed basic idea and implementation from bunch.unbunchify. (bunch is
distributed under MIT license same as this.)
:param obj: A mapping objects or other primitive object
:param ac_ordered: Use OrderedDict instead of dict to keep... |
def get_child_repository_ids(self, repository_id):
"""Gets the ``Ids`` of the children of the given repository.
arg: repository_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the repository
raise: NotFound - ``repository_id`` not found
raise:... | Gets the ``Ids`` of the children of the given repository.
arg: repository_id (osid.id.Id): the ``Id`` to query
return: (osid.id.IdList) - the children of the repository
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` is ``null``
raise: ... |
def _preoptimize_model(self, initials, method):
""" Preoptimizes the model by estimating a static model, then a quick search of good dynamic parameters
Parameters
----------
initials : np.array
A vector of inital values
method : str
One of 'MLE' or 'PML'... | Preoptimizes the model by estimating a static model, then a quick search of good dynamic parameters
Parameters
----------
initials : np.array
A vector of inital values
method : str
One of 'MLE' or 'PML' (the optimization options)
Returns
-------... |
def getcellvalue(self, window_name, object_name, row_index, column=0):
"""
Get cell value
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either fu... | Get cell value
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type object_name: string... |
def batched_expiration_maintenance(self, elapsed_time):
""" Batched version of expiration_maintenance()
Cython version
"""
num_iterations = self.num_batched_maintenance(elapsed_time)
self.refresh_head, nonzero = maintenance(self.cellarray, self.nbr_bits, num_iterations, self.... | Batched version of expiration_maintenance()
Cython version |
def normalize_arxiv_category(category):
"""Normalize arXiv category to be schema compliant.
This properly capitalizes the category and replaces the dash by a dot if
needed. If the category is obsolete, it also gets converted it to its
current equivalent.
Example:
>>> from inspire_schemas.u... | Normalize arXiv category to be schema compliant.
This properly capitalizes the category and replaces the dash by a dot if
needed. If the category is obsolete, it also gets converted it to its
current equivalent.
Example:
>>> from inspire_schemas.utils import normalize_arxiv_category
>>... |
def func_from_info(self):
"""Find and return a callable object from a task info dictionary"""
info = self.funcinfo
functype = info['func_type']
if functype in ['instancemethod', 'classmethod', 'staticmethod']:
the_modelclass = get_module_member_by_dottedpath(info['class_path'... | Find and return a callable object from a task info dictionary |
def exit_fullscreen(self):
    """Leave fullscreen mode and restore the normal cursor.

    Invoke before printing out anything.
    This method should be replaced by or merged to blessings package.
    """
    stream = self.term.stream
    # Emit the two terminal escape sequences in order: leave the alternate
    # screen first, then make the cursor visible again.
    for sequence in (self.term.exit_fullscreen, self.term.normal_cursor):
        stream.write(sequence)
This method should be replaced by or merged to blessings package |
def has_successor(self, u, v, t=None):
"""Return True if node u has successor v at time t (optional).
This is true if graph has the edge u->v.
Parameters
----------
u, v : nodes
Nodes can be, for example, strings or numbers.
Nodes must be hashable (and n... | Return True if node u has successor v at time t (optional).
This is true if graph has the edge u->v.
Parameters
----------
u, v : nodes
Nodes can be, for example, strings or numbers.
Nodes must be hashable (and not None) Python objects.
t : snapshot id (... |
def one_or_more(
schema: dict, unique_items: bool = True, min: int = 1, max: int = None
) -> dict:
"""
Helper function to construct a schema that validates items matching
`schema` or an array containing items matching `schema`.
:param schema: The schema to use
:param unique_items: Flag if array... | Helper function to construct a schema that validates items matching
`schema` or an array containing items matching `schema`.
:param schema: The schema to use
:param unique_items: Flag if array items should be unique
:param min: Correlates to ``minLength`` attribute of JSON Schema array
:param max: ... |
def _calc_inst_pmf(self):
"""Calculate the epsilon-greedy instrumental distribution"""
# Easy vars
t = self.t_
epsilon = self.epsilon
alpha = self.alpha
preds = self._preds_avg_in_strata
weights = self.strata.weights_[:,np.newaxis]
p1 = self._BB_model.thet... | Calculate the epsilon-greedy instrumental distribution |
def reloadFileAtIndex(self, itemIndex, rtiClass=None):
""" Reloads the item at the index by removing the repo tree item and inserting a new one.
The new item will have by of type rtiClass. If rtiClass is None (the default), the
new rtiClass will be the same as the old one.
"""
... | Reloads the item at the index by removing the repo tree item and inserting a new one.
The new item will be of type rtiClass. If rtiClass is None (the default), the
new rtiClass will be the same as the old one. |
def update_config(self):
"""
Update the configuration files according to the current
in-memory SExtractor configuration.
"""
# -- Write filter configuration file
# First check the filter itself
filter = self.config['FILTER_MASK']
rows = len(filter)
... | Update the configuration files according to the current
in-memory SExtractor configuration. |
def create_reserved_ip_address(self, name, label=None, location=None):
'''
Reserves an IPv4 address for the specified subscription.
name:
Required. Specifies the name for the reserved IP address.
label:
Optional. Specifies a label for the reserved IP address. The... | Reserves an IPv4 address for the specified subscription.
name:
Required. Specifies the name for the reserved IP address.
label:
Optional. Specifies a label for the reserved IP address. The label
can be up to 100 characters long and can be used for your tracking
... |
def getAllMetadata(self, remote, address):
    """Get all metadata of device.

    Delegates to the connected server; returns None when no server is
    available (same as the original's implicit fall-through).
    """
    server = self._server
    if server is None:
        return None
    return server.getAllMetadata(remote, address)
def detect(self, text):
"""Detect language of the input text
:param text: The source text(s) whose language you want to identify.
Batch detection is supported via sequence input.
:type text: UTF-8 :class:`str`; :class:`unicode`; string sequence (list, tuple, iterator, gener... | Detect language of the input text
:param text: The source text(s) whose language you want to identify.
Batch detection is supported via sequence input.
:type text: UTF-8 :class:`str`; :class:`unicode`; string sequence (list, tuple, iterator, generator)
:rtype: Detected
... |
def _format_disk_metrics(self, metrics):
"""Cast the disk stats to float and convert them to bytes"""
for name, raw_val in metrics.iteritems():
if raw_val:
match = DISK_STATS_RE.search(raw_val)
if match is None or len(match.groups()) != 2:
... | Cast the disk stats to float and convert them to bytes |
def save(self, filename, config):
    """Save a config to disk as pretty-printed, key-sorted JSON.

    :param filename: Destination path; a leading ``~`` is expanded.
    :param config: JSON-serializable configuration object
        (serialized via the project's HCEncoder).
    :return: Number of characters written.
    """
    serialized = json.dumps(config, cls=HCEncoder, sort_keys=True,
                            indent=2, separators=(',', ': '))
    # Context manager guarantees the handle is flushed and closed;
    # the original left the file object dangling.
    with open(os.path.expanduser(filename), 'w') as handle:
        return handle.write(serialized)
def to_fixed(stype):
""" Returns the instruction sequence for converting the given
type stored in DE,HL to fixed DE,HL.
"""
output = [] # List of instructions
if is_int_type(stype):
output = to_word(stype)
output.append('ex de, hl')
output.append('ld hl, 0') # 'Truncate' t... | Returns the instruction sequence for converting the given
type stored in DE,HL to fixed DE,HL. |
def clear_history(pymux, variables):
    " Clear scrollback buffer. "
    active_pane = pymux.arrangement.get_active_pane()
    # Clearing is refused while the scroll buffer is being displayed.
    if active_pane.display_scroll_buffer:
        raise CommandException('Not available in copy mode')
    active_pane.process.screen.clear_history()
def bids_to_you(self):
'''
Get bids made to you
@return: [[player,owner,team,money,date,datechange,status],]
'''
headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain",'Referer': 'http://'+self.domain+'/team_news.phtml',"User-Agent": user_agent}
... | Get bids made to you
@return: [[player,owner,team,money,date,datechange,status],] |
def train(self, data_iterator):
"""Train a keras model on a worker and send asynchronous updates
to parameter server
"""
feature_iterator, label_iterator = tee(data_iterator, 2)
x_train = np.asarray([x for x, y in feature_iterator])
y_train = np.asarray([y for x, y in lab... | Train a keras model on a worker and send asynchronous updates
to parameter server |
def _detect(self):
""" Detect uninitialized storage variables
Recursively visit the calls
Returns:
dict: [contract name] = set(storage variable uninitialized)
"""
results = []
self.results = []
self.visited_all_paths = {}
for contract in sel... | Detect uninitialized storage variables
Recursively visit the calls
Returns:
dict: [contract name] = set(storage variable uninitialized) |
def ack(self, frame):
    """
    Handles the ACK command: Acknowledges receipt of a message.

    Raises ProtocolError when the frame carries no message-id.
    """
    if not frame.message_id:
        raise ProtocolError("No message-id specified for ACK command.")
    engine = self.engine
    engine.queue_manager.ack(engine.connection, frame)
def _non_idempotent_tasks(self, output):
"""
Parses the output to identify the non idempotent tasks.
:param (str) output: A string containing the output of the ansible run.
:return: A list containing the names of the non idempotent tasks.
"""
# Remove blank lines to make... | Parses the output to identify the non idempotent tasks.
:param (str) output: A string containing the output of the ansible run.
:return: A list containing the names of the non idempotent tasks. |
def columns_by_index(self) -> Dict[str, List[Well]]:
"""
Accessor function used to navigate through a labware by column name.
With indexing one can treat it as a typical python dictionary.
To access row A for example,
simply write: labware.columns_by_index()['1']
This wi... | Accessor function used to navigate through a labware by column name.
With indexing one can treat it as a typical python dictionary.
To access row A for example,
simply write: labware.columns_by_index()['1']
This will output ['A1', 'B1', 'C1', 'D1'...].
:return: Dictionary of We... |
def flip_iterable_dict(d: dict) -> dict:
    """Transform dictionary to unpack values to map to respective key.

    NOTE(review): relies on project helpers ``cartesian_product`` /
    ``disjoint_union``; presumably each value in ``d[k]`` becomes a key
    mapped back to ``k`` — confirm against the helpers' definitions.
    """
    flipped_pairs = disjoint_union(
        cartesian_product((values, key)) for key, values in d.items()
    )
    return dict(flipped_pairs)
def _get_log_covariance(self, log_variance_mat, log_expectation_symbols, covariance_matrix, x, y):
r"""
Compute log covariances according to:\\
:math:`\log{(Cov(x_i,x_j))} = \frac { 1 + Cov(x_i,x_j)}{\exp[\log \mathbb{E}(x_i) + \log \mathbb{E}(x_j)+\frac{1}{2} (\log Var(x_i) + \log Var(x_j)]}`
... | r"""
Compute log covariances according to:\\
:math:`\log{(Cov(x_i,x_j))} = \frac { 1 + Cov(x_i,x_j)}{\exp[\log \mathbb{E}(x_i) + \log \mathbb{E}(x_j)+\frac{1}{2} (\log Var(x_i) + \log Var(x_j)]}`
:param log_variance_mat: a column matrix of log variance
:param log_expectation_symbols: a... |
def _parse_xml(child, parser):
"""Parses the specified child XML tag and creates a Subroutine or
Function object out of it."""
name, modifiers, dtype, kind = _parse_common(child)
#Handle the symbol modification according to the isense settings.
name = _isense_builtin_symbol(name)
if child.tag ... | Parses the specified child XML tag and creates a Subroutine or
Function object out of it. |
def global_env_valid(env):
"""
Given an env, determine if it's a valid "global" or "mgmt" env as listed in EFConfig
Args:
env: the env to check
Returns:
True if the env is a valid global env in EFConfig
Raises:
ValueError with message if the env is not valid
"""
if env not in EFConfig.ACCOUNT_... | Given an env, determine if it's a valid "global" or "mgmt" env as listed in EFConfig
Args:
env: the env to check
Returns:
True if the env is a valid global env in EFConfig
Raises:
ValueError with message if the env is not valid |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.