code stringlengths 81 3.79k |
|---|
def current_docker_container_id():
try:
with open('/proc/1/cgroup', 'r') as readable:
raw = readable.read()
ids = set(re.compile('[0-9a-f]{12,}').findall(raw))
assert len(ids) == 1
return ids.pop()
except:
logging.exception('Failed to obtain current container ... |
def get_student_messaging_for_sis_course_id_and_sis_user_id(
self, sis_user_id, sis_course_id):
url = ("/api/v1/courses/%s/analytics/"
"users/sis_user_id:%s/communication.json") % (
self._sis_id(sis_course_id, sis_field="course"), sis_user_id)
return self._get_reso... |
def create_instance(self, instance_id, configuration_name, node_count,
                    display_name, project_id=None):
    """Create a new instance by delegating to ``_apply_to_instance``.

    The actual creation is performed by the callback handed in as the
    last argument, which calls ``create()`` on the prepared instance.
    """
    def _do_create(instance):
        # Trigger the real creation call on the prepared instance object.
        return instance.create()

    self._apply_to_instance(project_id, instance_id, configuration_name,
                            node_count, display_name, _do_create)
def remove_action(self, action_name, action_id):
    """Cancel and deregister the action identified by name and id.

    Returns True when a matching action was found, cancelled and removed;
    False when no such action exists.
    """
    found = self.get_action(action_name, action_id)
    if found is None:
        return False
    found.cancel()
    self.actions[action_name].remove(found)
    return True
def update_x(self, x, indices=None):
x = _make_np_bool(x)
if indices is None:
if len(self._x) != len(x):
raise QiskitError("During updating whole x, you can not change "
"the number of qubits.")
self._x = x
else:
... |
def _subgraph_parse(
self, node, pathnode, extra_blocks
):
loose_ends = []
self.tail = node
self.dispatch_list(node.body)
loose_ends.append(self.tail)
for extra in extra_blocks:
self.tail = node
self.dispatch_list(extra.body)
loose_... |
def set_constant(self, name, value):
assert isinstance(name, str) or isinstance(name, sympy.Symbol), \
"constant name needs to be of type str, unicode or a sympy.Symbol"
assert type(value) is int, "constant value needs to be of type int"
if isinstance(name, sympy.Symbol):
... |
def parse(cls, parser, token):
bits, as_var = parse_as_var(parser, token)
tag_name, args, kwargs = parse_token_kwargs(parser, bits, ('template',) + cls.allowed_kwargs, compile_args=cls.compile_args, compile_kwargs=cls.compile_kwargs)
cls.validate_args(tag_name, *args)
return cls(tag_name... |
def resolves_for(self, session):
if self.url:
self.actual_path = session.current_url
else:
result = urlparse(session.current_url)
if self.only_path:
self.actual_path = result.path
else:
request_uri = result.path
... |
def retrieve_image(self, path_to_image):
image = self.storage.open(path_to_image, 'rb')
file_ext = path_to_image.rsplit('.')[-1]
image_format, mime_type = get_image_metadata_from_file_ext(file_ext)
return (
Image.open(image),
file_ext,
image_format,
... |
def create_build_package(package_files):
for package_file in package_files:
if not os.path.exists(package_file):
bot.exit('Cannot find %s.' % package_file)
bot.log('Generating build package for %s files...' % len(package_files))
build_dir = get_tmpdir(prefix="sregistry-build")
build_... |
def step_next_char(self):
    """Advance the cursor by one character and update the high-water mark."""
    new_index = self._index + 1
    new_col = self._col_offset + 1
    self._index = new_index
    self._col_offset = new_col
    # Remember the furthest point reached so far (index, column, line).
    if new_index > self._maxindex:
        self._maxindex = new_index
        self._maxcol = new_col
        self._maxline = self._lineno
def _get_cached_arg_spec(fn):
arg_spec = _ARG_SPEC_CACHE.get(fn)
if arg_spec is None:
arg_spec_fn = inspect.getfullargspec if six.PY3 else inspect.getargspec
try:
arg_spec = arg_spec_fn(fn)
except TypeError:
arg_spec = arg_spec_fn(fn.__call__)
_ARG_SPEC_CACHE[fn] = arg_spec
return arg_... |
def export_xhtml(html, filename, image_tag=None):
if image_tag is None:
image_tag = default_image_tag
else:
image_tag = ensure_utf8(image_tag)
with open(filename, 'w') as f:
offset = html.find("<html>")
assert offset > -1, 'Invalid HTML string: no <html> tag.'
html = ... |
def get_properties(self):
    """Return a plain dict mapping each property's name to its value."""
    result = {}
    for prop in self.properties.values():
        result[prop.get_name()] = prop.get_value()
    return result
def get_card(self, card_id, **query_params):
    """Fetch a single card by id and wrap the JSON payload in a card object.

    Note: ``query_params`` is accepted for interface symmetry but is not
    forwarded, matching the original behaviour.
    """
    uri = self.base_uri + '/cards/' + card_id
    payload = self.fetch_json(uri_path=uri)
    return self.create_card(payload)
def with_context(exc, context):
    """Attach (or extend) a ``context`` dict on ``exc`` and return it.

    The exception gains a ``context`` attribute on first use; later
    calls merge new entries into the existing dict.
    """
    try:
        target = exc.context
    except AttributeError:
        target = exc.context = {}
    target.update(context)
    return exc
def delete(self, url, **kwargs):
    """Issue a DELETE request for ``url``.

    ``erc`` may be supplied in kwargs to override the expected response
    code; otherwise the module-wide default for DELETE is used.
    """
    check_type(url, basestring, may_be_none=False)
    expected_code = kwargs.pop('erc', EXPECTED_RESPONSE_CODE['DELETE'])
    self.request('DELETE', url, expected_code, **kwargs)
def effective_sample_size(states,
filter_threshold=0.,
filter_beyond_lag=None,
name=None):
states_was_list = _is_list_like(states)
if not states_was_list:
states = [states]
filter_beyond_lag = _broadcast_maybelist_arg(states, filter... |
def add_requirements(self, metadata_path):
additional = list(self.setupcfg_requirements())
if not additional: return
pkg_info = read_pkg_info(metadata_path)
if 'Provides-Extra' in pkg_info or 'Requires-Dist' in pkg_info:
warnings.warn('setup.cfg requirements overwrite values ... |
def get_module(self, name, node):
for mod in self.modules():
mod_name = mod.node.name
if mod_name == name:
return mod
package = node.root().name
if mod_name == "%s.%s" % (package, name):
return mod
if mod_name == "%s.%s"... |
def copy(self, copy_source, bucket, key, extra_args=None,
subscribers=None, source_client=None):
if extra_args is None:
extra_args = {}
if subscribers is None:
subscribers = []
if source_client is None:
source_client = self._client
self._v... |
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
local_stream = utils.BytearrayStream()
if len(self._credentials) == 0:
raise ValueError("Authentication struct missing credentials.")
for credential in self._credentials:
credential.write(local_strea... |
def dims(x):
    """Return the dimensions of ``x`` as a list of ints/Nones.

    TensorShape inputs return their ``dims`` attribute unchanged; anything
    else is coerced through ``tf.TensorShape`` first.  A fully-unknown
    shape yields None.
    """
    if isinstance(x, tf.TensorShape):
        return x.dims
    shape_dims = tf.TensorShape(x).dims
    if shape_dims is None:
        return None
    return [tf.compat.dimension_value(d) for d in shape_dims]
def ancestral_reconstruction(params):
if assure_tree(params, tmp_dir='ancestral_tmp'):
return 1
outdir = get_outdir(params, '_ancestral')
basename = get_basename(params, outdir)
gtr = create_gtr(params)
aln, ref, fixed_pi = read_if_vcf(params)
is_vcf = True if ref is not None else False
... |
def chain_nac_proxy(chain, sender, contract_address, value=0):
"create an object which acts as a proxy for the contract on the chain"
klass = registry[contract_address].im_self
assert issubclass(klass, NativeABIContract)
def mk_method(method):
def m(s, *args):
data = abi_encode_args(... |
def _generate(self, source, name, filename, defer_init=False):
    """Delegate code generation to the module-level ``generate`` helper.

    Passes this object (``self``) through so generation can honour its
    settings.  # NOTE(review): semantics of `generate` are not visible here.
    """
    return generate(source, self, name, filename, defer_init=defer_init)
def send_json(self, ids=None):
items = ids or self._registration_id
values = {"registration_ids": items}
if self._data is not None:
values["data"] = self._data
for key, val in self._kwargs.items():
if val:
values[key] = val
data = json.dump... |
def compounds(context, case_id):
adapter = context.obj['adapter']
LOG.info("Running scout update compounds")
case_obj = adapter.case(case_id)
if not case_obj:
LOG.warning("Case %s could not be found", case_id)
context.abort()
try:
adapter.update_case_compounds(case_obj)
e... |
def try_passwordless_ssh(server, keyfile, paramiko=None):
    """Probe whether passwordless SSH to ``server`` works.

    When ``paramiko`` is None, the paramiko backend is chosen on Windows
    and the OpenSSH backend everywhere else; pass a truthy/falsy value to
    force a backend explicitly.
    """
    use_paramiko = paramiko
    if use_paramiko is None:
        # Default heuristic: only Windows lacks a native openssh client.
        use_paramiko = sys.platform == 'win32'
    checker = _try_passwordless_paramiko if use_paramiko else _try_passwordless_openssh
    return checker(server, keyfile)
def analyze_entities(self, document, encoding_type=None, retry=None, timeout=None, metadata=None):
    """Run entity analysis on ``document`` via the underlying client.

    All optional arguments are forwarded unchanged to the client call.
    """
    conn = self.get_conn()
    return conn.analyze_entities(
        document=document,
        encoding_type=encoding_type,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def __load_symbol_maps(self):
    """Populate ``self.symbol_maps`` from the symbol-map repository.

    Builds the in-symbol -> out-symbol lookup in a single pass.
    """
    repo = SymbolMapRepository(self.__get_session())
    self.symbol_maps = {
        record.in_symbol: record.out_symbol for record in repo.get_all()
    }
def connection(self):
    """Return the MySQL connection cached on the current app context.

    The value is created lazily on first access and stored on the context
    as ``mysql_db``.  Returns None when there is no active application
    context.
    """
    ctx = _app_ctx_stack.top
    if ctx is not None:
        if not hasattr(ctx, 'mysql_db'):
            # NOTE(review): `self.connect` is stored without being called --
            # if `connect` is a method (not a property), this caches the
            # bound method rather than a connection. Verify against the class.
            ctx.mysql_db = self.connect
        return ctx.mysql_db
def multiqc(store, institute_id, case_name):
    """Resolve the institute and case and return them for the MultiQC view."""
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    return {
        'institute': institute_obj,
        'case': case_obj,
    }
def write_change(change):
action, rrset = change
change_vals = get_change_values(change)
e_change = etree.Element("Change")
e_action = etree.SubElement(e_change, "Action")
e_action.text = action
e_rrset = etree.SubElement(e_change, "ResourceRecordSet")
e_name = etree.SubElement(e_rrset, "Nam... |
def _init_transformer(cls, data):
    """Coerce ``data`` into a channel-like object.

    QuantumChannel instances pass through untouched; objects exposing a
    conversion hook are converted; anything else is wrapped in Operator.

    Bug fix: the ``to_quantumchannel`` branch previously invoked
    ``data.to_channel()`` instead of ``data.to_quantumchannel()``, so the
    dedicated conversion hook was never actually used.
    """
    if isinstance(data, QuantumChannel):
        return data
    if hasattr(data, 'to_quantumchannel'):
        # Prefer the dedicated QuantumChannel conversion when available.
        return data.to_quantumchannel()
    if hasattr(data, 'to_channel'):
        return data.to_channel()
    # Fall back to treating the raw data as a matrix operator.
    return Operator(data)
def _cmd(self, cmd, *args, **kw):
ok = kw.setdefault('ok', False)
self._wakeup()
if args:
cmd = "%s %s" % (cmd, ' '.join(str(a) for a in args))
for i in xrange(3):
log.info("send: " + cmd)
self.port.write(cmd + '\n')
if ok:
... |
def _index_put(self, idx_name, *ids_and_fcs):
    """Write index keys for the given ids/feature-collections.

    Each key is stored with the placeholder value '0'; only the key
    itself carries information.
    """
    keys = self._index_keys_for(idx_name, *ids_and_fcs)
    pairs = [(key, '0') for key in keys]
    self.kvl.put(self.INDEX_TABLE, *pairs)
def valid(schema=None):
def dec(fun):
@wraps(fun)
def d_func(self, ctx, data, *a, **kw):
try:
validate(data['params'], schema)
except ValidationError as err:
raise InvalidParams(err)
except SchemaErro... |
def map_thread_names():
    """Map thread names to thread ids, keeping the first id seen per name.

    Fix: the original iterated ``list(threading._active.keys())`` and then
    indexed ``threading._active[thread_id]`` -- a thread finishing between
    the snapshot and the lookup raised KeyError.  Snapshotting the items
    in one call avoids that race.
    """
    name_to_id = {}
    for thread_id, thread in list(threading._active.items()):
        name = thread.getName()
        # First occurrence wins when several threads share a name.
        if name not in name_to_id:
            name_to_id[name] = thread_id
    return name_to_id
def _choi_to_kraus(data, input_dim, output_dim, atol=ATOL_DEFAULT):
if is_hermitian_matrix(data, atol=atol):
w, v = la.eigh(data)
if len(w[w < -atol]) == 0:
kraus = []
for val, vec in zip(w, v.T):
if abs(val) > atol:
k = np.sqrt(val) * vec.... |
def login(self, *login_args, **login_kwargs):
def decorator(f):
@wraps(f)
def decorated(*args, **kwargs):
self.response = make_response()
adapter = WerkzeugAdapter(request, self.response)
login_kwargs.setdefault('session', session)
... |
def main(args=None):
options, paths = _parse_options(args)
format = getattr(options, 'output', 'simple')
formatter = _FORMATTERS[format](options)
for path in paths:
meta = get_metadata(path, options.metadata_version)
if meta is None:
continue
if options.download_url_p... |
def _get_format_from_document(self, token, document):
    """Render a dummy token through the formatter and return its char format.

    Python 2 style ``.next()`` call is intentional: the surrounding file
    is py2-era code.
    """
    lines = [(token, u'dummy')]
    # _format_lines yields (code, html) pairs; only the first is needed.
    code, html = self._formatter._format_lines(lines).next()
    self._document.setHtml(html)
    cursor = QtGui.QTextCursor(self._document)
    return cursor.charFormat()
def authenticate_credentials(self, key):
    """Authenticate a token key, rejecting expired tokens.

    Raises AuthenticationFailed when the token's expiry is in the past;
    otherwise refreshes the expiry and returns ``(user, token)``.
    """
    user, token = super(TokenAuthentication, self).authenticate_credentials(key)
    if token.expires < timezone.now():
        raise exceptions.AuthenticationFailed(_('Token has expired.'))
    token.update_expiry()
    return (user, token)
def parse_issues(raw_page):
    """Yield each issue from a JSON page containing an 'issues' list."""
    payload = json.loads(raw_page)
    for entry in payload['issues']:
        yield entry
def nice_pair(pair):
    """Format a (start, end) pair: "N" when collapsed, "N-M" otherwise."""
    start, end = pair
    return "%d" % start if start == end else "%d-%d" % (start, end)
def comments(self):
record_numbers = range(2, self.fward)
if not record_numbers:
return ''
data = b''.join(self.read_record(n)[0:1000] for n in record_numbers)
try:
return data[:data.find(b'\4')].decode('ascii').replace('\0', '\n')
except IndexError:
... |
def quadrature_scheme_lognormal_quantiles(
loc, scale, quadrature_size,
validate_args=False, name=None):
with tf.name_scope(name or "quadrature_scheme_lognormal_quantiles"):
dist = transformed_distribution.TransformedDistribution(
distribution=normal.Normal(loc=loc, scale=scale),
bijector=... |
def do_debug(self, args):
if not args:
self.help_fn("What information would you like: data, sys?")
return ERR
for info in args:
if info == 'sys':
print("-- sys ----------------------------------------")
for line in info_formatter(self.c... |
def read_channel(self):
    """Poll the channel layer once for 'slack.send' messages, then re-arm.

    Non-blocking: when a message is present it is forwarded, and the
    method always reschedules itself via the Twisted reactor.
    """
    channel, message = self.protocol.channel_layer.receive_many([u'slack.send'], block=False)
    delay = 0.1
    if channel:
        # NOTE(review): reads via self.protocol but sends via
        # self.protocols[0] -- confirm both refer to the same protocol.
        self.protocols[0].sendSlack(message)
    reactor.callLater(delay, self.read_channel)
def check_range(self, j):
if isinstance(j, int):
if j < 0 or j >= self.size:
raise QiskitIndexError("register index out of range")
elif isinstance(j, slice):
if j.start < 0 or j.stop >= self.size or (j.step is not None and
... |
def split_vert_on_nonmanifold_face(script, vert_displacement_ratio=0.0):
filter_xml = ''.join([
' <filter name="Split Vertexes Incident on Non Manifold Faces">\n',
' <Param name="VertDispRatio" ',
'value="{}" '.format(vert_displacement_ratio),
'description="Vertex Displacement Ra... |
def strsplit(self, pattern):
    """Split each string in the frame on ``pattern``, returning a new frame."""
    expr = ExprNode("strsplit", self, pattern)
    result = H2OFrame._expr(expr=expr)
    # Splitting never changes the row count, so seed the cache directly.
    result._ex._cache.nrows = self.nrow
    return result
def setup_cmd_parser(cls):
parser = BackendCommandArgumentParser(cls.BACKEND.CATEGORIES,
from_date=True,
token_auth=True,
archive=True)
action = parser.parser._option... |
def _step_decorator_args(self, decorator):
args = decorator.children[3:-2]
step = None
if len(args) == 1:
try:
step = ast.literal_eval(args[0].get_code())
except (ValueError, SyntaxError):
pass
if isinstance(step, six.string_typ... |
def _create_prefix(self, dirname):
    """Build the bucket key prefix for ``dirname`` (no trailing slash).

    Both '.' and '/' are treated as the bucket root.
    """
    normalized = '' if dirname in ('.', '/') else dirname
    return os.path.join(self._bucket_root, normalized).rstrip('/')
def build_fake_input_fns(batch_size):
num_words = 1000
vocabulary = [str(i) for i in range(num_words)]
random_sample = np.random.randint(
10, size=(batch_size, num_words)).astype(np.float32)
def train_input_fn():
dataset = tf.data.Dataset.from_tensor_slices(random_sample)
dataset = dataset.batch(b... |
def voronoi(script, hole_num=50, target_layer=None, sample_layer=None, thickness=0.5, backward=True):
if target_layer is None:
target_layer = script.current_layer()
if sample_layer is None:
sampling.poisson_disk(script, sample_num=hole_num)
sample_layer = script.last_layer()
vert_col... |
def get_ip(self):
    """Return the IP of the first client node, or of the first regular
    node when there are no client nodes."""
    pool = self.client_nodes if self.client_nodes else self.nodes
    return pool[0].get_ip()
def project_dict(self, project_name, token_name, public):
project_dict = {}
project_dict['project_name'] = project_name
if token_name is not None:
if token_name == '':
project_dict['token_name'] = project_name
else:
project_dict['token_name... |
def extract_tar(archive, output_folder, handle_whiteout=False):
from .terminal import run_command
if handle_whiteout is True:
return _extract_tar(archive, output_folder)
args = '-xf'
if archive.endswith(".tar.gz"):
args = '-xzf'
command = ["tar", args, archive, "-C", output_folder, "... |
def walk_upgrade_domain(self, service_name, deployment_name,
upgrade_domain):
_validate_not_none('service_name', service_name)
_validate_not_none('deployment_name', deployment_name)
_validate_not_none('upgrade_domain', upgrade_domain)
return self._perform_post... |
def get_clinvar_id(self, submission_id):
    """Look up a submission document and return its clinvar_subm_id (or None)."""
    query = {'_id': ObjectId(submission_id)}
    submission = self.clinvar_submission_collection.find_one(query)
    return submission.get('clinvar_subm_id')
def generate_sentence(self, chain):
def weighted_choice(choices):
total_weight = sum(weight for val, weight in choices)
rand = random.uniform(0, total_weight)
upto = 0
for val, weight in choices:
if upto + weight >= rand:
return... |
def _bisect(value_and_gradients_function, initial_args, f_lim):
def _loop_cond(curr):
return ~tf.reduce_all(input_tensor=curr.stopped)
def _loop_body(curr):
mid = value_and_gradients_function((curr.left.x + curr.right.x) / 2)
failed = (curr.failed | ~is_finite(mid) |
tf.equal(mid.x, curr.l... |
def from_pandas(df, name="pandas", copy_index=True, index_name="index"):
import six
vaex_df = vaex.dataframe.DataFrameArrays(name)
def add(name, column):
values = column.values
try:
vaex_df.add_column(name, values)
except Exception as e:
print("could not conve... |
def network_to_pandas_hdf5(network, filename, rm_nodes=None):
if rm_nodes is not None:
nodes, edges = remove_nodes(network, rm_nodes)
else:
nodes, edges = network.nodes_df, network.edges_df
with pd.HDFStore(filename, mode='w') as store:
store['nodes'] = nodes
store['edges'] =... |
def build_all_iop(self):
    """Build all quantities (a, bb, b, c) from the IOPs, in order."""
    lg.info('Building all b and c from IOPs')
    for build_step in (self.build_a, self.build_bb, self.build_b, self.build_c):
        build_step()
def _put_information(self):
self.session._add_object()
self.session._out('<<')
self.session._out('/Producer ' + self._text_to_string(
'PDFLite, https://github.com/katerina7479'))
if self.title:
self.session._out('/Title ' + self._text_to_string(self.title))
... |
def get_rendition_key_set(key):
try:
rendition_key_set = IMAGE_SETS[key]
except KeyError:
raise ImproperlyConfigured(
"No Rendition Key Set exists at "
"settings.VERSATILEIMAGEFIELD_RENDITION_KEY_SETS['{}']".format(key)
)
else:
return validate_versatil... |
def _method_magic_marker(magic_kind):
validate_type(magic_kind)
def magic_deco(arg):
call = lambda f, *a, **k: f(*a, **k)
if callable(arg):
func = arg
name = func.func_name
retval = decorator(call, func)
record_magic(magics, magic_kind, name, name)... |
def get_management_certificate(self, thumbprint):
    """Fetch the management certificate identified by ``thumbprint``."""
    _validate_not_none('thumbprint', thumbprint)
    path = '/' + self.subscription_id + '/certificates/' + _str(thumbprint)
    return self._perform_get(path, SubscriptionCertificate)
def key_wrapping_data(self):
key_wrapping_data = {}
encryption_key_info = {
'unique_identifier': self._kdw_eki_unique_identifier,
'cryptographic_parameters': {
'block_cipher_mode': self._kdw_eki_cp_block_cipher_mode,
'padding_method': self._kdw_eki... |
def encode(self):
header = bytearray(1)
varHeader = bytearray()
payload = bytearray()
header[0] = 0x10
varHeader.extend(encodeString(self.version['tag']))
varHeader.append(self.version['level'])
flags = (self.cleanStart << 1)
if self.willTopic is n... |
def stop_step(self, step_name):
if self.finished is not None:
raise AlreadyFinished()
steps = copy.deepcopy(self.steps)
step_data = self._get_step(step_name, steps=steps)
if step_data is None:
raise StepNotStarted()
elif 'stop' in step_data:
ra... |
def _notebook_model_from_db(self, record, content):
path = to_api_path(record['parent_name'] + record['name'])
model = base_model(path)
model['type'] = 'notebook'
model['last_modified'] = model['created'] = record['created_at']
if content:
content = reads_base64(recor... |
def signal_kernel(self, signum):
    """Send ``signum`` to the kernel process.

    Raises RuntimeError when no kernel process is running.
    """
    if not self.has_kernel:
        raise RuntimeError("Cannot signal kernel. No kernel is running!")
    self.kernel.send_signal(signum)
def _header_constructor(
cls, data_to_print, header_separator="-", column_separator=" "
):
header_data = []
header_size = ""
before_size = "%-"
after_size = "s"
if header_separator:
header_separator_data = []
length_data_to_print = len(data_to_prin... |
def equals_order_sensitive(self, other):
    """True when ``other`` is a Mapping with identical items in identical order."""
    if not isinstance(other, Mapping):
        return False
    if len(self) != len(other):
        return False
    pairs = izip(iteritems(self), iteritems(other))
    return all(mine == theirs for (mine, theirs) in pairs)
def admin_footer(parser, token):
tag_name = token.split_contents()
if len(tag_name) > 1:
raise base.TemplateSyntaxError(
'{} tag does not accept any argument(s): {}'.format(
token.contents.split()[0],
', '.join(token.contents.split()[1:])
))
return AdminFooter... |
def next(self):
    """Return the pending value, then pre-fetch the following one."""
    pending, self._current = self._current, self.readfunc()
    return pending
def create(self):
    """Create an empty file at ``self.path``.

    Raises Exception when the path already exists.
    """
    if os.path.exists(self.path):
        raise Exception("File exists: {}".format(self.path))
    # Append mode creates the file without touching any content that
    # might appear between the check and the open.
    open(self.path, 'a').close()
def write_json_report(sample_id, data1, data2):
parser_map = {
"base_sequence_quality": ">>Per base sequence quality",
"sequence_quality": ">>Per sequence quality scores",
"base_gc_content": ">>Per sequence GC content",
"base_n_content": ">>Per base N content",
"sequence_leng... |
def case_insensitive(self, fields_dict):
    """Rewrite fields listed in the model's CASE_INSENSITIVE_FIELDS to
    ``__iexact`` lookups.

    Mutates ``fields_dict`` in place; models without the marker attribute
    are left untouched.
    """
    insensitive_fields = getattr(self.model, 'CASE_INSENSITIVE_FIELDS', ())
    for field in insensitive_fields:
        if field in fields_dict:
            fields_dict[field + '__iexact'] = fields_dict.pop(field)
def trim_data_back_to(monthToKeep):
    """Drop failure records and summaries older than ``monthToKeep`` months."""
    global g_failed_tests_info_dict
    # A "month" is approximated as 30 days here.
    seconds_to_keep = monthToKeep * 30 * 24 * 3600
    oldest_time_allowed = time.time() - seconds_to_keep
    clean_up_failed_test_dict(oldest_time_allowed)
    clean_up_summary_text(oldest_time_allowed)
def fill(self, doc_contents):
    """Escape SVG-special characters in each value (mutating the dict in
    place, as callers may rely on), then delegate to the base fill."""
    for key in doc_contents:
        doc_contents[key] = replace_chars_for_svg_code(doc_contents[key])
    return super(SVGDocument, self).fill(doc_contents=doc_contents)
def In(sigOrVal, iterable):
    """OR together equality comparisons of ``sigOrVal`` with every item.

    Each item is converted with ``toHVal`` before comparison.  An empty
    ``iterable`` trips the assert.
    """
    acc = None
    for item in iterable:
        comparison = sigOrVal._eq(toHVal(item))
        acc = comparison if acc is None else acc | comparison
    assert acc is not None, "Parameter iterable is empty"
    return acc
def set_client_certificate(self, certificate):
    """Install a client certificate on the underlying WinHttp request.

    The certificate string is wrapped in a COM BSTR before being handed
    to the native ``_SetClientCertificate`` call.
    """
    _certificate = BSTR(certificate)
    _WinHttpRequest._SetClientCertificate(self, _certificate)
def validate_chunks(self, chunks):
    """Assert every chunk exit lands on a chunk start (or is negative).

    Negative exits are sentinels and always allowed.
    """
    starts = {chunk.byte for chunk in chunks}
    for chunk in chunks:
        assert all(exit_ in starts or exit_ < 0 for exit_ in chunk.exits)
def get_external_tools_in_account(self, account_id, params=None):
    """Return all external tools configured for an account, as a list.

    Fix: the original used a mutable default argument (``params={}``),
    which is shared across calls; using ``None`` plus a fresh dict avoids
    cross-call contamination if the dict is ever mutated downstream.
    """
    if params is None:
        params = {}
    url = ACCOUNTS_API.format(account_id) + "/external_tools"
    # Collect every page of results into a single list.
    return list(self._get_paged_resource(url, params=params))
def execute(self, context):
    """Build the Discord webhook hook from this operator's settings and
    deliver the message.  ``context`` is unused but required by the
    operator interface."""
    hook_args = (
        self.http_conn_id,
        self.webhook_endpoint,
        self.message,
        self.username,
        self.avatar_url,
        self.tts,
        self.proxy,
    )
    self.hook = DiscordWebhookHook(*hook_args)
    self.hook.execute()
def make_dict_observable(matrix_observable):
dict_observable = {}
observable = np.array(matrix_observable)
observable_size = len(observable)
observable_bits = int(np.ceil(np.log2(observable_size)))
binary_formater = '0{}b'.format(observable_bits)
if observable.ndim == 2:
observable = obs... |
def _check_wiremap_validity(self, wire_map, keymap, valmap):
for k, v in wire_map.items():
kname = "%s[%d]" % (k[0].name, k[1])
vname = "%s[%d]" % (v[0].name, v[1])
if k not in keymap:
raise DAGCircuitError("invalid wire mapping key %s" % kname)
if... |
def pformat_dict_summary_html(dict):
    """Render a one-item-per-line HTML summary of ``dict``.

    Values whose type is not in DICT_EXPANDED_TYPES are elided as '...'.
    Returns ' {}' for an empty/falsy mapping.  (Parameter name shadows the
    builtin, kept for interface compatibility.)
    """
    if not dict:
        return ' {}'
    rendered = []
    for key, value in sorted(six.iteritems(dict)):
        shown = value if isinstance(value, DICT_EXPANDED_TYPES) else '...'
        rendered.append(_format_dict_item(key, shown))
    return mark_safe(u'<br/>'.join(rendered))
def selected(self):
    """Return the selected field name, prefixed with '-' for descending
    order; None when nothing is selected."""
    if not self._selected:
        return None
    if self.asc:
        return self._selected
    return "-{0}".format(self._selected)
def _helpful_failure(method):
@wraps(method)
def wrapper(self, val):
try:
return method(self, val)
except:
exc_cls, inst, tb = sys.exc_info()
if hasattr(inst, '_RERAISE'):
_, expr, _, inner_val = Q.__debug_info__
Q.__debug_info_... |
def _add_to_tree(self, start_node, split_names, type_name, group_type_name,
instance, constructor, args, kwargs):
try:
act_node = start_node
last_idx = len(split_names) - 1
add_link = type_name == LINK
link_added = False
for idx, n... |
def tostring(self):
    """Serialize this object to an indented, utf-8 XML string.

    Placeholder markers of the form ``_X_`` (single capital letter)
    are stripped from the output before the XML declaration is prepended.
    """
    root = self.as_element()
    indent(root)
    body = ET.tostring(root, encoding="utf-8")
    body = re.sub(r'_[A-Z]_', '', body)
    return '<?xml version="1.0" encoding="utf-8"?>\n' + body
def __head(self, h):
    """Wrap ``h`` in the active header color escape codes."""
    colors = self.color_table.active_colors
    return '%s%s%s' % (colors.header, h, colors.normal)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.