def convert_to_this_nbformat(nb, orig_version=2, orig_minor=0):
    if orig_version == 1:
        nb = v2.convert_to_this_nbformat(nb)
        orig_version = 2
    if orig_version == 2:
        nb.nbformat = nbformat
        nb.nbformat_minor = nbformat_minor
        nb.orig_nbformat = 2
        return nb
    elif orig...

def sync_folder(self, path, bucket):
    bucket = self.conn.get_bucket(bucket)
    local_files = self._get_local_files(path)
    s3_files = self._get_s3_files(bucket)
    for filename, hash in local_files.iteritems():
        s3_key = s3_files[filename]
        if s3_key is None:
            ...

def wrap_class(cls, error_threshold=None):
    methods = (inspect.getmembers(cls, inspect.ismethod) +
               inspect.getmembers(cls, inspect.isfunction))
    for method_name, method in methods:
        wrapped_method = flawless.client.client._wrap_function_with_error_decorator(
            method if not im_self(method) else im_...

def check_type(self, value, attr, data):
    root_value = super(InstructionParameter, self).check_type(
        value, attr, data)
    if is_collection(value):
        _ = [super(InstructionParameter, self).check_type(item, attr, data)
             for item in value]
    return root_value

def _batch_gather_with_broadcast(params, indices, axis):
    leading_bcast_shape = tf.broadcast_dynamic_shape(
        tf.shape(input=params)[:axis], tf.shape(input=indices)[:-1])
    params += tf.zeros(
        tf.concat((leading_bcast_shape, tf.shape(input=params)[axis:]), axis=0),
        dtype=params.dtype)
    indices +...

def establish(self, call_id, timeout, limit=None, retry=None,
              max_retries=None):
    rejected = 0
    retried = 0
    results = []
    result_queue = self.result_queues[call_id]
    try:
        with Timeout(timeout, False):
            while True:
                resul...

def slicify(slc, dim):
    if isinstance(slc, slice):
        start = 0 if slc.start is None else slc.start
        stop = dim if slc.stop is None else slc.stop
        step = 1 if slc.step is None else slc.step
        if start < 0:
            start += dim
        if stop < 0:
            stop += dim
        if step > 0:
            if star...

def enable_gtk(self, app=None):
    import gtk
    try:
        gtk.set_interactive(True)
        self._current_gui = GUI_GTK
    except AttributeError:
        from IPython.lib.inputhookgtk import inputhook_gtk
        self.set_inputhook(inputhook_gtk)
        self._current_gui = GUI_GT...

def overlapping(self, variant_obj):
    category = 'snv' if variant_obj['category'] == 'sv' else 'sv'
    query = {
        '$and': [
            {'case_id': variant_obj['case_id']},
            {'category': category},
            {'hgnc_ids': {'$in': variant_obj['hgnc_ids']}}
        ]
        ...

def convert_camel_case(name):
    # Convert a CamelCase identifier to snake_case.
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

def incpos(self, length: int = 1) -> int:
    if length < 0:
        raise ValueError("length must be positive")
    i = 0
    while (i < length):
        if self._cursor.index < self._len:
            if self.peek_char == '\n':
                self._cursor.step_next_line()
            sel...

def cache(func):
    CACHE_DIR = appdirs.user_cache_dir('sportsref', getpass.getuser())
    if not os.path.isdir(CACHE_DIR):
        os.makedirs(CACHE_DIR)

    @funcutils.wraps(func)
    def wrapper(url):
        file_hash = hashlib.md5()
        encoded_url = url.encode(errors='replace')
        file_hash.update(encod...

def onTWriteCallback__init(self, sim):
    yield from self.onTWriteCallback(sim)
    self.intf.t._sigInside.registerWriteCallback(
        self.onTWriteCallback,
        self.getEnable)
    self.intf.o._sigInside.registerWriteCallback(
        self.onTWriteCallback,
        self.getEnable)

def delims(self, delims):
    expr = '[' + ''.join('\\' + c for c in delims) + ']'
    self._delim_re = re.compile(expr)
    self._delims = delims
    self._delim_expr = expr

def ALL_mentions(target_mentions, chain_mentions):
    # Check that every target mention appears in at least one chain mention.
    found_all = True
    for name in target_mentions:
        found_one = False
        for chain_ment in chain_mentions:
            if name in chain_ment:
                found_one = True
                break
        if not found_one:
            found_all = False
            ...

def execute(self, obj):
    try:
        if self.config.stdin:
            self.spawn(self.config.command, stdin_content=obj, stdin=True,
                       timeout=1)
        else:
            if "@@" not in self.config.command:
                raise PJFMissingArgument("Missing @@ filename indicator while usi...

def can_cut(self):
    cursor = self._control.textCursor()
    return (cursor.hasSelection() and
            self._in_buffer(cursor.anchor()) and
            self._in_buffer(cursor.position()))

def get_importer(path_item):
    try:
        importer = sys.path_importer_cache[path_item]
    except KeyError:
        for hook in sys.path_hooks:
            try:
                importer = hook(path_item)
            except ImportError:
                pass
            else:
                break
        else:
            ...

def create_resized_image(self, path_to_image, save_path_on_storage,
                         width, height):
    image, file_ext, image_format, mime_type = self.retrieve_image(
        path_to_image
    )
    image, save_kwargs = self.preprocess(image, image_format)
    imagefile = self.process_...

def predict_on_stream(config: Union[str, Path, dict], batch_size: int = 1,
                      file_path: Optional[str] = None) -> None:
    if file_path is None or file_path == '-':
        if sys.stdin.isatty():
            raise RuntimeError('To process data from terminal please use interact mode')
        f = sys.stdin
    else:
        ...

def paths(input_dir):
    'yield all file paths under input_dir'
    for root, dirs, fnames in os.walk(input_dir):
        for i_fname in fnames:
            i_path = os.path.join(root, i_fname)
            yield i_path

def evaluate_min_coverage(coverage_opt, assembly_coverage, assembly_size):
    if coverage_opt == "auto":
        min_coverage = (assembly_coverage / assembly_size) * .3
        logger.info("Minimum assembly coverage automatically set to: "
                    "{}".format(min_coverage))
        if min_coverage < 10:
            ...

def dbgr(self, string):
    print('')
    self.proc.cmd_queue.append(string)
    self.proc.process_command()
    return

def register(self, contract):
    "registers NativeContract classes"
    assert issubclass(contract, NativeContractBase)
    assert len(contract.address) == 20
    assert contract.address.startswith(self.native_contract_address_prefix)
    if self.native_contracts.get(contract.address) == contract._...

def assert_no_title(self, title, **kwargs):
    query = TitleQuery(title, **kwargs)

    @self.synchronize(wait=query.wait)
    def assert_no_title():
        if query.resolves_for(self):
            raise ExpectationNotMet(query.negative_failure_message)
        return True

    return assert...

def IsNotNone(*fields, default=None):
    when_clauses = [
        expressions.When(
            ~expressions.Q(**{field: None}),
            then=expressions.F(field)
        )
        for field in reversed(fields)
    ]
    return expressions.Case(
        *when_clauses,
        default=expressions.Value(default),
        ...

def jenkins_request_with_headers(jenkins_server, req):
    try:
        response = jenkins_server.jenkins_request(req)
        response_body = response.content
        response_headers = response.headers
        if response_body is None:
            raise jenkins.EmptyResponseException(
                "Error communica...

def _left_doubling_increments(batch_shape, max_doublings, step_size, seed=None,
                              name=None):
    with tf.compat.v1.name_scope(name, 'left_doubling_increments',
                                 [batch_shape, max_doublings, step_size]):
        step_size = tf.convert_to_tensor(value=step_size)
        dt...

def _joint_sample_n(self, n, seed=None):
    with tf.name_scope("sample_n_joint"):
        stream = seed_stream.SeedStream(
            seed, salt="LinearGaussianStateSpaceModel_sample_n_joint")
        sample_and_batch_shape = distribution_util.prefer_static_value(
            tf.concat([[n], self.batch_shape_tensor()], ...

def simple_attention(memory, att_size, mask, keep_prob=1.0,
                     scope="simple_attention"):
    with tf.variable_scope(scope):
        BS, ML, MH = tf.unstack(tf.shape(memory))
        memory_do = tf.nn.dropout(memory, keep_prob=keep_prob,
                                  noise_shape=[BS, 1, MH])
        logits = tf.layers.dense(tf.layers.dense(memory_do, ...

def _dump_text(self):
    results = self._relay_output['result']
    for l in results:
        dt = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(int(l[1]['ts'])))
        print("{0} {1} {2} {3}".format(l[0], dt, l[1]['type'], l[1]['msg']))

def get_ip_address_info(ip_address, cache=None, nameservers=None,
                        timeout=2.0, parallel=False):
    ip_address = ip_address.lower()
    if cache:
        info = cache.get(ip_address, None)
        if info:
            return info
    info = OrderedDict()
    info["ip_address"] = ip_address
    r...

def _explore(self, explore_iterable):
    if self.v_locked:
        raise pex.ParameterLockedException('Parameter `%s` is locked!' %
                                          self.v_full_name)
    if self.f_has_range():
        raise TypeError('Your parameter `%s` is already explored, '
                        'cannot _explore it further!' ...

async def fetch(self) -> Response:
    if self.request_config.get('DELAY', 0) > 0:
        await asyncio.sleep(self.request_config['DELAY'])
    timeout = self.request_config.get('TIMEOUT', 10)
    try:
        async with async_timeout.timeout(timeout):
            resp = await self._make_reques...

def _prm_read_dictionary(self, leaf, full_name):
    try:
        temp_table = self._prm_read_table(leaf, full_name)
        temp_dict = temp_table.to_dict('list')
        innder_dict = {}
        for innerkey, vallist in temp_dict.items():
            innder_dict[innerkey] = vallist[0]
        ...

def _validate_value(key, value, expected_type):
    if not isinstance(value, expected_type):
        raise TypeError("{} argument must have a type {} not {}".format(
            key, expected_type, type(value)))

def unified_file(self):
    if (
        "file_to_test" in PyFunceble.INTERN
        and PyFunceble.INTERN["file_to_test"]
        and PyFunceble.CONFIGURATION["unified"]
    ):
        output = (
            self.output_parent_dir
            + PyFunceble.OUTPUTS["default_files"]["results"]
            ...

def print_table(language):
    table = translation_table(language)
    for code, name in sorted(table.items(), key=operator.itemgetter(0)):
        # Pad names with U+3000 (ideographic space) so CJK columns align.
        print(u'{language:<8} {name:\u3000<20}'.format(
            name=name, language=code
        ))
    return None

def yzy_to_zyz(xi, theta1, theta2, eps=1e-9):
    quaternion_yzy = quaternion_from_euler([theta1, xi, theta2], 'yzy')
    euler = quaternion_yzy.to_zyz()
    quaternion_zyz = quaternion_from_euler(euler, 'zyz')
    out_angles = (euler[1], euler[0], euler[2])
    abs_inner = abs(quaternion_zyz.data.d...

def select_lasso(self, expression_x, expression_y, xsequence, ysequence,
                 mode="replace", name="default", executor=None):
    def create(current):
        return selections.SelectionLasso(expression_x, expression_y,
                                         xsequence, ysequence, current, mode)
    self._selection(create, name, executor=executor)

def resize_to(self, width, height):
    self.driver.resize_window_to(self.handle, width, height)

def merge(self, new_dict):
    actions = new_dict.pop("actions")
    for action in actions:
        self.add_action(action)
    self.__dict__.update(new_dict)

def _run_writers(self, start_count, next_idx, sources, i_str, t_path):
    name_info = dict(
        first=start_count,
        source=sources.pop(),
    )
    all_o_paths = []
    for writer in self.writers:
        logger.debug('running %r on %r: %r', writer, i_str, name_info)
        ...

def _height_is_big_enough(image, height):
    if height > image.size[1]:
        raise ImageSizeError(image.size[1], height)

def add_enrichr_parser(subparsers):
    argparser_enrichr = subparsers.add_parser(
        "enrichr", help="Using Enrichr API to perform GO analysis.")
    enrichr_opt = argparser_enrichr.add_argument_group("Input arguments")
    enrichr_opt.add_argument("-i", "--input-list", action="store",
                            dest="gene_list", type=str, required...

def strval(node, outermost=True):
    if not isinstance(node, element):
        return node.xml_value if outermost else [node.xml_value]
    accumulator = []
    for child in node.xml_children:
        if isinstance(child, text):
            accumulator.append(child.xml_value)
        elif isinstance(child, element):
            ...

def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    local_stream = utils.BytearrayStream()
    if self._unique_identifier:
        self._unique_identifier.write(
            local_stream,
            kmip_version=kmip_version
        )
    if self._usage_limits_count...

def apply_operation_back(self, op, qargs=None, cargs=None, condition=None):
    qargs = qargs or []
    cargs = cargs or []
    all_cbits = self._bits_in_condition(condition)
    all_cbits.extend(cargs)
    self._check_condition(op.name, condition)
    self._check_bits(qargs, self.output_map)
    ...

def configure(self, options, config):
    log.debug("Configuring plugins")
    self.config = config
    cfg = PluginProxy('configure', self._plugins)
    cfg(options, config)
    enabled = [plug for plug in self._plugins if plug.enabled]
    self.plugins = enabled
    self.sort()
    log...

def s2n(self):
    # M_N is the neutron mass excess in MeV.
    M_N = 8.0713171
    f = lambda parent, daughter: -parent + daughter + 2 * M_N
    return self.derived('s2n', (0, -2), f)

async def wait_changed(self):
    if not self.is_complete():
        waiter = self._loop.create_future()
        self._waiters.append(waiter)
        await waiter

def add_virtual_columns_proper_motion2vperpendicular(self, distance="distance",
                                                     pm_long="pm_l",
                                                     pm_lat="pm_b",
                                                     vl="vl", vb="vb",
                                                     propagate_uncertainties=False,
                                                     r...

def _build_trainable_posterior(param, initial_loc_fn):
    loc = tf.compat.v1.get_variable(
        param.name + '_loc',
        initializer=lambda: initial_loc_fn(param),
        dtype=param.prior.dtype,
        use_resource=True)
    scale = tf.nn.softplus(
        tf.compat.v1.get_variable(
            param.name + '_scale',
            ...

def poll(self):
    service = yield self.get_service()
    if not service:
        self.log.warn("Docker service not found")
        return 0
    task_filter = {'service': service['Spec']['Name']}
    tasks = yield self.docker(
        'tasks', task_filter
    )
    running_task = No...

def var(self, axis=None, keepdims=False):
    return self._stat(axis, name='variance', keepdims=keepdims)

def get_ids(self, features, threshold=0.0, func=np.sum, get_weights=False):
    if isinstance(features, str):
        features = [features]
    features = self.search_features(features)
    feature_weights = self.data.ix[:, features]
    weights = feature_weights.apply(func, 1)
    above_thresh ...

def is_error(node: astroid.node_classes.NodeNG) -> bool:
    for child_node in node.get_children():
        if isinstance(child_node, astroid.Raise):
            return True
    return False

def _sentences(self, clean_visible):
    'generate strings identified as sentences'
    previous_end = 0
    clean_visible = clean_visible.decode('utf8')
    for start, end in self.sentence_tokenizer.span_tokenize(clean_visible):
        if start < previous_end:
            start = previous_end
            ...

def set_default_bg():
    term = environ.get('TERM', None)
    if term:
        if (term.startswith('xterm') or
                term.startswith('eterm') or
                term == 'dtterm'):
            return False
    return True

def run(self, data_loaders, workflow, max_epochs, **kwargs):
    assert isinstance(data_loaders, list)
    assert mmcv.is_list_of(workflow, tuple)
    assert len(data_loaders) == len(workflow)
    self._max_epochs = max_epochs
    work_dir = self.work_dir if self.work_dir is not None else 'NONE'
    ...

def glance_process(body, message):
    event_type = body['event_type']
    process = glance_customer_process.get(event_type)
    if process is not None:
        process(body, message)
    else:
        matched = False
        process_wildcard = None
        for pattern in glance_customer_process_wildcard.keys():
            ...

def pre_build(self, traj, brian_list, network_dict):
    self._pre_build = not _explored_parameters_in_group(
        traj, traj.parameters.connections)
    self._pre_build = (self._pre_build and 'neurons_i' in network_dict and
                       'neurons_e' in network_dict)
    if self._pre_build:
        ...

def _create_idx_from_stream(self, stream):
    stream_iter = iter(stream)
    dimension = self.properties.dimension
    darray = ctypes.c_double * dimension
    mins = darray()
    maxs = darray()
    no_data = ctypes.cast(ctypes.pointer(ctypes.c_ubyte(0)), ctypes.P...

def isAcquired(self, lockID):
    return self.__lockImpl.isAcquired(lockID, self.__selfID, time.time())

def fill_heatmap(self):
    for module_path, lineno, runtime in self.lines_without_stdlib:
        self._execution_count[module_path][lineno] += 1
        self._heatmap[module_path][lineno] += runtime

def parse_unstruct(unstruct):
    my_json = json.loads(unstruct)
    data = my_json['data']
    schema = data['schema']
    if 'data' in data:
        inner_data = data['data']
    else:
        raise SnowplowEventTransformationException(
            ["Could not extract inner data field from unstructured event"])
    fixed_schema =...

def volume_percentage_used(self, volume):
    volume = self._get_volume(volume)
    if volume is not None:
        total = int(volume["size"]["total"])
        used = int(volume["size"]["used"])
        if used is not None and used > 0 and \
                total is not None and total > 0:
            ...

def _sample_3d(self, n, seed=None):
    seed = seed_stream.SeedStream(seed, salt='von_mises_fisher_3d')
    u_shape = tf.concat([[n], self._batch_shape_tensor()], axis=0)
    z = tf.random.uniform(u_shape, seed=seed(), dtype=self.dtype)
    safe_conc = tf.where(self.concentration > 0, self.conc...

def find_on_path(importer, path_item, only=False):
    path_item = _normalize_cached(path_item)
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    ...

def lists(self, pattern: str = None) -> List[WikiList]:
    return [
        lst
        for arg in self.arguments
        for lst in arg.lists(pattern)
        if lst]

def make_simple_step_size_update_policy(num_adaptation_steps,
                                        target_rate=0.75,
                                        decrement_multiplier=0.01,
                                        increment_multiplier=0.01,
                                        step_counter=None):
    if s...

def _update_status(self):
    srun, scomp, sdead = self._s_running, self._s_completed, self._s_dead
    running, completed, dead = self._running, self._completed, self._dead
    for num, job in enumerate(running):
        stat = job.stat_code
        if stat == srun:
            continue
        ...

def headers_present(self, headers):
    headers = {name: re.compile('(.*)') for name in headers}
    self.add_matcher(matcher('HeadersMatcher', headers))

def download_csv(data, filename):
    assert_is_type(data, H2OFrame)
    assert_is_type(filename, str)
    url = h2oconn.make_url("DownloadDataset", 3) + \
        "?frame_id={}&hex_string=false".format(data.frame_id)
    with open(filename, "wb") as f:
        f.write(urlopen()(url).read())

def _bind_parameter(self, parameter, value):
    for (instr, param_index) in self._parameter_table[parameter]:
        instr.params[param_index] = value

def exclude_downhole(filt, threshold=2):
    cfilt = filt.copy()
    inds = bool_2_indices(~filt)
    rem = (np.diff(inds) >= threshold)[:, 0]
    if any(rem):
        if inds[rem].shape[0] > 1:
            limit = inds[rem][1, 0]
            cfilt[limit:] = False
    return cfilt

def init_module(self, run_object):
    self.profile = self.profile_module
    self._run_object, _, self._run_args = run_object.partition(' ')
    self._object_name = '%s (module)' % self._run_object
    self._globs = {
        '__file__': self._run_object,
        '__name__': '__main__',
        ...

def _main(self, fileobj, data, offset):
    fileobj.seek(offset)
    fileobj.write(data)

def encode(self, txt):
    # Map each character to its index in the forward index; unknown chars map to 0.
    return list(self._fwd_index.get(c, 0) for c in txt)

def decrement(self):
    with self._lock:
        if self._count == 0:
            raise RuntimeError(
                'Counter is at zero. It cannot dip below zero')
        self._count -= 1
        if self._is_finalized and self._count == 0:
            self._callback()

def _initialize_slots(self, seed, hashvalues):
    self.seed = seed
    self.hashvalues = self._parse_hashvalues(hashvalues)

def get(self, *args, **kwargs):
    if 'pk' in kwargs:
        kwargs['parent'] = kwargs['pk']
        kwargs['head'] = True
        del kwargs['pk']
    if 'request' in kwargs:
        request = kwargs['request']
        version = request.GET.get('version', None)
        preview_id = request.GET...

def withIndent(self, indent=1):
    ctx = copy(self)
    ctx.indent += indent
    return ctx

def fix_header(filename, keyword, new_value):
    hd = read_header(filename)
    hi = read_header(filename, return_idxs=True)
    idx = hi[keyword]
    dtype = header_keyword_types[keyword]
    dtype_to_type = {b'<l': np.int32,
                     b'str': bytes,
                     b'<d': np.float64,
                     ...

def _match_one(self, rec, tests):
    for key, test in tests.iteritems():
        if not test(rec.get(key, None)):
            return False
    return True

def as_action_description(self):
    description = {
        self.name: {
            'href': self.href_prefix + self.href,
            'timeRequested': self.time_requested,
            'status': self.status,
        },
    }
    if self.input is not None:
        description[self.na...

def parse(url):
    config = {}
    if not isinstance(url, six.string_types):
        url = ''
    url = urlparse.urlparse(url)
    path = url.path[1:]
    path = path.split('?', 2)[0]
    config.update({
        'NAME': path,
        'USER': url.username,
        'PASSWORD': url.password,
        'HOST': url.hostname,
        ...

def get_table_content(self, table):
    result = [[]]
    cols = table.cols
    for cell in self.compute_content(table):
        if cols == 0:
            result.append([])
            cols = table.cols
        cols -= 1
        result[-1].append(cell)
    while len(result[-1]) < col...

def connect(self):
    SCOPES = 'https://www.googleapis.com/auth/drive'
    store = file.Storage('drive_credentials.json')
    creds = store.get()
    if not creds or creds.invalid:
        try:
            flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
        except In...

def get_type(self, type_name):
    type_name = self._canonicalize_type(type_name)
    if str(type_name) == 'int':
        type_name = 'integer'
    elif str(type_name) == 'str':
        type_name = 'string'
    elif str(type_name) == 'dict':
        type_name = 'basic_dict'
    if self.i...

def create(self):
    if self.dirname and not os.path.exists(self.dirname):
        os.makedirs(self.dirname)

def add_server(self, hostname, port, use_ssl, tls_ctx=None):
    if not use_ssl and tls_ctx:
        raise ValueError("Cannot specify a TLS context and not use SSL!")
    server = ldap3.Server(
        hostname,
        port=port,
        use_ssl=use_ssl,
        tls=tls_ctx
    )
    ...

def get_ref_annotation_data_after_time(self, id_tier, time):
    befores = self.get_ref_annotation_data_between_times(
        id_tier, time, self.get_full_time_interval())
    if befores:
        return [min(befores, key=lambda x: x[0])]
    else:
        return []

def import_data(
        self, resource_group_name, name, files, format=None,
        custom_headers=None, raw=False, polling=True, **operation_config):
    raw_result = self._import_data_initial(
        resource_group_name=resource_group_name,
        name=name,
        files=files,
        format=for...

def flowshow(flow, win_name='', wait_time=0):
    flow = flowread(flow)
    flow_img = flow2rgb(flow)
    imshow(rgb2bgr(flow_img), win_name, wait_time)

def duplicate(self, new_parent=None):
    "Create a new object exactly similar to self"
    kwargs = {}
    for spec_name, spec in self._meta.specs.items():
        value = getattr(self, spec_name)
        if isinstance(value, Color):
            print "COLOR", value, value.default
            ...

def option_attrname(self, opt, optdict=None):
    if optdict is None:
        optdict = self.get_option_def(opt)
    return optdict.get("dest", opt.replace("-", "_"))

def set_issuer(self, issuer):
    self._set_name(_lib.X509_set_issuer_name, issuer)
    self._issuer_invalidator.clear()

def search(self, query, verbose=0):
    if verbose > 0:
        print("searching " + query)
    query = query.lower()
    qgram = ng(query, self.slb)
    qocument = set()
    for q in qgram:
        if q in self.ngrams.keys():
            for i in self.ngrams[q]:
                qocu...

def layers(self):
    graph_layers = self.multigraph_layers()
    try:
        next(graph_layers)
    except StopIteration:
        return

    def add_nodes_from(layer, nodes):
        layer._multi_graph.add_nodes_from(nodes)

    for graph_layer in graph_layers:
        op_nodes = [n...