code
stringlengths
81
3.79k
def formalize(self): source_class = self.source_link.to_metaclass target_class = self.target_link.to_metaclass source_class.referential_attributes |= set(self.source_keys) target_class.identifying_attributes |= set(self.target_keys) def fget(inst, ref_name, alt_prop): ...
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): local_stream = utils.BytearrayStream() if self._unique_identifier: self._unique_identifier.write( local_stream, kmip_version=kmip_version ) if self._cryptographic_para...
def get_imap_capabilities(server): capabilities = list(map(str, list(server.capabilities()))) for i in range(len(capabilities)): capabilities[i] = str(capabilities[i]).replace("b'", "").replace("'", ...
def get_prices(self, date: str, currency: str) -> List[PriceModel]: from .repositories import PriceRepository session = self.session repo = PriceRepository(session) query = repo.query if date: query = query.filter(dal.Price.date == date) if currency: ...
def _handle_display_data(self, msg): self.log.debug("display: %s", msg.get('content', '')) if not self._hidden and self._is_from_this_session(msg): source = msg['content']['source'] data = msg['content']['data'] metadata = msg['content']['metadata'] if dat...
def check_type(self, value): if self.__dict__['dtype'] is None: return elif value is None: return elif isinstance(value, self.__dict__['dtype']): return msg = "Value of type %s, when %s was expected." % ( type(value), self.__dict__['dtype']...
def s3walk(self, basedir, show_dir=None): if not show_dir: show_dir = self.opt.show_dir if basedir[-1] == PATH_SEP: basedir = basedir[0:-1] s3url = S3URL(basedir) result = [] pool = ThreadPool(ThreadUtil, self.opt) pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result) po...
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0): local_buffer = utils.BytearrayStream() if self._unique_identifier: self._unique_identifier.write( local_buffer, kmip_version=kmip_version ) self.length = local_buffer....
def _construct_schema(elements, nsmap): schema = { 'properties': {}, 'geometry': None } schema_key = None gml_key = None if nsmap: for key in nsmap: if nsmap[key] == XS_NAMESPACE: schema_key = key if nsmap[key] in GML_NAMESPACES: ...
def _exit_gracefully(self, signum, frame):
    """Signal handler: terminate DAG processors and exit the process cleanly.

    Args:
        signum: number of the signal that triggered the shutdown.
        frame: current stack frame (unused; required by the signal-handler API).
    """
    self.log.info("Exiting gracefully upon receiving signal %s", signum)
    self.terminate()
    self.end()
    self.log.debug("Finished terminating DAG processors.")
    # os.EX_OK reports a successful, intentional shutdown to the parent.
    sys.exit(os.EX_OK)
def convertArgsToTokens(self, data):
    """Read the text file at *data* and return ``(lines, tokens)``.

    Args:
        data: path of the file to read.

    Returns:
        tuple: ``lines`` — each line of the file with trailing whitespace
        stripped, in file order; ``tokens`` — the unique whitespace-separated
        tokens of the file (order unspecified, as in the original set-based
        dedup).
    """
    tdict = []
    tokens = []
    # `with` guarantees the file handle is closed even if reading raises
    # (the original used open()/close() with no try/finally).
    with open(data, 'r') as d:
        for line in d:
            tdict.append(line.rstrip())
            tokens += line.split()
    tokens = list(set(tokens))
    return tdict, tokens
def _nested_convert_to_tensor(struct, dtype=None, name=None): if dtype is not None or not tf.nest.is_nested(struct): return tf.convert_to_tensor(struct, dtype=dtype) if _maybe_convertible_to_tensor(struct): try: return tf.convert_to_tensor(value=struct, name=name) except (ValueError, TypeError): ...
def __get_or_create( ns_cache: NamespaceMap, name: sym.Symbol, module: types.ModuleType = None, core_ns_name=CORE_NS, ) -> lmap.Map: ns = ns_cache.entry(name, None) if ns is not None: return ns_cache new_ns = Namespace(name, module=module) ...
def gauss(x, *p):
    """Evaluate a Gaussian ``A * exp(-(x - mu)**2 / (2 * sigma**2))``.

    Args:
        x: point(s) at which to evaluate (scalar or array).
        *p: the three parameters ``(A, mu, sigma)``.
    """
    amplitude, center, width = p
    return amplitude * np.exp(-0.5 * (x - center) ** 2 / width ** 2)
def tempfile_set(tempfile, target):
    """Commit or discard a temporary file.

    If *target* is truthy, rename the temp file onto it; otherwise delete
    the temp file.  Either way the file is dropped from the module-level
    ``TEMP_FILES`` bookkeeping list.

    Args:
        tempfile: path of the temporary file.
        target: final destination path, or a falsy value to discard.
    """
    if target:
        os.rename(tempfile, target)
    else:
        os.unlink(tempfile)
    # BUG FIX: the original tested `target in TEMP_FILES` while removing
    # `tempfile`, so handled temp files were never purged from the list.
    if tempfile in TEMP_FILES:
        TEMP_FILES.remove(tempfile)
def _get_properties(config): property_classes = {BUILTIN_PROPERTY} property_names = set() if config is not None: property_classes.update(config.property_classes) property_names.update( (prop.rsplit(".", 1)[-1] for prop in config.property_classes) ) return property_cla...
def duration(self): ecc = self.ecc if not np.isnan(self.ecc) else np.sqrt(self.ecw**2 + self.esw**2) esw = self.esw if not np.isnan(self.esw) else ecc * np.sin(self.w) aRs = ((G * self.rhos * (1. + self.MpMs) * (self.per * DAYSEC)**2.) / (3. * np.pi))**(1./3.) inc = np.arc...
def __fetch_items(self, path, page=1): fetch_data = True parsed_crates = 0 total_crates = 0 while fetch_data: logger.debug("Fetching page: %i", page) try: payload = {'sort': 'alphabetical', 'page': page} raw_content = self.fetch(pat...
def _joint_mean(self): with tf.name_scope("mean_joint"): with tf.control_dependencies(self.runtime_assertions): initial_latent_mean = _broadcast_to_shape( self.initial_state_prior.mean()[..., tf.newaxis], tf.concat([self.batch_shape_tensor(), [self.latent...
def resolve_outputs(self): input_shape = None for i, shape in enumerate(self._input_shapes.values()): if i == 0: input_shape = shape if len(input_shape) != len(shape) or any( a is not None and b is not None and a != b for a,...
def read(self, filename):
    """Read a config file, forcing UTF-8 where supported (Python >= 3.2).

    Delegates to :meth:`configparser.RawConfigParser.read` and returns its
    result (the list of successfully parsed filenames).
    """
    kwargs = {'encoding': "utf-8"} if sys.version_info >= (3, 2) else {}
    return configparser.RawConfigParser.read(self, filename, **kwargs)
def normalize(self, dt, is_dst=False):
    """Re-attach this tzinfo to an already-aware datetime.

    Raises:
        ValueError: if *dt* is naive (has no tzinfo set).
    """
    if dt.tzinfo is not None:
        return dt.replace(tzinfo=self)
    raise ValueError('Naive time - no tzinfo set')
def close(self):
    """Close the underlying socket exactly once; later calls are no-ops."""
    if not self._closed:
        self._socket.close()
        self._closed = True
def add_patches(self, patches, after=None): if after is None: self.insert_patches(patches) else: self._check_patch(after) patchlines = self._patchlines_before(after) patchlines.append(self.patch2line[after]) for patch in patches: ...
def update_key( self, vault_base_url, key_name, key_version, key_ops=None, key_attributes=None, tags=None, custom_headers=None, raw=False, **operation_config): parameters = models.KeyUpdateParameters(key_ops=key_ops, key_attributes=key_attributes, tags=tags) url = self.update_key.metadata['u...
def specific_gains(string):
    """Parse a comma-separated ``amp=gain`` list into ``{amp: float(gain)}``.

    Returns an empty dict for a falsy input string.
    """
    if not string:
        return {}
    pairs = (item.split('=') for item in string.split(','))
    return {name.strip(): float(value.strip()) for name, value in pairs}
def _with_loc(f: W) -> W: @functools.wraps(f) def with_lineno_and_col(ctx): meta = lmap.map( {READER_LINE_KW: ctx.reader.line, READER_COL_KW: ctx.reader.col} ) v = f(ctx) try: return v.with_meta(meta) except AttributeError: return v ...
def updates(self, **kwargs):
    """Return the update ops produced by building the graph with the
    regularizers described by **kwargs."""
    reg_terms = regularizers.from_kwargs(self, **kwargs)
    _graph, update_ops = self.build_graph(reg_terms)
    return update_ops
def sqrt(wave):
    r"""Return the square root of a waveform's dependent variable.

    The dependent-variable units are updated to ``<units>**0.5``.
    (BUG FIX: the original had a stray ``r`` token where the raw-docstring
    prefix belonged, which would raise NameError at call time.)
    """
    dep_units = "{0}**0.5".format(wave.dep_units)
    return _operation(wave, "sqrt", dep_units, np.sqrt)
def parse_args(): usage = "Usage: create_concordance <infile> [<outfile>]" description = "Simple Concordance Generator" argparser = argparse.ArgumentParser( usage=usage, description=description) argparser.add_argument( 'infile', type=argparse.FileType('r'), help="File read in to ...
def _count_table_rows(self, table_name): cursor = self._db.cursor() select_stmt = "SELECT COUNT(*) FROM " + table_name try: cursor.execute(select_stmt) row = cursor.fetchone() except sqlite3.DatabaseError as e: msg = "invalid archive file; cause: %s" %...
def discount_status(request, form): discounts = form.cleaned_data["discount"] items = commerce.DiscountItem.objects.filter( Q(discount__in=discounts), ).select_related("cart", "product", "product__category") items = group_by_cart_status( items, ["discount"], ["discount", ...
def spin(self): if self._notification_socket: self._flush_notifications() if self._iopub_socket: self._flush_iopub(self._iopub_socket) if self._mux_socket: self._flush_results(self._mux_socket) if self._task_socket: self._flush_results(self...
def show(self, title=''):
    """Render with *title*, then display the resulting figure if any."""
    self.render(title=title)
    figure = self.fig
    if figure:
        plt.show(figure)
def f_get_groups(self, copy=True):
    """Return the stored groups; a shallow copy unless ``copy=False``."""
    return self._groups.copy() if copy else self._groups
def create_domain(self, domain_name, username=None, alphabet=Domain.DEFAULT_ALPHABET, length=Domain.DEFAULT_KEY_LENGTH): try: return self._create_domain(domain_name, username, alphabet, length) except Exception as ex: _logger.warn("Inserting new domain failed: %s", ex...
def get_help(self):
    """Return explicit help text, else the stripped docstring, else ''."""
    if self.help:
        return self.help
    doc = self.__doc__
    if doc and doc.strip():
        return doc.strip()
    return ''
def _unique_constraint_name(table: str, field, keys): postfix = '_'.join(keys) return '{table}_{field}_unique_{postfix}'.format( table=table, field=field.column, postfix=postfix )
def get_result(self, indices_or_msg_ids=None, block=None): block = self.block if block is None else block if indices_or_msg_ids is None: indices_or_msg_ids = -1 if not isinstance(indices_or_msg_ids, (list,tuple)): indices_or_msg_ids = [indices_or_msg_ids] theids =...
def load_python_global(module, name):
    """Import *module* and return its attribute *name*, mapping the
    Python 2 name '__builtin__' to 'builtins' on Python 3."""
    target = module
    if target == '__builtin__' and six.PY3:
        target = 'builtins'
    return getattr(importlib.import_module(target), name)
def _req_rep_retry(self, request): retries_left = self.RETRIES while retries_left: self._logger.log(1, 'Sending REQ `%s`', request) self._send_request(request) socks = dict(self._poll.poll(self.TIMEOUT)) if socks.get(self._socket) == zmq.POLLIN: ...
def _check_inputs(self): try: _ = self._inputs[0] except TypeError: raise RuntimeError( "inputs should be iterable but found type='{0}', value=" "'{1}'".format(type(self._inputs), str(self._inputs))) from melody.inputs import Input ...
def individuals(context, institute, causatives, case_id): LOG.info("Running scout view individuals") adapter = context.obj['adapter'] individuals = [] if case_id: case = adapter.case(case_id=case_id) if case: cases = [case] else: LOG.info("Could not find c...
def show(self, *args, **kwargs):
    """Open ``str(self)`` in the default web browser; returns the result of
    ``webbrowser.open``."""
    import webbrowser
    return webbrowser.open(str(self), *args, **kwargs)
def insert_child ( self, object, index, child ): if isinstance( child, Subgraph ): object.subgraphs.insert( index, child ) elif isinstance( child, Cluster ): object.clusters.insert( index, child ) elif isinstance( child, Node ): object.nodes.insert( index, chi...
def get_private_keys( self, index=0, count=1, security_level=AddressGenerator.DEFAULT_SECURITY_LEVEL, ): return commands.GetPrivateKeysCommand(self.adapter)( seed=self.seed, index=index, count=count, securityLeve...
async def limited(until):
    """Log that a rate-limit quota is exhausted and when the retry happens.

    Args:
        until: absolute POSIX timestamp at which the quota resets; the wait
            is reported in both seconds and minutes.
    """
    duration = int(round(until - time.time()))
    mins = duration / 60
    fmt = 'We have exhausted a ratelimit quota. Retrying in %.2f seconds (%.3f minutes).'
    # Logger.warn() is a deprecated alias; warning() is the supported API.
    log.warning(fmt, duration, mins)
def get_last_activities(self, n):
    """Load and return the *n* most recent activities from the activity list."""
    recent_files = self.get_activity_list().iloc[-n:].filename.tolist()
    return [self.get_activity(name) for name in recent_files]
def fetch(self, category=CATEGORY_QUESTION, offset=DEFAULT_OFFSET):
    """Fetch items of *category*, substituting the default offset when the
    given one is falsy."""
    effective_offset = offset or DEFAULT_OFFSET
    return super().fetch(category, offset=effective_offset)
def get_public_tokens(self):
    """GET the service's ``public_tokens/`` endpoint and return parsed JSON."""
    response = self.remote_utils.get_url(self.url() + "public_tokens/")
    return response.json()
def validate_token(self, token, expected_data=None): try: data = self.load_token(token) if expected_data: for k in expected_data: if expected_data[k] != data["data"].get(k): return None return data except Bad...
async def set_session_state(self, state): await self._can_run() state = state.encode(self.encoding) if isinstance(state, six.text_type) else state return await self._mgmt_request_response( REQUEST_RESPONSE_SET_SESSION_STATE_OPERATION, {'session-id': self.session_id, 'sess...
def error(self, relative_to='AME2003'):
    """Return a Table of the differences between this table and the
    reference table named by *relative_to*."""
    difference = self.df - Table(relative_to).df
    return Table(df=difference)
def update_event_hub(self, hub_name, hub=None): _validate_not_none('hub_name', hub_name) request = HTTPRequest() request.method = 'PUT' request.host = self._get_host() request.path = '/' + _str(hub_name) + '?api-version=2014-01' request.body = _get_request_body(_convert_e...
def _get_pipeline_processes(self): with open(self.log_file) as fh: for line in fh: if re.match(".*Creating operator.*", line): match = re.match(".*Creating operator > (.*) --", line) process = match.group(1) if any([process....
def allow_request(self, request, view):
    """Throttle only POST requests; any other method is always allowed."""
    if request.method == 'POST':
        return super(PostRequestThrottleMixin, self).allow_request(request, view)
    return True
def mr_reader(job, input_stream, loads=core.loads):
    """Yield one-tuples of records deserialized from *input_stream* with
    the given *loads* function."""
    for record in input_stream:
        yield (loads(record),)
def until_traits_are_present(self, element_with_traits): end_time = time.time() + self._timeout count = 1 missing_traits_descriptions = None while True: missing_traits_descriptions = [] try: missing_traits_descriptions = element_with_traits.evaluat...
def find_max_rad_npnp(self): max_rad = 0 max_npnp = 0 for res, _ in self.items(): if res != 'KEY': for _, ff_params in self[res].items(): if max_rad < ff_params[1]: max_rad = ff_params[1] if max_npnp < ff...
def crscode_to_string(codetype, code, format):
    """Fetch a CRS definition from spatialreference.org and return it as str."""
    link = 'http://spatialreference.org/ref/%s/%s/%s/' % (codetype, code, format)
    payload = urllib2.urlopen(link).read()
    if isinstance(payload, str):
        return payload
    return payload.decode()
def intern(self, sym: sym.Symbol, var: Var, force: bool = False) -> Var:
    """Map *sym* to *var* in this namespace's interns and return the
    interned Var."""
    updated: lmap.Map = self._interns.swap(Namespace._intern, sym, var, force=force)
    return updated.entry(sym)
def clone(url, path): adapter = None if url[:4] == "git@" or url[-4:] == ".git": adapter = Git(path) if url[:6] == "svn://": adapter = Svn(path) if url[:6] == "bzr://": adapter = Bzr(path) if url[:9] == "ssh://hg@": adapter = Hg(path) if adapter is None: r...
def _send_file(self, local, remote): remote = "%s:%s" % (self.location, remote) for i in range(10): if not os.path.exists(local): self.log.debug("waiting for %s" % local) time.sleep(1) else: break self.log.info("sending %s t...
def set_default_tlw(self, tlw, designer, inspector):
    "track default top level window for toolbox menu default action"
    # NOTE(review): the `tlw` parameter is accepted but never stored or used
    # here — only designer and inspector are tracked.  Confirm whether
    # `self.tlw = tlw` was intended or whether callers rely on it being
    # ignored.
    self.designer = designer
    self.inspector = inspector
def _chunk_pars(freq_vector, data_matrix, pformat): pformat = pformat.upper() length = 4 for freq, data in zip(freq_vector, data_matrix): data = data.flatten() for index in range(0, data.size, length): fpoint = [freq] if not index else [None] cdata = data[index : inde...
def _determine_function_name_type(node, config=None): property_classes, property_names = _get_properties(config) if not node.is_method(): return "function" if node.decorators: decorators = node.decorators.nodes else: decorators = [] for decorator in decorators: if isi...
def get(self, name, factory, *factory_args, **factory_kwargs): update_thread_local = getattr(factory, 'update_thread_local', True) if (not update_thread_local) or (name not in self.__dict__): obj = factory(*factory_args, **factory_kwargs) if update_thread_local: s...
def _build_point_formats_dtypes(point_format_dimensions, dimensions_dict):
    """Map each point-format id to the numpy dtype built from its dimensions."""
    dtypes = {}
    for fmt_id, point_fmt in point_format_dimensions.items():
        dtypes[fmt_id] = _point_format_to_dtype(point_fmt, dimensions_dict)
    return dtypes
def fetch_metric(self, metric, start, end, tags={}, aggregator="sum", downsample=None, ms_resolution=True): query = "{aggregator}:{downsample}{metric}{{{tags}}}".format( aggregator=aggregator, downsample=downsample + "-avg:" if downsample else "", metric=metric, ...
def read(self):
    """Seek to the stored data offset and cache ``data_size`` bytes on
    ``self.data``."""
    fileobj = self.__fileobj
    fileobj.seek(self.data_offset)
    self.data = fileobj.read(self.data_size)
def gen_timeout_resend(attempts):
    """Exponential backoff with +/-1 s jitter: ``2**(attempts+1)`` seconds."""
    jitter = random.uniform(-1, +1)
    timeout = 2 ** (attempts + 1) + jitter
    logger.debug('next timeout resending will happen on %s',
                 future_dt_str(nowutc(), timeout))
    return timeout
def apply(self, method, args): try: params = args['params'] if isinstance(params, dict): result = method(**params) else: result = method(*params) except Exception as error: server_error(args['id'], error) else: ...
def _add_group_from_storage(self, args, kwargs): return self._nn_interface._add_generic(self, type_name=GROUP, group_type_name=GROUP, args=args, ...
def hflip(img):
    """Return *img* flipped horizontally.

    Raises:
        TypeError: if *img* is not a PIL Image.
    """
    if _is_pil_image(img):
        return img.transpose(Image.FLIP_LEFT_RIGHT)
    raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
def save_image(self, imagefile, save_path, file_ext, mime_type): file_to_save = InMemoryUploadedFile( imagefile, None, 'foo.%s' % file_ext, mime_type, imagefile.tell(), None ) file_to_save.seek(0) self.storage.save(s...
def draw(self):
    """Draw the submenu if visible, clipping with glScissor unless the
    submenu is a Container (which manages its own clipping)."""
    if not self.visible:
        return
    needs_clip = not isinstance(self.submenu, Container)
    if needs_clip:
        glEnable(GL_SCISSOR_TEST)
        glScissor(*self.pos + self.size)
    SubMenu.draw(self)
    if needs_clip:
        glDisable(GL_SCISSOR_TEST)
def cb_help_message(self, option, optname, value, parser):
    """Option callback: print help for the given message id(s), then exit 0."""
    message_ids = utils._splitstrip(value)
    self.linter.msgs_store.help_message(message_ids)
    sys.exit(0)
def open(path, mode=gdalconst.GA_ReadOnly): path = getattr(path, 'name', path) try: return Raster(vsiprefix(path), mode) except AttributeError: try: imgdata = path.read() except AttributeError: raise TypeError('Not a file-like object providing read()') ...
def from_connection_string(cls, conn_str, *, loop=None, **kwargs): address, policy, key, _ = parse_conn_str(conn_str) parsed_namespace = urlparse(address) namespace, _, base = parsed_namespace.hostname.partition('.') return cls( service_namespace=namespace, shared...
def _read_config(self): self._config_loaded = True conf = [] for f in self._candidate_log_files(): if os.path.isfile(f): self._logger.info("Reading config file %s" % f) section_rx = re.compile(r"^\[(\w+)\]$") keyvalue_rx = re.compile(r"...
def clean_time_slots(self):
    """Remove every timeslot that no annotation in any tier references."""
    referenced = set()
    for tier in self.tiers.values():
        for annotation in tier[0].values():
            referenced.add(annotation[0])
            referenced.add(annotation[1])
    # Symmetric difference, as in the original: unreferenced known slots get
    # deleted, and a referenced-but-unknown slot raises KeyError.
    for slot in referenced ^ set(self.timeslots):
        del self.timeslots[slot]
def __last_beat(cumscore):
    """Return the index of the last local maximum of *cumscore* whose score
    exceeds half the median local-maximum score."""
    is_localmax = util.localmax(cumscore)
    median_score = np.median(cumscore[np.argwhere(is_localmax)])
    strong = cumscore * is_localmax * 2 > median_score
    return np.argwhere(strong).max()
def _basename_in_blacklist_re(base_name, black_list_re): for file_pattern in black_list_re: if file_pattern.match(base_name): return True return False
def _parse_header(line): parts = _parseparam(';' + line) key = parts.next() pdict = {} for p in parts: i = p.find('=') if i >= 0: name = p[:i].strip().lower() value = p[i+1:].strip() if len(value) >= 2 and value[0] == value[-1] == '"': ...
def plot_tree(T, res=None, title=None, cmap_id="Pastel2"): import matplotlib.pyplot as plt def round_time(t, res=0.1): v = int(t / float(res)) * res return v cmap = plt.get_cmap(cmap_id) level_bounds = [] for level in T.levels: if level == "root": continue ...
def validate_zone(zone):
    # Validate that *zone* exposes usable 'id' and 'name' attributes,
    # raising InvalidZone (with the zone class's __name__) otherwise.
    if not has_valid_id(zone):
        raise InvalidZone("%s must contain a valid 'id' attribute" % zone.__name__)
    if not has_valid_name(zone):
        raise InvalidZone("%s must contain a valid 'name' attribute" % zone.__name__)
def _merge(self, old, new, use_equals=False): if old is None: return new if new is None: return old if (old == new) if use_equals else (old is new): return old raise ValueError("Incompatible values: %s != %s" % (old, new))
def list(self, resource=None, type=None, actorId=None, _from=None, to=None, max=None, **request_parameters): check_type(resource, basestring) check_type(type, basestring) check_type(actorId, basestring) check_type(_from, basestring) check_type(to, basestring) ...
def _reformat_historical_formating_error(self): if PyFunceble.CONFIGURATION["inactive_database"]: historical_formating_error = ( PyFunceble.CURRENT_DIRECTORY + "inactive-db.json" ) if PyFunceble.path.isfile(historical_formating_error): data = D...
def _copy_image(self, name):
    """Place the named image on the application clipboard."""
    QtGui.QApplication.clipboard().setImage(self._get_image(name))
def list(self):
    """Return the parsed JSON from the custom-bundle endpoint.

    Raises:
        ApiException: when the response status is not 200.
    """
    url = "api/v0002/mgmt/custom/bundle"
    response = self._apiClient.get(url)
    if response.status_code != 200:
        raise ApiException(response)
    return response.json()
def log_attempt(self, key): with self.lock: if key not in self.attempts: self.attempts[key] = 1 else: self.attempts[key] += 1 if self.attempts[key] >= self.max_attempts: log.info('Account %s locked due to too many login ...
def get_process_gids(self):
    """Return this process's (real, effective, saved) gids as an nt_gids tuple."""
    real_gid, effective_gid, saved_gid = _psutil_bsd.get_process_gids(self.pid)
    return nt_gids(real_gid, effective_gid, saved_gid)
def get_service_certificate(self, service_name, thumbalgorithm, thumbprint): _validate_not_none('service_name', service_name) _validate_not_none('thumbalgorithm', thumbalgorithm) _validate_not_none('thumbprint', thumbprint) return self._perform_get( '/' + self.subscription_id...
def calc_n_ints_in_file(filename): h = read_header(filename) n_bytes = int(h[b'nbits'] / 8) n_chans = h[b'nchans'] n_ifs = h[b'nifs'] idx_data = len_header(filename) f = open(filename, 'rb') f.seek(idx_data) filesize = os.path.getsize(filename) n_bytes_data = filesize - idx_data ...
def collect_things_entry_points():
    """Load every 'invenio_migrator.things' entry point into a
    ``{name: loaded_object}`` dict."""
    return {
        entry_point.name: entry_point.load()
        for entry_point in iter_entry_points(group='invenio_migrator.things')
    }
def _check_token_present(self):
    """Request fresh access information when any required token key is
    missing from the configuration."""
    required_keys = (CONFIGKEY_TOKEN, CONFIGKEY_REFRESH_TOKEN, CONFIGKEY_REFRESHABLE)
    try:
        for key in required_keys:
            self._get_value(key)
    except KeyError:
        self._log("Request new Token (CTP)")
        self._get_new_access_information()
def parse(self, *args): parsed_args = self.parser.parse_args(args) if parsed_args.category is None: delattr(parsed_args, 'category') if self._from_date: parsed_args.from_date = str_to_datetime(parsed_args.from_date) if self._to_date and parsed_args.to_date: ...
def connection_lost(self, exc):
    """Tear down the worker when its transport connection drops."""
    logger.debug("worker connection lost")
    worker = self._worker
    worker.close()
    self._workers.remove(worker)
def register_handler(self, name, handler, esc_strings):
    """Register *handler* under *name* and under each escape string."""
    self._handlers[name] = handler
    self._esc_handlers.update({esc: handler for esc in esc_strings})