code
stringlengths
81
3.79k
def event(uid): db = get_session() event = db.query(RecordedEvent).filter(RecordedEvent.uid == uid).first() \ or db.query(UpcomingEvent).filter(UpcomingEvent.uid == uid).first() if event: return make_data_response(event.serialize()) return make_error_response('No event with specified uid...
def parse_database_url(url): if url == "sqlite://:memory:": raise Exception( 'Your url is "sqlite://:memory:", if you want ' 'an sqlite memory database, just use "sqlite://"' ) url_parts = urlsplit(url) engine = get_engine(url_parts.scheme) database, schema = pars...
def _matmul(a, b, transpose_a=False, transpose_b=False, adjoint_a=False, adjoint_b=False, a_is_sparse=False, b_is_sparse=False, name=None): if a_is_sparse or b_is_sparse: raise NotImplementedError('Numpy backend does not support sparse matmul.') if transpose_a or ...
def _broadcast_cat_event_and_params(event, params, base_dtype): if dtype_util.is_integer(event.dtype): pass elif dtype_util.is_floating(event.dtype): event = tf.cast(event, dtype=tf.int32) else: raise TypeError("`value` should have integer `dtype` or " "`self.dtype` ({})".format(ba...
def add_patch(self, patch):
    """Wrap *patch* in a PatchLine and register it.

    The extracted patch (if any) is indexed in ``self.patch2line``;
    the PatchLine itself is always appended to ``self.patchlines``.
    """
    line = PatchLine(patch)
    extracted = line.get_patch()
    if extracted:
        self.patch2line[extracted] = line
    self.patchlines.append(line)
def parse_rrset_record_values(e_resource_records):
    """Collect the text of every value element, in document order.

    Each record element in *e_resource_records* is itself iterable over
    value elements; the ``.text`` of each value is returned in a flat list.
    """
    # Nested comprehension replaces the manual append loop (same order).
    return [
        e_value.text
        for e_record in e_resource_records
        for e_value in e_record
    ]
def verify_profile_name(msg, cfg):
    """Ensure ``msg.profile`` names a profile present in ``cfg.data``.

    Returns nothing on success; raises UnknownProfileError otherwise.
    """
    known_profiles = cfg.data
    if msg.profile in known_profiles:
        return
    raise UnknownProfileError(msg.profile)
def bots(self):
    """Fetch the bot directory and return merged SkypeBotUser objects."""
    url = "{0}/agents".format(SkypeConnection.API_BOT)
    resp = self.skype.conn("GET", url, auth=SkypeConnection.Auth.SkypeToken)
    raw_agents = resp.json().get("agentDescriptions", [])
    return [self.merge(SkypeBotUser.fromRaw(self.skype, raw)) for raw in raw_agents]
def _makeApiCall(self, parameters=None): r = self._apiClient.get(self._url, parameters) if r.status_code == 200: return r.json() else: raise Exception("HTTP %s %s" % (r.status_code, r.text))
def volume_down(self):
    """Lower the cached volume level by one step and tell the device."""
    step_fraction = self._volume_step / self._max_volume
    self._volume_level -= step_fraction
    self._device.vol_down(num=self._volume_step)
def bytesize(self, byteorder='@'):
    """Serialized size in bytes: one int64 seed, one int32 length,
    then one uint32 hash value per element of ``self``."""
    fixed = struct.calcsize(byteorder + 'q') + struct.calcsize(byteorder + 'i')
    per_value = struct.calcsize(byteorder + 'I')
    return fixed + per_value * len(self)
def get_certificate_from_publish_settings(publish_settings_path, path_to_write_certificate, subscription_id=None): import base64 try: from xml.etree import cElementTree as ET except ImportError: from xml.etree import ElementTree as ET try: import OpenSSL.crypto as crypto exce...
def wantClass(self, cls): declared = getattr(cls, '__test__', None) if declared is not None: wanted = declared else: wanted = (not cls.__name__.startswith('_') and (issubclass(cls, unittest.TestCase) or self.matches(cls.__n...
def t_NOTEQUAL(self, t):
    # PLY lexer rule: the docstring below IS the token's regex ("!=")
    # and must stay exactly r"!\=" -- it is not documentation.
    r"!\="
    # Record the end position of the matched token before returning it.
    t.endlexpos = t.lexpos + len(t.value)
    return t
def get_dict(self, timeout=-1): results = self.get(timeout) engine_ids = [ md['engine_id'] for md in self._metadata ] bycount = sorted(engine_ids, key=lambda k: engine_ids.count(k)) maxcount = bycount.count(bycount[-1]) if maxcount > 1: raise ValueError("Cannot build ...
def create_file(self, bucket, key, file_versions): objs = [] for file_ver in file_versions: f = FileInstance.create().set_uri( file_ver['full_path'], file_ver['size'], 'md5:{0}'.format(file_ver['checksum']), ) obj = Obje...
def cli_command_restart(self, msg): info = '' if self.state == State.RUNNING and self.sprocess and self.sprocess.proc: self.state = State.RESTARTING self.sprocess.set_exit_callback(self.proc_exit_cb_restart) self.sprocess.proc.kill() info = 'killed' ...
def _reconstruct_matrix(data_list): matrix_format = data_list[0] data = data_list[1] is_empty = isinstance(data, str) and data == '__empty__' if matrix_format == 'csc': if is_empty: return spsp.csc_matrix(data_list[4]) else: return ...
def pull(dry_run, flavor, interactive, debug): try: main_section = _get_section_name(flavor) config = _try_load_config(main_section, interactive) lockfile_path = os.path.join(get_data_path(config, main_section), 'bugwarrior.lockfile') lockfile = P...
def label_search(self, key=None, value=None): if key is not None: key = key.lower() if value is not None: value = value.lower() show_details = True if key is None and value is None: url = '%s/labels/search' % (self.base) show_details = False elif key is not None and v...
def force_iterable(f):
    """Decorator: make *f* always return an iterable.

    If the wrapped call's result already supports iteration it is passed
    through unchanged; otherwise it is wrapped in a one-element list.
    """
    import functools

    # functools.wraps preserves the wrapped function's name/docstring,
    # which the original decorator lost.
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        result = f(*args, **kwargs)
        if hasattr(result, '__iter__'):
            return result
        return [result]
    return wrapper
def main():
    """Program entry point: run the selected sub-command and exit with
    its return code."""
    args = get_args()
    exit_code = args.target(args)
    _logger.debug('Exiting with code %d', exit_code)
    sys.exit(exit_code)
def lock(fileobj):
    """Best-effort exclusive lock on *fileobj* via fcntl.

    Returns True on success, False when fcntl is unavailable (e.g. on
    Windows) or the lock cannot be taken.
    """
    try:
        import fcntl
    except ImportError:
        return False
    try:
        fcntl.lockf(fileobj, fcntl.LOCK_EX)
    except IOError:
        return False
    return True
def make_bintree(levels):
    """Build a directed binary tree of depth *levels* rooted at node '0'."""
    tree = nx.DiGraph()
    root = '0'
    tree.add_node(root)
    add_children(tree, root, levels, 2)
    return tree
def set_option(self, optname, value, action=None, optdict=None): if optname in self._options_methods or optname in self._bw_options_methods: if value: try: meth = self._options_methods[optname] except KeyError: meth = self._bw_o...
def _send_offer_assignment_notification_email(config, user_email, subject, email_body, site_code, task): try: sailthru_client = get_sailthru_client(site_code) except SailthruError: logger.exception( '[Offer Assignment] A client error occurred while attempting to send a offer assignme...
def migrate(uri: str, archive_uri: str, case_id: str, dry: bool, force: bool): scout_client = MongoClient(uri) scout_database = scout_client[uri.rsplit('/', 1)[-1]] scout_adapter = MongoAdapter(database=scout_database) scout_case = scout_adapter.case(case_id) if not force and scout_case.get('is_migr...
def load_disease_term(self, disease_obj): LOG.debug("Loading disease term %s into database", disease_obj['_id']) try: self.disease_term_collection.insert_one(disease_obj) except DuplicateKeyError as err: raise IntegrityError("Disease term %s already exists in database".fo...
def get_elliptic_curve(name):
    """Return the supported elliptic curve whose ``name`` matches.

    Raises ValueError when no such curve is available.
    """
    for candidate in get_elliptic_curves():
        if candidate.name != name:
            continue
        return candidate
    raise ValueError("unknown curve name", name)
def subscribe(self): self.stream.setsockopt(zmq.UNSUBSCRIBE, '') if '' in self.topics: self.log.debug("Subscribing to: everything") self.stream.setsockopt(zmq.SUBSCRIBE, '') else: for topic in self.topics: self.log.debug("Subscribing to: %r"%(t...
def _sem_open(name, value=None): if value is None: handle = pthread.sem_open(ctypes.c_char_p(name), 0) else: handle = pthread.sem_open(ctypes.c_char_p(name), SEM_OFLAG, SEM_PERM, ctypes.c_int(value)) if handle == SEM_FAILURE: e = ctypes.get_errno() ...
def parallel(collection, method, processes=None, args=None, **kwargs): if processes is None: processes = min(mp.cpu_count(), 20) print "Running parallel process on " + str(processes) + " cores. :-)" pool = mp.Pool(processes=processes) PROC = [] tic = time.time() for main_arg in collectio...
def uncache_zipdir(path):
    """Drop *path* from zipimport's directory cache and from
    ``sys.path_importer_cache``."""
    from zipimport import _zip_directory_cache
    for cache in (_zip_directory_cache, sys.path_importer_cache):
        _uncache(path, cache)
def task_create(asana_workspace_id, name, notes, assignee, projects, completed, **kwargs):
    """Queue an Asana "task_create" request with the given task fields,
    forwarding any extra keyword arguments."""
    put(
        "task_create",
        asana_workspace_id=asana_workspace_id,
        name=name,
        notes=notes,
        assignee=assignee,
        projects=projects,
        completed=completed,
        **kwargs
    )
def str_to_num(str_value):
    """Coerce *str_value* to a number: int when possible, else float.

    Raises ValueError when the value parses as neither.
    """
    text = str(str_value)
    try:
        return int(text)
    except ValueError:
        return float(text)
def connect(url='https://github.com', token=None): gh_session = None if url == 'https://github.com': gh_session = create_session(token) else: gh_session = create_enterprise_session(url, token) if gh_session is None: msg = 'Unable to connect to (%s) with provided token.' r...
def generate_project(args): src = os.path.join(dirname(abspath(__file__)), 'project') project_name = args.get('<project>') if not project_name: logger.warning('Project name cannot be empty.') return dst = os.path.join(os.getcwd(), project_name) if os.path.isdir(dst): logger.w...
def with_ignored_exceptions(self, *ignored_exceptions):
    """Extend this object's ignored-exception tuple (fluent).

    All classes in *ignored_exceptions* are appended to
    ``self._ignored_exceptions``; returns ``self`` for chaining.
    """
    # Single tuple concatenation instead of rebuilding the tuple once
    # per exception (the original loop was quadratic).
    self._ignored_exceptions = self._ignored_exceptions + tuple(ignored_exceptions)
    return self
def indented_short_title(self, item): r = "" if hasattr(item, 'get_absolute_url'): r = '<input type="hidden" class="medialibrary_file_path" value="%s" />' % item.get_absolute_url() editable_class = '' if not getattr(item, 'feincms_editable', True): editable_class ...
def render_columns(columns, write_borders=True, column_colors=None): if column_colors is not None and len(column_colors) != len(columns): raise ValueError('Wrong number of column colors') widths = [max(len(cell) for cell in column) for column in columns] max_column_length = max(len(column) for colum...
def create_database(self, server_name, name, service_objective_id, edition=None, collation_name=None, max_size_bytes=None): _validate_not_none('server_name', server_name) _validate_not_none('name', name) _validate_not_none('service_objective_id', s...
def xcom_push( self, key, value, execution_date=None): if execution_date and execution_date < self.execution_date: raise ValueError( 'execution_date can not be in the past (current ' 'execution_date is {}; received {})'....
def merged(self, timeslots: 'TimeslotCollection') -> 'TimeslotCollection':
    """Return a new TimeslotCollection with copies of this collection's
    timeslots followed by copies of *timeslots*' timeslots."""
    combined = [
        Timeslot(slot.interval, slot.channel)
        for source in (self.timeslots, timeslots.timeslots)
        for slot in source
    ]
    return TimeslotCollection(*combined)
def concat(*seqs) -> ISeq:
    """Concatenate the given sequences into one lazy seq.

    Nil/empty inputs are skipped; returns the empty seq when nothing
    remains.
    """
    chained = itertools.chain(*filter(None, map(to_seq, seqs)))
    result = lseq.sequence(chained)
    return lseq.EMPTY if result is None else result
def _set_logger(self, name=None): if name is None: cls = self.__class__ name = '%s.%s' % (cls.__module__, cls.__name__) self._logger = logging.getLogger(name)
def as_event_description(self):
    """Serialize this event as ``{name: {'timestamp': ..., 'data': ...}}``,
    omitting ``'data'`` when it is None."""
    payload = {'timestamp': self.time}
    if self.data is not None:
        payload['data'] = self.data
    return {self.name: payload}
def add_element(self, element):
    """Attach *element* to the working fragment (fluent).

    BaseExpression instances become children of the current working
    fragment; anything else is delegated to ``add_operator``.
    """
    if not isinstance(element, BaseExpression):
        return self.add_operator(element)
    element.set_parent(self._working_fragment)
    self._working_fragment.elements.append(element)
    return self
def hub_history(self): self.session.send(self._query_socket, "history_request", content={}) idents, msg = self.session.recv(self._query_socket, 0) if self.debug: pprint(msg) content = msg['content'] if content['status'] != 'ok': raise self._unwrap_exceptio...
def mk_privkeys(num): "make privkeys that support coloring, see utils.cstr" privkeys = [] assert num <= num_colors for i in range(num): j = 0 while True: k = sha3(str(j)) a = privtoaddr(k) an = big_endian_to_int(a) if an % num_colors == i: ...
def parse_list(value):
    """Split a comma-separated string into items, honouring quoted segments.

    Commas inside quoted segments are temporarily replaced with NUL so
    the split only happens on real separators; each piece is then
    stripped, dequoted, and has its embedded commas restored.
    """
    for segment in _QUOTED_SEGMENT_RE.findall(value):
        before, match, after = value.partition(segment)
        value = ''.join([before, match.replace(',', '\000'), after])
    pieces = value.split(',')
    return [_dequote(piece.strip()).replace('\000', ',') for piece in pieces]
def index(context):
    """CLI command: drop every index in every collection of the scout
    database."""
    LOG.info("Running scout delete index")
    adapter = context.obj['adapter']
    for collection_name in adapter.db.collection_names():
        adapter.db[collection_name].drop_indexes()
    LOG.info("All indexes deleted")
def is_valid_filesys(path):
    """Validate that *path* is an absolute, existing directory.

    Returns True on success; raises LocalPortValidationError otherwise.
    """
    valid = (os.path.isabs(path)
             and os.path.isdir(path)
             and not os.path.isfile(path))
    if not valid:
        raise LocalPortValidationError(
            'Port value %s is not a valid filesystem location' % path
        )
    return True
def parse_coordinates(variant, category): ref = variant.REF if variant.ALT: alt = variant.ALT[0] if category=="str" and not variant.ALT: alt = '.' chrom_match = CHR_PATTERN.match(variant.CHROM) chrom = chrom_match.group(2) svtype = variant.INFO.get('SVTYPE') if svtype: ...
def get_metadata(path_or_module, metadata_version=None): if isinstance(path_or_module, ModuleType): try: return Installed(path_or_module, metadata_version) except (ValueError, IOError): pass try: __import__(path_or_module) except ImportError: pass ...
def unsubscribe(self, event, callback):
    """Remove a previously registered callback for *event*.

    Returns False when *event* has no subscriber list at all; True
    after a successful removal.
    """
    subscription = self._Subscription(event, callback)
    try:
        self._subscribers[event].remove(subscription)
    except KeyError:
        return False
    return True
def to_archive(self, writer): if 'b' not in writer.mode: raise GiraffeError("Archive writer must be in binary mode") writer.write(GIRAFFE_MAGIC) writer.write(self.columns.serialize()) i = 0 for n, chunk in enumerate(self._fetchall(ROW_ENCODING_RAW), 1): wr...
def flatten(iterables, level=inf):
    """Recursively yield atoms from nested lists/tuples/generators/maps/zips.

    *level* bounds the flattening depth; anything deeper (or of any
    other type) is yielded unchanged.
    """
    flattenable = (list, tuple, GeneratorType, map, zip)
    if level < 0 or not isinstance(iterables, flattenable):
        yield iterables
        return
    for item in iterables:
        yield from flatten(item, level=level - 1)
def send(self): self._generate_email() if self.verbose: print( "Debugging info" "\n--------------" "\n{} Message created.".format(timestamp()) ) recipients = [] for i in (self.to, self.cc, self.bcc): if i...
def node(self, title, **args):
    """Emit a VCG ``node`` entry titled *title* with optional attributes."""
    stream = self._stream
    stream.write('%snode: {title:"%s"' % (self._indent, title))
    self._write_attributes(NODE_ATTRS, **args)
    stream.write("}\n")
def _next_rdelim(items, pos): for num, item in enumerate(items): if item > pos: break else: raise RuntimeError("Mismatched delimiters") del items[num] return item
def _extract_base(self, element): if isinstance(element, list): return [self._extract_base(x) for x in element] base = self.checker.is_url_valid(url=element, return_base=True) if base: return base if "/" in element: return element.split("/")[0] ...
def recent(self): url = "{0}/users/ME/conversations".format(self.skype.conn.msgsHost) params = {"startTime": 0, "view": "msnp24Equivalent", "targetType": "Passport|Skype|Lync|Thread"} resp = self.skype.conn.syncStateCall("GET", url, params, auth=SkypeConnectio...
def _html_checker(job_var, interval, status, header, _interval_set=False): job_status = job_var.status() job_status_name = job_status.name job_status_msg = job_status.value status.value = header % (job_status_msg) while job_status_name not in ['DONE', 'CANCELLED']: time.sle...
def display(self):
    """Human-readable label: ``"name (ip)"`` when a non-empty string name
    is set, otherwise just the public IP."""
    name = self.name
    if isinstance(name, six.string_types) and len(name) > 0:
        return '{0} ({1})'.format(name, self.public_ip)
    return self.public_ip
def parse_conservation(variant, info_key): raw_score = variant.INFO.get(info_key) conservations = [] if raw_score: if isinstance(raw_score, numbers.Number): raw_score = (raw_score,) for score in raw_score: if score >= CONSERVATION[info_key]['conserved_min']: ...
def _asa_task(q, masks, stft, sample_width, frame_rate, nsamples_for_each_fft): for mask in masks: mask = np.where(mask > 0, 1, 0) masks = [mask * stft for mask in masks] nparrs = [] dtype_dict = {1: np.int8, 2: np.int16, 4: np.int32} dtype = dtype_dict[sample_width] for m in masks: ...
def client_authentication_required(self, request, *args, **kwargs): def is_confidential(client): if hasattr(client, 'is_confidential'): return client.is_confidential client_type = getattr(client, 'client_type', None) if client_type: return clie...
def contains(self, k):
    """Return True when key *k* is present in the backing store,
    re-reading the store first if the underlying data changed."""
    if self._changed():
        self._read()
    # Membership test on the mapping itself; `.keys()` only allocated a
    # view without changing semantics.
    return k in self.store
def assert_is_type(var, *types, **kwargs): assert types, "The list of expected types was not provided" expected_type = types[0] if len(types) == 1 else U(*types) if _check_type(var, expected_type): return assert set(kwargs).issubset({"message", "skip_frames"}), "Unexpected keyword arguments: %r" % kwarg...
def success(self): any_success = False for step_event in itertools.chain( self.input_expectations, self.output_expectations, self.transforms ): if step_event.event_type == DagsterEventType.STEP_FAILURE: return False if step_event.event_type == ...
def session_new(self, **kwargs):
    """Request a new session, cache the response fields on self, and
    return the raw response."""
    response = self._GET(self._get_path('session_new'), kwargs)
    self._set_attrs_to_values(response)
    return response
def filter_new(self, name, filt_str):
    """Compile *filt_str* into a filter and register it under *name*,
    keeping the source string as its info."""
    compiled = self.filt.grab_filt(filt=filt_str)
    self.filt.add(name, compiled, info=filt_str)
def _observe_mode(self, change): block = self.block if block and self.is_initialized and change['type'] == 'update': if change['oldvalue'] == 'replace': raise NotImplementedError for c in self.children: block.children.remove(c) c.se...
def read(self, input_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0): super(GetAttributeListResponsePayload, self).read( input_buffer, kmip_version=kmip_version ) local_buffer = utils.BytearrayStream(input_buffer.read(self.length)) if self.is_tag_next(enums.Tags....
def registerAdminSite(appName, excludeModels=()):
    """Register every model of the Django app *appName* with the admin site.

    excludeModels: models to skip. The default is now an immutable
    tuple -- the original mutable-list default could be aliased and
    mutated across calls.
    """
    excluded = set(excludeModels)  # O(1) membership instead of a list scan
    for model in apps.get_app_config(appName).get_models():
        if model not in excluded:
            admin.site.register(model)
def _load_rels(self, source):
    # Populate this object's relationship collection from *source*,
    # passing self as the owning object and *source* as the raw data.
    self.relationships.load(source=self, data=source)
def protocol_version_to_kmip_version(value): if not isinstance(value, ProtocolVersion): return None if value.major == 1: if value.minor == 0: return enums.KMIPVersion.KMIP_1_0 elif value.minor == 1: return enums.KMIPVersion.KMIP_1_1 elif value.minor == 2: ...
def genes_by_alias(self, build='37', genes=None): LOG.info("Fetching all genes by alias") alias_genes = {} if not genes: genes = self.hgnc_collection.find({'build':build}) for gene in genes: hgnc_id = gene['hgnc_id'] hgnc_symbol = gene['hgnc_symbol'] ...
def _expand_to_event_rank(self, x):
    """Append one size-1 trailing dimension to *x* for every dimension
    of the distribution's event shape."""
    result = x
    for _ in range(tensorshape_util.rank(self.event_shape)):
        result = tf.expand_dims(result, -1)
    return result
def start(self): if self._collectors: self._collectors[-1].pause() self._collectors.append(self) traces0 = [] if hasattr(sys, "gettrace"): fn0 = sys.gettrace() if fn0: tracer0 = getattr(fn0, '__self__', None) if tracer0:...
def _has_connection(hostname, port):
    """Best-effort reachability probe: can a TCP connection be opened to
    (hostname, port) within 2 seconds?

    Returns True/False and never raises (any resolution or connection
    failure counts as unreachable).
    """
    try:
        host = socket.gethostbyname(hostname)
        # Close the probe socket -- the original leaked the connection
        # object returned by create_connection.
        conn = socket.create_connection((host, port), 2)
        conn.close()
        return True
    except Exception:
        return False
def rps_at(self, t):
    """Linear ramp: requests-per-second at time *t*; 0 outside
    [0, duration]."""
    if not (0 <= t <= self.duration):
        return 0
    span = float(self.maxrps - self.minrps)
    return self.minrps + span * t / self.duration
def get_all_child_m2m_relations(model):
    """Return the model's fields that are ParentalManyToManyField
    relations."""
    all_fields = model._meta.get_fields()
    return [f for f in all_fields if isinstance(f, ParentalManyToManyField)]
def display_json(*objs, **kwargs):
    """Display objects as JSON; with ``raw=True`` publish each object
    unformatted instead."""
    if kwargs.pop('raw', False):
        for obj in objs:
            publish_json(obj)
    else:
        display(*objs, include=['text/plain', 'application/json'])
def pick_scalar_condition(pred, true_value, false_value, name=None): with tf.name_scope(name or "pick_scalar_condition"): pred = tf.convert_to_tensor( value=pred, dtype_hint=tf.bool, name="pred") true_value = tf.convert_to_tensor(value=true_value, name="true_value") false_value = tf.convert_to_ten...
def adapter(data, headers, table_format=None, **kwargs): keys = ('title', ) table = table_format_handler[table_format] t = table([headers] + list(data), **filter_dict_by_key(kwargs, keys)) dimensions = terminaltables.width_and_alignment.max_dimensions( t.table_data, t.padding_left, ...
def print_hex(data):
    """Debug-log *data* (an iterable of byte values) as an escaped hex
    string, e.g. ``\\x01\\xff``."""
    # join() builds the message in one pass instead of quadratic +=.
    hex_msg = "".join("\\x%02x" % c for c in data)
    _LOGGER.debug(hex_msg)
def fetch(self, category=CATEGORY_BUILD):
    """Fetch items for *category* (builds by default), delegating to the
    parent backend."""
    return super().fetch(category)
def _status_new(self): self._update_status() new_comp = self._group_report(self._comp_report, 'Completed') new_dead = self._group_report(self._dead_report, 'Dead, call jobs.traceback() for details') self._comp_report[:] = [] self._dead_report...
def encode(self): header = bytearray(1) varHeader = bytearray() payload = bytearray() if self.qos: header[0] = 0x30 | self.retain | (self.qos << 1) | (self.dup << 3) varHeader.extend(encodeString(self.topic)) varHeader.extend(encode16Int(self.msgI...
def symmetrized_csiszar_function(logu, csiszar_function, name=None):
    """Symmetrize a Csiszar f-function: ``0.5 * (f(u) + f*(u))`` where
    ``f*`` is the dual Csiszar function."""
    with tf.compat.v1.name_scope(name, "symmetrized_csiszar_function", [logu]):
        logu = tf.convert_to_tensor(value=logu, name="logu")
        forward = csiszar_function(logu)
        dual = dual_csiszar_function(logu, csiszar_function)
        return 0.5 * (forward + dual)
def get_engine(scheme): path = scheme.split("+") first, rest = path[0], path[1:] second = rest[0] if rest else None engine = resolve(ENGINE_MAPPING, first) if not isinstance(engine, list): if second: raise KeyError("%s has no sub-engines" % first) return engine try: ...
def export_to_storage_bucket(self, bucket, namespace=None, entity_filter=None, labels=None): admin_conn = self.get_conn() output_uri_prefix = 'gs://' + '/'.join(filter(None, [bucket, namespace])) if not entity_filter: entity_filter = {} if not labels: labels = {} ...
def build_package_from_pr_number(gh_token, sdk_id, pr_number, output_folder, *, with_comment=False): con = Github(gh_token) repo = con.get_repo(sdk_id) sdk_pr = repo.get_pull(pr_number) package_names = {f.filename.split('/')[0] for f in sdk_pr.get_files() if f.filename.startswith("azure")} absolute_...
def verbose(self, msg, *args, **kwargs):
    """Log *msg* at the custom VERBOSE level, forwarding all lazy
    formatting arguments to ``self.log``."""
    self.log(logging.VERBOSE, msg, *args, **kwargs)
def set_value(self, value):
    """Validate *value*, then store it in the underlying value holder."""
    self.validate_value(value)
    self.value.set(value)
def remoteDataReceived(self, connection, data):
    """Forward *data* from the remote side to the matching local
    protocol's transport; returns an empty response dict."""
    local = self.getLocalProtocol(connection)
    local.transport.write(data)
    return {}
def create_validator(data_struct_dict, name=None): if name is None: name = 'FromDictValidator' attrs = {} for field_name, field_info in six.iteritems(data_struct_dict): field_type = field_info['type'] if field_type == DictField.FIELD_TYPE_NAME and isinstance(field_info.get('validator...
def get_branch_mutation_matrix(self, node, full_sequence=False): pp,pc = self.marginal_branch_profile(node) expQt = self.gtr.expQt(self._branch_length_to_gtr(node)) if len(expQt.shape)==3: mut_matrix_stack = np.einsum('ai,aj,ija->aij', pc, pp, expQt) else: mut_mat...
def get_window(window, Nx, fftbins=True): if six.callable(window): return window(Nx) elif (isinstance(window, (six.string_types, tuple)) or np.isscalar(window)): return scipy.signal.get_window(window, Nx, fftbins=fftbins) elif isinstance(window, (np.ndarray, list)): if len(...