code
stringlengths
59
4.4k
docstring
stringlengths
5
7.69k
def _trackInstanceAndCheckForConcurrencyViolation(self): global g_max_concurrency, g_max_concurrency_raise_exception assert g_max_concurrency is not None assert self not in self._clsOutstandingInstances, repr(self) self._creationTracebackString = traceback.format_stack() if self._clsNumOutstanding >...
Check for concurrency violation and add self to _clsOutstandingInstances. ASSUMPTION: Called from constructor BEFORE _clsNumOutstanding is incremented
def filter_dict(d, exclude):
    """Return a new dict with the given keys removed from the original dict.

    :param d: original dict (not modified)
    :param exclude: iterable of keys to leave out
    :returns: a new dict containing every item of ``d`` whose key is not in ``exclude``
    """
    # Idiomatic dict comprehension replaces the manual loop + update().
    return {key: value for key, value in d.items() if key not in exclude}
Return a new dict with specified keys excluded from the original dict Args: d (dict): original dict exclude (list): The keys that are excluded
def get_sampled(data, totn, node): names = sorted(totn) cdict = {name: idx for idx, name in enumerate(names)} if (node.is_leaf() or node.is_root()): return 0 else: if len(node.children) > 2: down_r = node.children[0] down_l = node.children[1] for child...
get total number of quartets sampled for a split
def _get_resource(self, url, data_key=None): headers = {"Accept": "application/json"} if self.token: headers["W-Token"] = "%s" % self.token response = WhenIWork_DAO().getURL(url, headers) if response.status != 200: raise DataFailureException(url, response.status, ...
When I Work GET method. Return representation of the requested resource.
def appendRecord(self, record): assert self._file is not None assert self._mode == self._FILE_WRITE_MODE assert isinstance(record, (list, tuple)), \ "unexpected record type: " + repr(type(record)) assert len(record) == self._fieldCount, \ "len(record): %s, fieldCount: %s" % (len(record), sel...
Saves the record in the underlying csv file. :param record: a list of Python objects that will be string-ified
def _warning(code): if isinstance(code, str): return code message = '' if isinstance(code, tuple): if isinstance(code[0], str): message = code[1] code = code[0] return CFG_BIBRECORD_WARNING_MSGS.get(code, '') + message
Return a warning message of code 'code'. If code = (cd, str) it returns the warning message of code 'cd' and appends str at the end
def format_objects(objects, children=False, columns=None, header=True): columns = columns or ('NAME', 'TYPE', 'PATH') objects = sorted(objects, key=_type_and_name) data = [] for obj in objects: if isinstance(obj, cpenv.VirtualEnvironment): data.append(get_info(obj)) modul...
Format a list of environments and modules for terminal output
def code_challenge(verifier):
    """Create a PKCE ``code_challenge`` per RFC 7636 section 4.2.

    The verifier is SHA-256 hashed and the digest is urlsafe
    base64-encoded with trailing ``=`` padding stripped.

    :param verifier: bytestring code_verifier
    :returns: bytestring code_challenge
    """
    hashed = hashlib.sha256(verifier).digest()
    encoded = base64.urlsafe_b64encode(hashed)
    return encoded.rstrip(b'=')
Creates a 'code_challenge' as described in section 4.2 of RFC 7636 by taking the sha256 hash of the verifier and then urlsafe base64-encoding it. Args: verifier: bytestring, representing a code_verifier as generated by code_verifier(). Returns: Bytestring, representing a ur...
def write_config(self):
    """Persist ``self.cfg`` to the file named by ``self.config_file``."""
    with open(self.config_file, "w") as fh:
        self.cfg.write(fh)
Writes `self.cfg` to `self.config_file`.
def get(self, key, default=None):
    """Return the value stored under *key*, or *default* when absent.

    Reads from the in-memory dict when ``self.in_memory`` is set,
    otherwise from the on-disk database.
    """
    source = self._memory_db if self.in_memory else self._read_file()
    return source.get(key, default)
Get key value, return default if key doesn't exist
def popUpItem(self, *args):
    """Return the specified item in a pop up menu.

    NOTE(review): presses the control, then sleeps 0.5s — presumably to
    give the menu time to open before looking the item up; confirm the
    delay is still needed.
    """
    self.Press()
    time.sleep(.5)
    # _menuItem is passed self explicitly in addition to being called on
    # self — intent unclear from here; TODO confirm against _menuItem's
    # signature.
    return self._menuItem(self, *args)
Return the specified item in a pop up menu.
def distribute_package(roles, cl_args): Log.info("Distributing heron package to nodes (this might take a while)...") masters = roles[Role.MASTERS] slaves = roles[Role.SLAVES] tar_file = tempfile.NamedTemporaryFile(suffix=".tmp").name Log.debug("TAR file %s to %s" % (cl_args["heron_dir"], tar_file)) make_tar...
distribute Heron packages to all nodes
def window_at(self, geom, window_shape): y_size, x_size = window_shape[0], window_shape[1] bounds = box(*geom.bounds) px = ops.transform(self.__geo_transform__.rev, bounds).centroid miny, maxy = int(px.y - y_size/2), int(px.y + y_size/2) minx, maxx = int(px.x - x_size/2), int(px....
Return a subsetted window of a given size, centered on a geometry object Useful for generating training sets from vector training data Will throw a ValueError if the window is not within the image bounds Args: geom (shapely.geometry): Geometry to center the image on win...
def build_pipeline(cls, project, zones, min_cores, min_ram, disk_size, boot_disk_size, preemptible, accelerator_type, accelerator_count, image, script_name, envs, inputs, outputs, pipeline_name): if min_cores is None: min_cores = job_model.DEFAULT...
Builds a pipeline configuration for execution. Args: project: string name of project. zones: list of zone names for jobs to be run at. min_cores: int number of CPU cores required per job. min_ram: int GB of RAM required per job. disk_size: int GB of disk to attach under /mnt/data. ...
def add_record_length_check(self, code=RECORD_LENGTH_CHECK_FAILED, message=MESSAGES[RECORD_LENGTH_CHECK_FAILED], modulus=1):
    """Register a record length check.

    Verifies that each record's length is consistent with the expected
    number of fields.

    :param code: problem code reported when a record is invalid
    :param message: problem message reported when a record is invalid
    :param modulus: apply the check to every Nth record
    """
    self._record_length_checks.append((code, message, modulus))
Add a record length check, i.e., check whether the length of a record is consistent with the number of expected fields. Arguments --------- `code` - problem code to report if a record is not valid, defaults to `RECORD_LENGTH_CHECK_FAILED` `message` - problem message to...
def set_directory(path=None):
    """Set the LanguageTool directory.

    :param path: new directory, or None to reset to the default.
    """
    # Remember the previous directory so we can roll back on failure.
    old_path = get_directory()
    terminate_server()
    cache.clear()
    if path:
        cache['language_check_dir'] = path
        try:
            # Validates the new directory by locating the jar; raises on failure.
            get_jar_info()
        except Error:
            # Restore the previous directory before propagating the error.
            cache['language_check_dir'] = old_path
            raise
Set LanguageTool directory.
def stetson_jindex(ftimes, fmags, ferrs, weightbytimediff=False): ndet = len(fmags) if ndet > 9: medmag = npmedian(fmags) delta_prefactor = (ndet/(ndet - 1)) sigma_i = delta_prefactor*(fmags - medmag)/ferrs sigma_j = nproll(sigma_i,1) if weightbytimediff: diff...
This calculates the Stetson index for the magseries, based on consecutive pairs of observations. Based on Nicole Loncke's work for her Planets and Life certificate at Princeton in 2014. Parameters ---------- ftimes,fmags,ferrs : np.array The input mag/flux time-series with all non-fin...
def dumps(obj, startindex=1, separator=DEFAULT, index_separator=DEFAULT): try: firstkey = next(iter(obj.keys())) except StopIteration: return str() if isinstance(firstkey, six.text_type): io = StringIO() else: io = BytesIO() dump( obj=obj, fp=io, ...
Dump an object in req format to a string. :param Mapping obj: The object to serialize. Must have a keys method. :param separator: The separator between key and value. Defaults to u'|' or b'|', depending on the types. :param index_separator: The separator between key and index. Defaults to u'_' or b'_', ...
def SCAS(cpu, dest, src): dest_reg = dest.reg mem_reg = src.mem.base size = dest.size arg0 = dest.read() arg1 = src.read() res = arg0 - arg1 cpu._calculate_CMP_flags(size, res, arg0, arg1) increment = Operators.ITEBV(cpu.address_bit_size, cpu.DF, -size // ...
Scans String. Compares the byte, word, or double word specified with the memory operand with the value in the AL, AX, EAX, or RAX register, and sets the status flags according to the results. The memory operand address is read from either the ES:RDI, ES:EDI or the ES:DI registers (depen...
def add_mip_obj(model): if len(model.variables) > 1e4: LOGGER.warning("the MIP version of minimal media is extremely slow for" " models that large :(") exchange_rxns = find_boundary_types(model, "exchange") big_m = max(abs(b) for r in exchange_rxns for b in r.bounds) prob ...
Add a mixed-integer version of a minimal medium to the model. Changes the optimization objective to finding the medium with the least components:: minimize size(R) where R part of import_reactions Arguments --------- model : cobra.model The model to modify.
def create_query(self, attr): field = attr[0] operator = attr[1] value = attr[2] model = self.model if '.' in field: field_items = field.split('.') field_name = getattr(model, field_items[0], None) class_name = field_name.property.mapper.class_...
Mix all values and make the query
def _parse_notes_dict(sbase): notes = sbase.getNotesString() if notes and len(notes) > 0: pattern = r"<p>\s*(\w+\s*\w*)\s*:\s*([\w|\s]+)<" matches = re.findall(pattern, notes) d = {k.strip(): v.strip() for (k, v) in matches} return {k: v for k, v in d.items() if len(v) > 0} e...
Creates dictionary of COBRA notes. Parameters ---------- sbase : libsbml.SBase Returns ------- dict of notes
def model_to_pymatbridge(model, variable_name="model", matlab=None): if scipy_sparse is None: raise ImportError("`model_to_pymatbridge` requires scipy!") if matlab is None: from IPython import get_ipython matlab = get_ipython().magics_manager.registry["MatlabMagics"].Matlab model_inf...
send the model to a MATLAB workspace through pymatbridge This model can then be manipulated through the COBRA toolbox Parameters ---------- variable_name : str The variable name to which the model will be assigned in the MATLAB workspace matlab : None or pymatbridge.Matlab instanc...
def create(self):
    """Create the key/value table in the SQLite database.

    NOTE(review): the original SQL string literal was lost during data
    extraction (the source read ``query = ( ).format(...)``, which raises
    AttributeError).  The statement below is a plausible reconstruction
    from the format arguments — TODO confirm against the original schema.
    """
    query = (
        'CREATE TABLE IF NOT EXISTS {0} ({1} TEXT, {2} TEXT)'
    ).format(self.__tablename__, self.__key__, self.__value__)
    connection = sqlite3.connect(self.sqlite_file)
    try:
        connection.execute(query)
        connection.commit()
    finally:
        # The original leaked the connection; always close it.
        connection.close()
Create the new table in the SQLite database
def with_revision(self, label, number):
    """Return a copy of this tag carrying the given revision."""
    tagged = self.clone()
    tagged.revision = Revision(label, number)
    return tagged
Returns a Tag with a given revision
def queries_map():
    """Build the map from query parameter to query name.

    Bug fix: the original did ``dict(zip(...) + zip(...))``, which works
    only on Python 2 where ``zip`` returns a list; on Python 3 ``zip``
    returns an iterator and ``+`` raises TypeError.  Building one dict
    and updating it works on both.
    """
    qs = _all_metric_queries()
    mapping = dict(zip(qs[0], qs[1]))
    mapping.update(zip(qs[2], qs[3]))
    return mapping
map from query parameter to query name
def run(self, clock):
    """Execute the entity for the current clock cycle.

    Does nothing once the clock has passed ``self.period_count``.

    :param clock: clock carrying the current execution time/period info.
    """
    if clock.timestep_ix >= self.period_count:
        return
    for component in self.components:
        component.run(clock, self.gl)
    self._perform_year_end_procedure(clock)
Execute the entity at the current clock cycle. :param clock: The clock containing the current execution time and period information.
def verifyscrollbarhorizontal(self, window_name, object_name):
    """Verify that the named scrollbar is horizontal.

    :param window_name: window to look in (full name, LDTP convention, or glob)
    :param object_name: scrollbar object name
    :returns: 1 if the scrollbar is horizontal, 0 otherwise (including
        when the object cannot be found)
    """
    try:
        object_handle = self._get_object_handle(window_name, object_name)
        if object_handle.AXOrientation == "AXHorizontalOrientation":
            return 1
    except Exception:
        # Bug fix: the bare ``except:`` also swallowed KeyboardInterrupt
        # and SystemExit; narrow to Exception while keeping the
        # return-0-on-failure contract.
        pass
    return 0
Verify scrollbar is horizontal @param window_name: Window name to type in, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @param object_name: Object name to type in, either full name, LDTP's name convention, or a Unix glob. @type obje...
async def set_group_link_sharing_enabled( self, set_group_link_sharing_enabled_request ): response = hangouts_pb2.SetGroupLinkSharingEnabledResponse() await self._pb_request('conversations/setgrouplinksharingenabled', set_group_link_sharing_enabled_request,...
Set whether group link sharing is enabled for a conversation.
def get_volume(self, datacenter_id, volume_id):
    """Retrieve a single volume by ID.

    :param datacenter_id: unique ID of the data center
    :param volume_id: unique ID of the volume
    :returns: the API response for the volume resource
    """
    url = '/datacenters/%s/volumes/%s' % (datacenter_id, volume_id)
    return self._perform_request(url)
Retrieves a single volume by ID. :param datacenter_id: The unique ID of the data center. :type datacenter_id: ``str`` :param volume_id: The unique ID of the volume. :type volume_id: ``str``
def get(self, key):
    """Retrieve the pandas object or group of ndarrays stored under *key*.

    :param key: name of the stored object
    :returns: the de-serialized object
    :raises KeyError: when no object with that name exists in the file
    """
    node = self.get_node(key)
    if node is None:
        raise KeyError('No object named %s in the file' % key)
    # Nodes tagged with a pandas_type attribute are pandas objects;
    # everything else is read back as a plain array.
    if hasattr(node, 'attrs') and 'pandas_type' in node.attrs:
        return self._read_group(node)
    return self._read_array(node)
Retrieve pandas object or group of Numpy ndarrays stored in file Parameters ---------- key : object Returns ------- obj : type of object stored in file
def _advapi32_decrypt(private_key, ciphertext, rsa_oaep_padding=False): flags = 0 if rsa_oaep_padding: flags = Advapi32Const.CRYPT_OAEP ciphertext = ciphertext[::-1] buffer = buffer_from_bytes(ciphertext) out_len = new(advapi32, 'DWORD *', len(ciphertext)) res = advapi32.CryptDecrypt( ...
Encrypts a value using an RSA private key via CryptoAPI :param private_key: A PrivateKey instance to decrypt with :param ciphertext: A byte string of the data to decrypt :param rsa_oaep_padding: If OAEP padding should be used instead of PKCS#1 v1.5 :raises: ValueError...
def splitext_files_only(filepath):
    """Split the extension off *filepath*, but leave directories whole.

    Directories get an empty extension instead of having a dotted suffix
    peeled off their name.
    """
    if os.path.isdir(filepath):
        return (filepath, '')
    return os.path.splitext(filepath)
Custom version of splitext that doesn't perform splitext on directories
def fix_js_args(func): fcode = six.get_function_code(func) fargs = fcode.co_varnames[fcode.co_argcount - 2:fcode.co_argcount] if fargs == ('this', 'arguments') or fargs == ('arguments', 'var'): return func code = append_arguments(six.get_function_code(func), ('this', 'arguments')) return typ...
Use this function when unsure whether func takes this and arguments as its last 2 args. It will append 2 args if it does not.
def fromdict(cls, config, check_fields=True):
    """Create a Config object directly from a config dict.

    NOTE(review): presumably a @classmethod on Config — confirm the
    decorator at the call site.

    :param config: raw configuration dict to merge with defaults
    :param check_fields: when True, validate the merged configuration
    :returns: a new Config instance
    """
    # Bypasses __init__ deliberately: the instance is allocated via
    # __new__ and its attributes are populated by hand below.
    m = super(Config, cls).__new__(cls)
    m.path = '.'
    m.verbose = False
    m.config = m._merge_defaults(config)
    if check_fields:
        m._check_fields()
    return m
Create a Config object from config dict directly.
def check(self):
    """Run inadyn from the command line to test the configuration.

    Intended usage: ``fab role inadyn.check``.
    """
    self._validate_settings()
    renderer = self.local_renderer
    renderer.env.alias = renderer.env.aliases[0]
    renderer.sudo(renderer.env.check_command_template)
Run inadyn from the commandline to test the configuration. To be run like: fab role inadyn.check
def root_path():
    """Return the absolute path to the root of the demosys package.

    That is, the directory two levels above this module's directory.
    """
    here = globals()['__file__']
    module_dir = os.path.dirname(here)
    return os.path.dirname(os.path.dirname(module_dir))
Get the absolute path to the root of the demosys package
def close_cursor(self, handle):
    """Close the cursor for *handle* and remove it from ``self.cursors``.

    Bug fix: the original closed the cursor but never removed it from the
    dictionary, even though its docstring promised removal — the stale
    entry would leak and a second close would hit an already-closed
    cursor.  ``pop`` closes and removes atomically.

    :param handle: cursor handle to close
    :raises KeyError: when no cursor with that handle exists
    """
    try:
        cursor = self.cursors.pop(handle)
    except KeyError:
        raise KeyError('cursor with handle %s was not found' % handle) from None
    cursor.close()
Closes the cursor specified and removes it from the `self.cursors` dictionary.
def get_current_course_run(course, users_active_course_runs): current_course_run = None filtered_course_runs = [] all_course_runs = course['course_runs'] if users_active_course_runs: current_course_run = get_closest_course_run(users_active_course_runs) else: for course_run in all_cou...
Return the current course run on the following conditions. - If user has active course runs (already enrolled) then return course run with closest start date Otherwise it will check the following logic: - Course run is enrollable (see is_course_run_enrollable) - Course run has a verified seat and the u...
def delete_async(self, url, name, callback=None, params=None, headers=None): if not name: name = '' params = params or {} headers = headers or {} endpoint = self._build_endpoint_url(url, name) self._authenticate(params, headers) process_pool.apply_async(make_delete_reques...
Asynchronous DELETE request with the process pool.
def resources(ctx, gpu): user, project_name, _job = get_job_or_local(ctx.obj.get('project'), ctx.obj.get('job')) try: message_handler = Printer.gpu_resources if gpu else Printer.resources PolyaxonClient().job.resources(user, project_name, ...
Get job resources. Uses [Caching](/references/polyaxon-cli/#caching) Examples: \b ```bash $ polyaxon job -j 2 resources ``` For GPU resources \b ```bash $ polyaxon job -j 2 resources --gpu ```
def check_and_load_ssh_auth(): mac_username = get_config_value(constants.CONFIG_MAC_USERNAME_KEY) if not mac_username: logging.info("Can't setup ssh authorization; no mac_username specified") return if not _running_on_mac(): logging.info("Skipping SSH load, we are not running on Mac"...
Will check the mac_username config value; if it is present, will load that user's SSH_AUTH_SOCK environment variable to the current environment. This allows git clones to behave the same for the daemon as they do for the user
def delete(path, verbose=False): if not os.path.exists(path): if os.path.islink(path): if verbose: print('Deleting broken link="{}"'.format(path)) os.unlink(path) elif os.path.isdir(path): if verbose: print('Deleting broken director...
Removes a file or recursively removes a directory. If a path does not exist, then this is does nothing. Args: path (PathLike): file or directory to remove verbose (bool): if True prints what is being done SeeAlso: send2trash - A cross-platform Python package for sending files ...
def get_subnets(context, limit=None, page_reverse=False, sorts=['id'], marker=None, filters=None, fields=None): LOG.info("get_subnets for tenant %s with filters %s fields %s" % (context.tenant_id, filters, fields)) filters = filters or {} subnets = db_api.subnet_find(context, li...
Retrieve a list of subnets. The contents of the list depends on the identity of the user making the request (as indicated by the context) as well as any filters. : param context: neutron api request context : param filters: a dictionary with keys that are valid keys for a subnet as listed i...
def sell(self, item_id, bid, buy_now, duration=3600, fast=False): method = 'POST' url = 'auctionhouse' data = {'buyNowPrice': buy_now, 'startingBid': bid, 'duration': duration, 'itemData': {'id': item_id}} rc = self.__request__(method, url, data=json.dumps(data), params={'sku_b': self.sk...
Start auction. Returns trade_id. :param item_id: Item id. :param bid: Starting bid. :param buy_now: Buy now price. :param duration: Auction duration in seconds (Default: 3600).
def model_stoch_vol(data, samples=2000, progressbar=True): from pymc3.distributions.timeseries import GaussianRandomWalk with pm.Model() as model: nu = pm.Exponential('nu', 1. / 10, testval=5.) sigma = pm.Exponential('sigma', 1. / .02, testval=.1) s = GaussianRandomWalk('s', sigma**-2, s...
Run stochastic volatility model. This model estimates the volatility of a returns series over time. Returns are assumed to be T-distributed. lambda (width of T-distributed) is assumed to follow a random-walk. Parameters ---------- data : pandas.Series Return series to model. sample...
def getnodefor(self, name):
    """Return the node where ``name`` would land, as a one-entry dict."""
    nodename = self._getnodenamefor(name)
    nodes = self.cluster['nodes']
    return {nodename: nodes[nodename]}
Return the node where the ``name`` would land to
def _fly(self, board, layers, things, the_plot): if (self.character in the_plot['bunker_hitters'] or self.character in the_plot['marauder_hitters']): return self._teleport((-1, -1)) self._north(board, the_plot)
Handles the behaviour of visible bolts flying toward Marauders.
def print_round_trip_stats(round_trips, hide_pos=False): stats = gen_round_trip_stats(round_trips) print_table(stats['summary'], float_format='{:.2f}'.format, name='Summary stats') print_table(stats['pnl'], float_format='${:.2f}'.format, name='PnL stats') print_table(stats['duration'], f...
Print various round-trip statistics. Tries to pretty-print tables with HTML output if run inside IPython NB. Parameters ---------- round_trips : pd.DataFrame DataFrame with one row per round trip trade. - See full explanation in round_trips.extract_round_trips See also --------...
def basecaller(arrayed, mindepth_majrule, mindepth_statistical, estH, estE): cons = np.zeros(arrayed.shape[1], dtype=np.uint8) cons.fill(78) arr = arrayed.view(np.uint8) for col in xrange(arr.shape[1]): carr = arr[:, col] mask = carr == 45 mask += carr == 78 marr ...
call all sites in a locus array.
def stop_erps(self, stop_erps):
    """Set the ERP values for this object's DOF limits.

    :param stop_erps: a single ERP value applied to every degree-of-freedom
        limit, or a sequence with one value per limit.
    """
    # Delegates to the module-level _set_params helper; ADOF + LDOF is the
    # total number of angular plus linear degrees of freedom.
    _set_params(self.ode_obj, 'StopERP', stop_erps, self.ADOF + self.LDOF)
Set the ERP values for this object's DOF limits. Parameters ---------- stop_erps : float or sequence of float An ERP value to set on all degrees of freedom limits, or a list containing one such value for each degree of freedom limit.
def transfer(self, data, assert_ss=True, deassert_ss=True): if self._mosi is None: raise RuntimeError('Write attempted with no MOSI pin specified.') if self._miso is None: raise RuntimeError('Read attempted with no MISO pin specified.') if assert_ss and self._ss is not No...
Full-duplex SPI read and write. If assert_ss is true, the SS line will be asserted low, the specified bytes will be clocked out the MOSI line while bytes will also be read from the MISO line, and if deassert_ss is true the SS line will be put back high. Bytes which are read will be ret...
def to_str(prev, encoding=None): first = next(prev) if isinstance(first, str): if encoding is None: yield first for s in prev: yield s else: yield first.encode(encoding) for s in prev: yield s.encode(encoding) el...
Convert data from previous pipe with specified encoding.
def reset_counter(self):
    """Reset the retry count and every per-URL failure counter to zero."""
    self._cnt_retries = 0
    for url in self._url_counter:
        self._url_counter[url] = 0
reset the failed connection counters
def clean(ctx):
    """Remove previously built package artifacts (the ``dist`` directory)."""
    ctx.run(f'python setup.py clean')
    dist_dir = ROOT.joinpath('dist')
    print(f'removing {dist_dir}')
    shutil.rmtree(str(dist_dir))
Clean previously built package artifacts.
def calculate(self, T, P, zs, ws, method): r if method == SIMPLE: Cplms = [i(T) for i in self.HeatCapacityLiquids] return mixing_simple(zs, Cplms) elif method == LALIBERTE: ws = list(ws) ; ws.pop(self.index_w) Cpl = Laliberte_heat_capacity(T, ws, s...
r'''Method to calculate heat capacity of a liquid mixture at temperature `T`, pressure `P`, mole fractions `zs` and weight fractions `ws` with a given method. This method has no exception handling; see `mixture_property` for that. Parameters ---------- T : floa...
def execute(cur, *args):
    """Print a sqlite statement before executing it.

    Drop-in replacement for ``cur.execute(...)``: pass the cursor first,
    then the statement and (optionally) its parameter tuple.  When
    parameters are given, a ``%r``-interpolated preview of the statement
    is printed first.
    """
    stmt = args[0]
    if len(args) > 1:
        preview = stmt.replace('%', '%%').replace('?', '%r')
        print(preview % (args[1]))
    return cur.execute(*args)
Utility function to print sqlite queries before executing. Use instead of cur.execute(). First argument is cursor. cur.execute(stmt) becomes util.execute(cur, stmt)
def awake(self, procid): logger.debug(f"Remove procid:{procid} from waitlists and reestablish it in the running list") for wait_list in self.rwait: if procid in wait_list: wait_list.remove(procid) for wait_list in self.twait: if procid in wait_list: ...
Remove procid from waitlists and reestablish it in the running list
def is_ipv6_available():
    """Check whether IPv6 is available.

    :returns: True when an IPv6 socket can be created, False otherwise
        (including platforms where AF_INET6 is missing entirely).
    """
    try:
        probe = socket.socket(socket.AF_INET6)
        probe.close()
    except (socket.error, AttributeError):
        return False
    return True
Check if IPv6 is available. :Return: `True` when an IPv6 socket can be created.
def from_server(cls, server, slug, identifier):
    """Fetch a task from the server and build an instance from it."""
    replacements = {'slug': slug, 'identifier': identifier}
    task = server.get('task', replacements=replacements)
    return cls(**task)
Retrieve a task from the server
def stop(ctx, commit, yes): user, project_name = get_project_or_local(ctx.obj.get('project')) if not yes and not click.confirm("Are sure you want to stop notebook " "for project `{}/{}`".format(user, project_name)): click.echo('Existing without stopping notebook.') ...
Stops the notebook deployment for this project if it exists. Uses [Caching](/references/polyaxon-cli/#caching)
def list_compounds():
    """Print every compound loaded in the thermo module with its phases."""
    print('Compounds currently loaded:')
    for name in sorted(compounds):
        phase_names = ', '.join(compounds[name].get_phase_list())
        print('%s: %s' % (name, phase_names))
List all compounds that are currently loaded in the thermo module, and their phases.
def auto_constraints(self, component=None): if not component: for table in self.tables: self.auto_constraints(table) return if not component.tableSchema.primaryKey: idcol = component.get_column(term_uri('id')) if idcol: comp...
Use CLDF reference properties to implicitely create foreign key constraints. :param component: A Table object or `None`.
def _credentials_from_request(request):
    """Return the authorized credentials for this flow, or None.

    Credentials are only looked up when either no storage model is
    configured or the requesting user is authenticated.
    """
    if oauth2_settings.storage_model is not None and not request.user.is_authenticated():
        return None
    return get_storage(request).get()
Gets the authorized credentials for this flow, if they exist.
def get_style(self, name, workspace=None):
    """Return a single style object.

    Returns None when no style matches; raises when more than one style
    with the same name is found.
    """
    matches = self.get_styles(names=name, workspaces=workspace)
    return self._return_first_item(matches)
returns a single style object. Will return None if no style is found. Will raise an error if more than one style with the same name is found.
def udp_messenger(domain_name, UDP_IP, UDP_PORT, sock_timeout, message): try: if message is None: raise ValueError("message was none") encoded_message = bytes(message, "utf-8") if encoded_message is None: raise ValueError("utf-8 encoding of message failed") if...
Send UDP messages to usage tracker asynchronously This multiprocessing based messenger was written to overcome the limitations of signalling/terminating a thread that is blocked on a system call. This messenger is created as a separate process, and initialized with 2 queues, to_send to receive messages...
def checkSerial(self):
    """Drain pending serial data and write each item to the TUN adapter."""
    tun = self._TUN._tun
    for item in self.rxSerial(tun.mtu):
        try:
            tun.write(item)
        except pytun.Error as error:
            print("pytun error writing: {0}".format(item))
            print(error)
Check the serial port for data to write to the TUN adapter.
def conference_speak(self, call_params):
    """REST ConferenceSpeak helper: POST the call parameters."""
    method = 'POST'
    path = '/' + self.api_version + '/ConferenceSpeak/'
    return self.request(path, method, call_params)
REST Conference Speak helper
def shuffle_srv(records): if not records: return [] ret = [] while len(records) > 1: weight_sum = 0 for rrecord in records: weight_sum += rrecord.weight + 0.1 thres = random.random() * weight_sum weight_sum = 0 for rrecord in records: w...
Randomly reorder SRV records using their weights. :Parameters: - `records`: SRV records to shuffle. :Types: - `records`: sequence of :dns:`dns.rdtypes.IN.SRV` :return: reordered records. :returntype: `list` of :dns:`dns.rdtypes.IN.SRV`
def pad_to_aspect_ratio(self, aspect_ratio, mode="constant", cval=0.0, return_pad_amounts=False): arr_padded, pad_amounts = ia.pad_to_aspect_ratio(self.arr, aspect_ratio=aspect_ratio, mode=mode, cval=cval, return_pad_amounts=True) segmap = Segment...
Pad the segmentation map on its sides so that its matches a target aspect ratio. Depending on which dimension is smaller (height or width), only the corresponding sides (left/right or top/bottom) will be padded. In each case, both of the sides will be padded equally. Parameters ...
def post(self, url, params=None, files=None):
    """Issue a POST request against the API; supports multipart uploads.

    Bug fix: the original declared ``params={}`` (a mutable default) and
    mutated it in place with the api_key — poisoning the shared default
    AND any caller-supplied dict.  A fresh copy is made instead; the
    change is backward compatible.

    :param url: path to request
    :param params: dict of request parameters (not modified)
    :param files: list of file tuples for multipart upload
    :returns: parsed JSON response, or parsed error args on failure
    """
    payload = dict(params or {})
    payload['api_key'] = self.api_key
    try:
        response = requests.post(self.host + url, data=payload, files=files)
        return self.json_parse(response.content)
    except RequestException as e:
        return self.json_parse(e.args)
Issues a POST request against the API, allows for multipart data uploads :param url: a string, the url you are requesting :param params: a dict, the key-value of all the parameters needed in the request :param files: a list, the list of tuples of files :returns: ...
def find_imports(self, pbds): imports = list(set(self.uses).difference(set(self.defines))) for imp in imports: for p in pbds: if imp in p.defines: self.imports.append(p.name) break self.imports = list(set(self.imports)) ...
Find all missing imports in list of Pbd instances.
def load_image(self): try: image = initializers.load_tiff(self.filename) image = initializers.normalize( image, invert=self.invert, scale=self.exposure, dtype=self.float_precision ) except IOError as e: log.error("Could not ...
Read the file and perform any transforms to get a loaded image
def rotation( x, rg=20, is_random=False, row_index=0, col_index=1, channel_index=2, fill_mode='nearest', cval=0., order=1 ): if is_random: theta = np.pi / 180 * np.random.uniform(-rg, rg) else: theta = np.pi / 180 * rg rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0], [...
Rotate an image randomly or non-randomly. Parameters ----------- x : numpy.array An image with dimension of [row, col, channel] (default). rg : int or float Degree to rotate, usually 0 ~ 180. is_random : boolean If True, randomly rotate. Default is False row_index col_in...
def _select_manager(backend_name): if backend_name == 'RedisBackend': lock_manager = _LockManagerRedis elif backend_name == 'DatabaseBackend': lock_manager = _LockManagerDB else: raise NotImplementedError return lock_manager
Select the proper LockManager based on the current backend used by Celery. :raise NotImplementedError: If Celery is using an unsupported backend. :param str backend_name: Class name of the current Celery backend. Usually value of current_app.extensions['celery'].celery.backend.__class__.__name__. ...
def check_consistency(self): error = False regex = re.compile('([a-zA-Z_][a-zA-Z0-9_]*)') if 'full' not in self.modelstr: raise ModelError( 'Model must contain a `full` key describing ' 'the entire image formation' ) for name, eq in...
Make sure that the required comps are included in the list of components supplied by the user. Also check that the parameters are consistent across the many components.
def squad(self, squad_id=0, persona_id=None): method = 'GET' url = 'squad/%s/user/%s' % (squad_id, persona_id or self.persona_id) events = [self.pin.event('page_view', 'Hub - Squads')] self.pin.send(events) rc = self.__request__(method, url) events = [self.pin.event('page...
Return a squad. :params squad_id: Squad id.
def proc_elms(**kwargs) -> list:
    """Translate Bloomberg element overrides to canonical (name, value) pairs.

    Keys are accepted either in short or canonical form; values are mapped
    through ELEM_VALS when a translation exists.  Keys listed in PRSV_COLS
    are skipped.

    :param kwargs: overrides
    :returns: list of (element name, element value) tuples
    """
    known_keys = list(ELEM_KEYS.keys()) + list(ELEM_KEYS.values())
    result = []
    for key, value in kwargs.items():
        if key not in known_keys or key in PRSV_COLS:
            continue
        canonical = ELEM_KEYS.get(key, key)
        mapped = ELEM_VALS.get(canonical, dict()).get(value, value)
        result.append((canonical, mapped))
    return result
Bloomberg overrides for elements Args: **kwargs: overrides Returns: list of tuples Examples: >>> proc_elms(PerAdj='A', Per='W') [('periodicityAdjustment', 'ACTUAL'), ('periodicitySelection', 'WEEKLY')] >>> proc_elms(Days='A', Fill='B') [('nonTradingDayFillO...
def _interrupt_read(self):
    """Read one interrupt transfer from the USB device and return it.

    NOTE(review): ENDPOINT, REQ_INT_LEN and TIMEOUT are module-level
    constants defined elsewhere in this file.
    """
    data = self._device.read(ENDPOINT, REQ_INT_LEN, timeout=TIMEOUT)
    LOGGER.debug('Read data: %r', data)
    return data
Read data from device.
def create_project_thread(session, member_ids, project_id, message):
    """Create a message thread scoped to a project.

    Thin wrapper around the module-level create_thread helper with the
    context type fixed to 'project'.
    """
    return create_thread(session, member_ids, 'project', project_id, message)
Create a project thread
def changeset_info(changeset): keys = [tag.attrib.get('k') for tag in changeset.getchildren()] keys += ['id', 'user', 'uid', 'bbox', 'created_at'] values = [tag.attrib.get('v') for tag in changeset.getchildren()] values += [ changeset.get('id'), changeset.get('user'), changeset.get('uid'), ...
Return a dictionary with id, user, user_id, bounds, date of creation and all the tags of the changeset. Args: changeset: the XML string of the changeset.
def format_docstring(*args, **kwargs):
    """Decorator that ``str.format``s the wrapped function's docstring.

    The positional and keyword arguments are substituted into the
    (dedented, via ``getdoc``) docstring in place; the function object
    itself is returned unchanged otherwise.
    """
    def _apply(func):
        doc = getdoc(func)
        func.__doc__ = doc.format(*args, **kwargs)
        return func
    return _apply
Decorator for clean docstring formatting
def _add_id_to_keys(self, pk, conn=None): if conn is None: conn = self._get_connection() conn.sadd(self._get_ids_key(), pk)
_add_id_to_keys - Adds the primary key to the table's internal set of ids.
def add(path): click.echo('\nAdding {} to cache......'.format(path), nl=False) try: r = cpenv.resolve(path) except Exception as e: click.echo(bold_red('FAILED')) click.echo(e) return if isinstance(r.resolved[0], cpenv.VirtualEnvironment): EnvironmentCache.add(r.re...
Add an environment to the cache. Allows you to activate the environment by name instead of by full path
def bulkDetails(self, packageNames): params = {'au': '1'} req = googleplay_pb2.BulkDetailsRequest() req.docid.extend(packageNames) data = req.SerializeToString() message = self.executeRequestApi2(BULK_URL, post_data=data.decode("utf-8"), ...
Get several apps details from a list of package names. This is much more efficient than calling N times details() since it requires only one request. If an item is not found it returns an empty object instead of throwing a RequestError('Item not found') like the details() function Args...
def experiment(ctx, project, experiment):
    """Group command for experiment subcommands.

    Stashes the selected project and experiment on the click context so
    subcommands can retrieve them.
    """
    state = ctx.obj or {}
    state['project'] = project
    state['experiment'] = experiment
    ctx.obj = state
Commands for experiments.
def parse_scalar(scalar_data, version): try: return hs_scalar[version].parseString(scalar_data, parseAll=True)[0] except pp.ParseException as pe: raise ZincParseException( 'Failed to parse scalar: %s' % reformat_exception(pe), scalar_data, 1, pe.col) except: ...
Parse a Project Haystack scalar in ZINC format.
def get_moderation(request):
    """Return the list of publication ids that need moderation.

    NOTE(review): the SQL string literal was lost during data extraction —
    ``cursor.execute( )`` below has no statement and will raise TypeError
    as written.  Recover the original query before using this function.
    """
    with db_connect() as db_conn:
        with db_conn.cursor() as cursor:
            cursor.execute(
                )
            # First column of each row is the publication id.
            moderations = [x[0] for x in cursor.fetchall()]
    return moderations
Return the list of publications that need moderation.
def cmd_status(opts):
    """Print the status of containers and networks for a blockade."""
    config = load_config(opts.config)
    blockade = get_blockade(config, opts)
    containers = blockade.status()
    print_containers(containers, opts.json)
Print status of containers and networks
def _aggr_mode(inList): valueCounts = dict() nonNone = 0 for elem in inList: if elem == SENTINEL_VALUE_FOR_MISSING_DATA: continue nonNone += 1 if elem in valueCounts: valueCounts[elem] += 1 else: valueCounts[elem] = 1 if nonNone == 0: return None sortedCounts = valueCount...
Returns most common value seen in the non-None elements of the list
def main(): cred_search = CredentialSearch() arg = argparse.ArgumentParser(parents=[cred_search.argparser], conflict_handler='resolve') arg.add_argument('-c', '--count', help="Only show the number of results", action="store_true") arguments = arg.parse_args() if arguments.count: print_line("...
Main credentials tool
def attach(self, observer):
    """Attach an observer, ignoring duplicates.

    Idiom fix: ``if not observer in`` → ``if observer not in``.

    :param observer: callable invoked when new messages arrive
    :returns: this Stream instance, to allow chaining
    """
    if observer not in self._observers:
        self._observers.append(observer)
    return self
Attach an observer. Args: observer (func): A function to be called when new messages arrive Returns: :class:`Stream`. Current instance to allow chaining
def edges(self, **kwargs): edges = self._query('edges', **kwargs) for edge in edges: identifier_source = edge['source_type'] + \ '[' + edge['source_title'] + ']' identifier_target = edge['target_type'] + \ '[' + edge['target_title'] + ']' ...
Get the known catalog edges, formed between two resources. :param \*\*kwargs: The rest of the keyword arguments are passed to the _query function. :returns: A generating yielding Edges. :rtype: :class:`pypuppetdb.types.Edge`
def check_mro(self, bases):
    """Check whether a C3 MRO is possible with the given bases.

    A temporary node is linked to every base, the MRO is computed, and the
    node is removed again (even on failure).

    :param bases: candidate base nodes
    :returns: the computed MRO, excluding the temporary node itself
    """
    try:
        self.add_node("temp")
        for base in bases:
            # NOTE(review): unbound call on nx.DiGraph rather than
            # self.add_edge — presumably to bypass an override on this
            # subclass; confirm.
            nx.DiGraph.add_edge(self, base, "temp")
        result = self.get_mro("temp")[1:]
    finally:
        # Always remove the scratch node, even when get_mro raises.
        self.remove_node("temp")
    return result
Check if C3 MRO is possible with given bases
def template_uploader_yaml(cl_args, masters): single_master = masters[0] uploader_config_template = "%s/standalone/templates/uploader.template.yaml" \ % cl_args["config_path"] uploader_config_actual = "%s/standalone/uploader.yaml" % cl_args["config_path"] template_file(uploader_conf...
Template the uploader.yaml configuration file.
def delete_category(category_id):
    """Delete the Category with id = *category_id*.

    Cleanup: the original wrapped the body in ``try: ... except: raise``,
    which re-raises everything unchanged and is therefore a no-op; the
    wrapper has been removed with identical behavior.

    :param category_id: PYBOSSA Category ID
    :returns: True on success, otherwise the raw error response
    """
    res = _pybossa_req('delete', 'category', category_id)
    if type(res).__name__ == 'bool':
        return True
    return res
Delete a Category with id = category_id. :param category_id: PYBOSSA Category ID :type category_id: integer :returns: True -- the response status code
def compile_protofile(proto_file_path): out_file = tempfile.mkstemp()[1] try: subprocess.check_output(['protoc', '--include_source_info', '--descriptor_set_out', out_file, proto_file_path]) except subprocess.CalledProcessError as e: ...
Compile proto file to descriptor set. Args: proto_file_path: Path to proto file to compile. Returns: Path to file containing compiled descriptor set. Raises: SystemExit if the compilation fails.
def _get_request_type(self):
    """Determine the requested OWS request type from the POST document.

    :returns: the validated request type (lower-cased document tag)
    :raises OWSInvalidParameterValue: when the type is not supported for
        the current service.
    """
    value = self.document.tag.lower()
    if value not in allowed_request_types[self.params['service']]:
        raise OWSInvalidParameterValue("Request type %s is not supported" % value, value="request")
    self.params["request"] = value
    return self.params["request"]
Find requested request type in POST request.
def add_directory(self, *args, **kwargs):
    """Add one or more directories to the bundle.

    :param args: directory paths to add
    :param exclusions: optional list of excluded paths (keyword only)
    """
    exclusions = kwargs.get('exclusions', None)
    for directory in args:
        self.files.append(DirectoryPath(directory, self, exclusions=exclusions))
Add directory or directories list to bundle :param exclusions: List of excluded paths :type path: str|unicode :type exclusions: list
def convert_lrelu(params, w_name, scope_name, inputs, layers, weights, names): print('Converting lrelu ...') if names == 'short': tf_name = 'lRELU' + random_string(3) elif names == 'keep': tf_name = w_name else: tf_name = w_name + str(random.random()) leakyrelu = \ ke...
Convert leaky relu layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for k...