code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def cartesian(self, other): <NEW_LINE> <INDENT> v1 = self.toLocalIterator() <NEW_LINE> v2 = other.collect() <NEW_LINE> return self.context.parallelize([(a, b) for a in v1 for b in v2])
cartesian product of this RDD with ``other`` :param RDD other: Another RDD. :rtype: RDD .. note:: This is currently implemented as a local operation requiring all data to be pulled on one machine. Example: >>> from pysparkling import Context >>> rdd = Context().parallelize([1, 2]) >>> sorted(rdd.cartesian(rdd).collect()) [(1, 1), (1, 2), (2, 1), (2, 2)]
625941be07d97122c4178799
def _finalize_axis(self, key): <NEW_LINE> <INDENT> if 'title' in self.handles: <NEW_LINE> <INDENT> self.handles['title'].set_visible(self.show_title) <NEW_LINE> <DEDENT> self.drawn = True <NEW_LINE> if self.subplot: <NEW_LINE> <INDENT> return self.handles['axis'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fig = self.handles['fig'] <NEW_LINE> if not getattr(self, 'overlaid', False) and self._close_figures: <NEW_LINE> <INDENT> plt.close(fig) <NEW_LINE> <DEDENT> return fig
General method to finalize the axis and plot.
625941bea79ad161976cc058
def get_param_lim(self, parameter, limit, timeout=2.0, return_full = True): <NEW_LINE> <INDENT> response = get_value(self._host, self._port, self._api_v, "detector", "config", parameter, timeout=timeout, return_full=return_full) <NEW_LINE> return response[limit]
Returns the limit max or min of the given parameter :param string parameter: parameter name :param string limit: max or min :param float timeout: communication timeout in seconds :param bool return_full: whether to return the full response dict :returns: max count time in seconds :rtype: float
625941be1d351010ab855a30
def setRandomPolicy( self ): <NEW_LINE> <INDENT> self.movePolicy = RandomMover( self.pieces )
The player should perform random moves
625941be92d797404e30409c
def get_text_coherence_errors(self): <NEW_LINE> <INDENT> text_coherence_errors = [] <NEW_LINE> author = ['i', 'we', 'me', 'myself', 'ourself', 'us', 'our', 'my', 'mine'] <NEW_LINE> reader = ['you', 'yourself', 'your', 'yours'] <NEW_LINE> male = ['he', 'him', 'himself', 'his'] <NEW_LINE> female = ['she', 'her', 'herself'] <NEW_LINE> neutral_singular = ['it', 'itself', 'oneself', 'its', 'ourselves'] <NEW_LINE> neutral_plural = ['they', 'them', 'themself', 'their', 'themselves'] <NEW_LINE> sent_tagged = [] <NEW_LINE> sent_words = [] <NEW_LINE> for i, sent in enumerate(self.sentences): <NEW_LINE> <INDENT> x=nltk.word_tokenize(sent) <NEW_LINE> tagged = nltk.pos_tag(x) <NEW_LINE> sent_tagged.append([tag for word,tag in tagged]) <NEW_LINE> sent_words.append(x) <NEW_LINE> if i==0: <NEW_LINE> <INDENT> previous_tags = [] <NEW_LINE> previous_words = [] <NEW_LINE> <DEDENT> elif i==1: <NEW_LINE> <INDENT> previous_tags = sent_tagged[i-1] <NEW_LINE> previous_words = sent_words[i-1] <NEW_LINE> <DEDENT> elif i==2: <NEW_LINE> <INDENT> previous_tags = sent_tagged[i-1] + sent_tagged[i-2] <NEW_LINE> previous_words = sent_words[i-1] + sent_words[i-2] <NEW_LINE> <DEDENT> for j, (word, tag) in enumerate(tagged): <NEW_LINE> <INDENT> if tag in ['PRP', 'PRP$']: <NEW_LINE> <INDENT> if word.lower() in author or word.lower() in reader: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif word.lower() in female or word.lower() in male: <NEW_LINE> <INDENT> if 'NN' not in previous_tags and 'NNP' not in previous_tags and 'NN' not in sent_tagged[i][:j] and 'NNP' not in sent_tagged[i][:j]: <NEW_LINE> <INDENT> text_coherence_errors.append(tag) <NEW_LINE> <DEDENT> <DEDENT> elif word.lower() in neutral_plural: <NEW_LINE> <INDENT> if 'NNS' not in previous_tags and 'NNPS' not in previous_tags and 'NNS' not in sent_tagged[i][:j] and 'NNPS' not in sent_tagged[i][:j]: <NEW_LINE> <INDENT> text_coherence_errors.append(tag) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return text_coherence_errors
Return list of text coherence errors Returns: text_coherence_errors(list): list of text coherence errors
625941be99cbb53fe6792afa
def __init__(self, name, addr, size, info): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.addr = addr <NEW_LINE> self.size = size <NEW_LINE> self.symtype = Symbol.typeNameToInt(info["type"]) <NEW_LINE> self.bindtype = Symbol.bindNameToInt(info["bind"])
Create a symbol using values read from the ELF symbol table.
625941bef8510a7c17cf960e
def test_check(self): <NEW_LINE> <INDENT> succ = BlockDev.btrfs_create_volume([self.loop_dev], None, None, None) <NEW_LINE> self.assertTrue(succ) <NEW_LINE> succ = BlockDev.btrfs_check(self.loop_dev) <NEW_LINE> self.assertTrue(succ)
Verify that it's possible to check the btrfs filesystem
625941be009cb60464c632c7
def direct_search(priority): <NEW_LINE> <INDENT> def decorator(searcher): <NEW_LINE> <INDENT> searcher.direct_search_priority = priority <NEW_LINE> return searcher <NEW_LINE> <DEDENT> return decorator
Mark a function as being a direct search provider. :arg priority: A priority to attach to the function. Direct searchers are called in order of increasing priority.
625941be5fc7496912cc3891
def up_block(input_layer, concat_layer, n_features, is_training, keep_prob, name=None, norm_fn=None, **kwargs): <NEW_LINE> <INDENT> normalizer_params = kwargs.get('normalizer_params', normalizer_params_default) if norm_fn is not None else None <NEW_LINE> if normalizer_params is not None and 'is_training' in normalizer_params and normalizer_params.get('is_training') is None: <NEW_LINE> <INDENT> normalizer_params['is_training'] = is_training <NEW_LINE> <DEDENT> with tf.variable_scope('UNet/up_%s'%(str(n_features) if name is None else name + '_' + str(n_features))): <NEW_LINE> <INDENT> up0a = tc.layers.conv2d_transpose(input_layer, n_features, (3, 3), 2, normalizer_fn=norm_fn, normalizer_params=normalizer_params) <NEW_LINE> up0b = tf.concat([up0a, concat_layer], axis=3) <NEW_LINE> up0c = tc.layers.conv2d(up0b, n_features, (3, 3), normalizer_fn=norm_fn, normalizer_params=normalizer_params) <NEW_LINE> up0d = tc.layers.conv2d(up0c, n_features, (3, 3), normalizer_fn=norm_fn, normalizer_params=normalizer_params) <NEW_LINE> up0e = tc.layers.conv2d(up0d, n_features, (3, 3), normalizer_fn=norm_fn, normalizer_params=normalizer_params) <NEW_LINE> if keep_prob < 1: up0e = tc.layers.dropout(up0e, keep_prob=keep_prob) <NEW_LINE> return up0e
Up block consisting of a deconvolution and three convolutions. :param input_layer: input :param concat_layer: layer that is copied for the convolution (usually output of the corresponding down block) :param n_features: number of features that are created by the conv :param is_training: phase :param keep_prob: for dropout layers :param name: optional name of the layer :param no_max_pool: last down_block of a Unet has no max_pool :param norm_fn: optional normalizer function for conv2d :param kwargs: normalizer_params for norm_fn (otherwise defaults if norm_fn is set) :return: 1st: the max_pool out_put | 2nd: layer for concat during expansion
625941be287bf620b61d3979
def __init__(self): <NEW_LINE> <INDENT> self._x = None
The __init__ is documented.
625941be97e22403b379ceac
def SpliceComments(tree): <NEW_LINE> <INDENT> prev_leaf = [None] <NEW_LINE> _AnnotateIndents(tree) <NEW_LINE> def _VisitNodeRec(node): <NEW_LINE> <INDENT> for child in node.children[:]: <NEW_LINE> <INDENT> if isinstance(child, pytree.Node): <NEW_LINE> <INDENT> _VisitNodeRec(child) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if child.prefix.lstrip().startswith('#'): <NEW_LINE> <INDENT> comment_prefix = child.prefix <NEW_LINE> comment_lineno = child.lineno - comment_prefix.count('\n') <NEW_LINE> child_prefix = child.prefix.lstrip('\n') <NEW_LINE> prefix_indent = child_prefix[:child_prefix.find('#')] <NEW_LINE> child.prefix = '' <NEW_LINE> if child.type == token.NEWLINE: <NEW_LINE> <INDENT> assert prev_leaf[0] is not None <NEW_LINE> pytree_utils.InsertNodesAfter( _CreateCommentsFromPrefix(comment_prefix, comment_lineno, standalone=False), prev_leaf[0]) <NEW_LINE> <DEDENT> elif child.type == token.DEDENT: <NEW_LINE> <INDENT> ancestor_at_indent = _FindAncestorAtIndent(child, prefix_indent) <NEW_LINE> if ancestor_at_indent.type == token.DEDENT: <NEW_LINE> <INDENT> pytree_utils.InsertNodesBefore(_CreateCommentsFromPrefix( comment_prefix, comment_lineno, standalone=True), ancestor_at_indent) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pytree_utils.InsertNodesAfter(_CreateCommentsFromPrefix( comment_prefix, comment_lineno, standalone=True), ancestor_at_indent) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> stmt_parent = _FindStmtParent(child) <NEW_LINE> for leaf_in_parent in stmt_parent.leaves(): <NEW_LINE> <INDENT> if leaf_in_parent.type == token.NEWLINE: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif id(leaf_in_parent) == id(child): <NEW_LINE> <INDENT> node_with_line_parent = _FindNodeWithStandaloneLineParent(child) <NEW_LINE> pytree_utils.InsertNodesBefore(_CreateCommentsFromPrefix( comment_prefix, comment_lineno, standalone=True), node_with_line_parent) <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if comment_lineno == 
prev_leaf[0].lineno: <NEW_LINE> <INDENT> comment_lines = comment_prefix.splitlines() <NEW_LINE> value = comment_lines[0].lstrip() <NEW_LINE> comment_leaf = pytree.Leaf(type=token.COMMENT, value=value.rstrip('\n'), context=('', (comment_lineno, 0))) <NEW_LINE> pytree_utils.InsertNodesAfter([comment_leaf], prev_leaf[0]) <NEW_LINE> comment_prefix = '\n'.join(comment_lines[1:]) <NEW_LINE> comment_lineno += 1 <NEW_LINE> <DEDENT> comments = _CreateCommentsFromPrefix(comment_prefix, comment_lineno, standalone=False) <NEW_LINE> pytree_utils.InsertNodesBefore(comments, child) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> prev_leaf[0] = child <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> _VisitNodeRec(tree)
Given a pytree, splice comments into nodes of their own right. Extract comments from the prefixes where they are housed after parsing. The prefixes that previously housed the comments become empty. Args: tree: a pytree.Node - the tree to work on. The tree is modified by this function.
625941be55399d3f055885c6
def analysis_complete(self): <NEW_LINE> <INDENT> self.start_btn.setEnabled(True) <NEW_LINE> self.pause_btn.setEnabled(False) <NEW_LINE> self.stop_btn.setEnabled(False) <NEW_LINE> self.pause_btn.setText('Pause') <NEW_LINE> self.update_status('Ready')
Slot to run once the analysis worker is finished.
625941be55399d3f055885c7
def read_file(path: str) -> (set, int): <NEW_LINE> <INDENT> rel_file = open(path, 'r', encoding='utf-8') <NEW_LINE> relation = set() <NEW_LINE> rel_list = rel_file.readlines() <NEW_LINE> separator = rel_list[0][1] <NEW_LINE> rel_file.close() <NEW_LINE> for i in range(len(rel_list)): <NEW_LINE> <INDENT> rel_list[i] = rel_list[i].split(separator) <NEW_LINE> for j in range(len(rel_list[i])): <NEW_LINE> <INDENT> if rel_list[i][j].strip() == '1': <NEW_LINE> <INDENT> relation.add((i+1, j+1)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return relation, len(rel_list)
Reads relation from file and returns it as a list of tuples and its length.
625941bedd821e528d63b0be
@pytest.mark.manual <NEW_LINE> @pytest.mark.tier(2) <NEW_LINE> def test_dd_multiselect_default_element_is_shouldnt_be_blank_when_loaded_by_another_element(): <NEW_LINE> <INDENT> pass
Polarion: assignee: nansari casecomponent: Services initialEstimate: 1/16h testtype: functional startsin: 5.9 tags: service Bugzilla: 1645555
625941befb3f5b602dac35a4
def check_attempts(card): <NEW_LINE> <INDENT> if card.attempts < settings.MAX_ATTEMPTS: <NEW_LINE> <INDENT> return {"success": True} <NEW_LINE> <DEDENT> card.is_active = False <NEW_LINE> card.save() <NEW_LINE> return {"success": False, "message": ('Your account has been locked ' 'out because of too many ' 'failed login attempts.')}
Count attempts and check MAX_ATTEMPTS limit
625941be851cf427c661a426
def rvs(self): <NEW_LINE> <INDENT> if self.bins_distributions is None: <NEW_LINE> <INDENT> self.create_poisson_distributions() <NEW_LINE> <DEDENT> return np.r_[[i.rvs(1).sum() for i in self.bins_distributions]]
sample from the distribution of each bin
625941be85dfad0860c3ad6d
def eat_delim(self, d): <NEW_LINE> <INDENT> if not self.match_delim(d): <NEW_LINE> <INDENT> raise BadSyntaxException() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__next_token()
Throws an exception if the current token is not the specified delimiter. Otherwise, moves to the next token.
625941be3346ee7daa2b2c7e
def test_ml_int_ml_id_rename(self): <NEW_LINE> <INDENT> NonString = int() <NEW_LINE> with self.assertRaises(AssertionError): <NEW_LINE> <INDENT> MailingList().rename_list(mailing_list=NonString, name='Fake')
This method tests that an assertion is raised in the MailingList Module when the user enters a mailing_list_id that is incorrect.
625941beadb09d7d5db6c6a5
def get_titles_of_downloaded_albums(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(TITLES_TO_SKIP_FILE, "r", encoding="utf-8", newline="") as input_file: <NEW_LINE> <INDENT> for row in csv.DictReader(input_file): <NEW_LINE> <INDENT> yield row['bookmark-title'] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass
Returns an iterator of the bookmark titles for whom torrents were already downloaded in the previous runs of the program.
625941beaad79263cf390951
def plot_ax(self, ax, what="d", exchange_xy=False, *args, **kwargs): <NEW_LINE> <INDENT> opts = [c.lower() for c in what] <NEW_LINE> cases = {"d": self.dos, "i": self.idos} <NEW_LINE> lines = list() <NEW_LINE> for c in opts: <NEW_LINE> <INDENT> f = cases[c] <NEW_LINE> ls = f.plot_ax(ax, exchange_xy=exchange_xy, *args, **kwargs) <NEW_LINE> lines.extend(ls) <NEW_LINE> <DEDENT> return lines
Helper function to plot the data on the axis ax. Args: ax: matplotlib axis what: string selecting the quantity to plot: "d" for DOS, "i" for IDOS. chars can be concatenated hence what="id" plots both IDOS and DOS. (default "d"). exchange_xy: True to exchange exis args, kwargs: Options passes to matplotlib. Return value is a list of lines that were added.
625941be44b2445a33931fae
def test_create_project(): <NEW_LINE> <INDENT> with FakeProjectContext() as ctx: <NEW_LINE> <INDENT> assert os.path.exists(ctx.path) <NEW_LINE> assert os.path.exists(os.path.join(ctx.path, default_project_file)) <NEW_LINE> assert os.path.exists(os.path.join(ctx.path, default_model_conditionsfp)) <NEW_LINE> GSMProject(ctx.path)
Create a test project with an example e coli model
625941bebe383301e01b539f
def test(self,my_round,set_to_use='test'): <NEW_LINE> <INDENT> assert set_to_use in ['train', 'test'] <NEW_LINE> if set_to_use == 'train': <NEW_LINE> <INDENT> data = self.train_data <NEW_LINE> <DEDENT> elif set_to_use == 'test': <NEW_LINE> <INDENT> data = self.eval_data <NEW_LINE> <DEDENT> self.model.set_params(self.model_para) <NEW_LINE> metrics = self.model.test(data) <NEW_LINE> if metrics['accuracy']<=0.01: <NEW_LINE> <INDENT> print(11111) <NEW_LINE> <DEDENT> metrics['time'] = self.record_time[my_round] <NEW_LINE> return metrics
Tests self.model on self.test_data. Args: set_to_use. Set to test on. Should be in ['train', 'test']. Return: dict of metrics returned by the model.
625941be283ffb24f3c55818
def delete_object(self, iden, uid, token): <NEW_LINE> <INDENT> self.obj_str.delete_object(uid, token, iden)
Delete a data object. :param iden: Id of the object. :param uid: Identifier for the user. :param token: The token of the user.
625941be293b9510aa2c31ac
def login(self): <NEW_LINE> <INDENT> pass
Login to a terminal, using I/O specific (rather than language-specific) routines. Uses the username and password of the BaseLanguageInput
625941be23849d37ff7b2fa4
def cls_player_sleep(self, mac_player: str, seconds_before_sleep: int) -> None: <NEW_LINE> <INDENT> payload = ( '{"id": 0, "params": ["' + mac_player + '",["sleep","' + str(seconds_before_sleep) + '"]],"method": "slim.request"}' ) <NEW_LINE> self._cls_execute_request(payload) <NEW_LINE> print("Player goind to sleep in: " + str(seconds_before_sleep) + " seconds")
player on or off input : mac_player: str, the player mac address, ie: 5a:65:a2:33:80:79 : seconds_before_sleep: number of seconds before sleep returns : None
625941be4e696a04525c9360
def createMountain(x, y, xc, yc, rm, h0max): <NEW_LINE> <INDENT> nx = len(x) <NEW_LINE> ny = len(y) <NEW_LINE> h0 = np.zeros([nx,ny]) <NEW_LINE> for i in xrange(0,nx): <NEW_LINE> <INDENT> for j in xrange(0,ny): <NEW_LINE> <INDENT> dist = np.sqrt((x[i] - xc)**2 + (y[j] - yc)**2) <NEW_LINE> if dist < rm: <NEW_LINE> <INDENT> h0[i,j] = h0max*(1-dist/rm) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return h0
create the mountain at locations x and y
625941be0c0af96317bb80fc
def _create(self): <NEW_LINE> <INDENT> log_method_call(self, self.name, status=self.status) <NEW_LINE> fd = os.open(self.path, os.O_WRONLY|os.O_CREAT|os.O_TRUNC) <NEW_LINE> os.ftruncate(fd, self.size) <NEW_LINE> os.close(fd)
Create a sparse file.
625941bee5267d203edcdbb3
def handle_getfield_gc(self, op): <NEW_LINE> <INDENT> self.emit_pending_zeros() <NEW_LINE> self.emit_op(op)
See test_zero_ptr_field_before_getfield(). We hope there is no getfield_gc in the middle of initialization code, but there shouldn't be, given that a 'new' is already delayed by previous optimization steps. In practice it should immediately be followed by a bunch of 'setfields', and the 'pending_zeros' optimization we do here is meant for this case.
625941be5fc7496912cc3892
def get_bgp_instance(self, **kwargs): <NEW_LINE> <INDENT> module = kwargs["module"] <NEW_LINE> conf_str = CE_GET_BGP_INSTANCE <NEW_LINE> xml_str = self.netconf_get_config(module=module, conf_str=conf_str) <NEW_LINE> result = list() <NEW_LINE> if "<data/>" in xml_str: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> re_find = re.findall( r'.*<vrfName>(.*)</vrfName>.*', xml_str) <NEW_LINE> if re_find: <NEW_LINE> <INDENT> return re_find <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return result
get_bgp_instance
625941bef9cc0f698b140511
def test_search_insert(): <NEW_LINE> <INDENT> binary_tree = ctypes.CDLL(dll_path) <NEW_LINE> binary_tree.create_binary_tree.restype = ctypes.c_void_p <NEW_LINE> b1 = binary_tree.create_binary_tree(ctypes.c_int(10000)) <NEW_LINE> vals = random.sample(range(-500, 500), random.randint(100, 150)) <NEW_LINE> for val in vals: <NEW_LINE> <INDENT> binary_tree.insert(ctypes.c_void_p(b1), val) <NEW_LINE> <DEDENT> for k in range(-500, 500): <NEW_LINE> <INDENT> if k in vals: <NEW_LINE> <INDENT> assert binary_tree.search(ctypes.c_void_p(b1), k) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert not binary_tree.search(ctypes.c_void_p(b1), k) <NEW_LINE> <DEDENT> <DEDENT> binary_tree.delete_binary_tree(ctypes.c_void_p(b1))
Insert random values into tree, test that these values have been inserted and no others
625941be8e05c05ec3eea286
def _check_keyword_parentheses(self, tokens, start): <NEW_LINE> <INDENT> if self._inside_brackets(":") and tokens[start][1] == "for": <NEW_LINE> <INDENT> self._pop_token() <NEW_LINE> <DEDENT> if tokens[start + 1][1] != "(": <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> found_and_or = False <NEW_LINE> depth = 0 <NEW_LINE> keyword_token = str(tokens[start][1]) <NEW_LINE> line_num = tokens[start][2][0] <NEW_LINE> for i in range(start, len(tokens) - 1): <NEW_LINE> <INDENT> token = tokens[i] <NEW_LINE> if token[0] == tokenize.NL: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if token[1] == "(": <NEW_LINE> <INDENT> depth += 1 <NEW_LINE> <DEDENT> elif token[1] == ")": <NEW_LINE> <INDENT> depth -= 1 <NEW_LINE> if depth: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if tokens[i + 1][1] in (":", ")", "]", "}", "in") or tokens[i + 1][ 0 ] in (tokenize.NEWLINE, tokenize.ENDMARKER, tokenize.COMMENT): <NEW_LINE> <INDENT> if i == start + 2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if keyword_token == "not": <NEW_LINE> <INDENT> if not found_and_or: <NEW_LINE> <INDENT> self.add_message( "superfluous-parens", line=line_num, args=keyword_token ) <NEW_LINE> <DEDENT> <DEDENT> elif keyword_token in ("return", "yield"): <NEW_LINE> <INDENT> self.add_message( "superfluous-parens", line=line_num, args=keyword_token ) <NEW_LINE> <DEDENT> elif keyword_token not in self._keywords_with_parens: <NEW_LINE> <INDENT> if not found_and_or: <NEW_LINE> <INDENT> self.add_message( "superfluous-parens", line=line_num, args=keyword_token ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return <NEW_LINE> <DEDENT> elif depth == 1: <NEW_LINE> <INDENT> if token[1] == ",": <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if token[1] in ("and", "or"): <NEW_LINE> <INDENT> found_and_or = True <NEW_LINE> <DEDENT> elif token[1] == "yield": <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> elif token[1] == "for": <NEW_LINE> <INDENT> return
Check that there are not unnecessary parens after a keyword. Parens are unnecessary if there is exactly one balanced outer pair on a line, and it is followed by a colon, and contains no commas (i.e. is not a tuple). Args: tokens: list of Tokens; the entire list of Tokens. start: int; the position of the keyword in the token list.
625941be4e4d5625662d42ef
def day11a(): <NEW_LINE> <INDENT> rows = helpers.get_input_strings("day11") <NEW_LINE> length = len(rows) <NEW_LINE> width = len(rows[0]) <NEW_LINE> print(f"Length: {length}, Width: {width}") <NEW_LINE> helpers.print_rows(rows) <NEW_LINE> limit = 4 <NEW_LINE> recurse = False <NEW_LINE> done = False <NEW_LINE> i = 0 <NEW_LINE> while not done: <NEW_LINE> <INDENT> print(f"\nIteration {i}:\n") <NEW_LINE> new_rows = helpers.perform_seating(rows, length, width, limit, recurse) <NEW_LINE> helpers.print_rows(new_rows) <NEW_LINE> if new_rows == rows: <NEW_LINE> <INDENT> done = True <NEW_LINE> <DEDENT> rows = new_rows <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> occupied = 0 <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> for c in row: <NEW_LINE> <INDENT> if c == "#": <NEW_LINE> <INDENT> occupied += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print(f"Iterations: {i}") <NEW_LINE> print(f"Occupied: {occupied}")
Day 11a: Seating System.
625941bee5267d203edcdbb4
def _parse_libsvm(scope, model, inputs): <NEW_LINE> <INDENT> return _parse_libsvm_simple_model(scope, model, inputs)
This is a delegate function. It doesn't nothing but invoke the correct parsing function according to the input model's type. :param scope: Scope object :param model: A scikit-learn object (e.g., OneHotEncoder and LogisticRegression) :param inputs: A list of variables :return: The output variables produced by the input model
625941bed10714528d5ffbf4
def set_text(): <NEW_LINE> <INDENT> if len(sys.argv[1:]) == 0: <NEW_LINE> <INDENT> phrase = 'Hello' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> phrase = sys.argv[1] <NEW_LINE> del sys.argv[1] <NEW_LINE> <DEDENT> return phrase
Must be called before _parse_args() or it will not work
625941be73bcbd0ca4b2bf8a
def remove_watch(self, path, superficial=False): <NEW_LINE> <INDENT> wd = self.__watches.get(path) <NEW_LINE> if wd is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> _LOGGER.debug("Removing watch for watch-handle (%d): [%s]", wd, path) <NEW_LINE> del self.__watches[path] <NEW_LINE> self.remove_watch_with_id(wd, superficial)
Remove our tracking information and call inotify to stop watching the given path. When a directory is removed, we'll just have to remove our tracking since inotify already cleans-up the watch.
625941be460517430c3940a0
def get_domain_url (self): <NEW_LINE> <INDENT> if isinstance(self.topoAdapter, AbstractRESTAdapter): <NEW_LINE> <INDENT> return self.topoAdapter.URL
:return: Return the configured domain URL extracted from DomainAdapter. :rtype: str
625941be56ac1b37e62640e8
def patch_view(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.process_update_request(request, *args, **kwargs) <NEW_LINE> with transaction.atomic(): <NEW_LINE> <INDENT> self.patch_view_before(request, *args, **kwargs) <NEW_LINE> assert self.form_data.get('id') == self.id and self.id, {'message': '传入参数不正确!', 'status': 412} <NEW_LINE> query_dict = model_to_dict_plus(self.queryset, None, None, None, True) <NEW_LINE> query_dict.update(self.form_data) <NEW_LINE> try: <NEW_LINE> <INDENT> new_obj = self.model(**query_dict) <NEW_LINE> new_obj.save() <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> raise Exception({'message': '保存资源出错', 'status': 500}) <NEW_LINE> <DEDENT> except IntegrityError as e: <NEW_LINE> <INDENT> error = e.args[0] if e.args else '' <NEW_LINE> raise Exception({'message': '违反唯一性约束,原始错误信息为:{}'.format(error), 'status': 409}) <NEW_LINE> <DEDENT> result = model_to_dict_plus(new_obj) <NEW_LINE> return result
patch请求用与更新部分字段 :param request: :param args: :param kwargs: :return: 更新后的字段
625941be6aa9bd52df036cb7
def detail(self, image_id): <NEW_LINE> <INDENT> url = self._url + '/' + image_id <NEW_LINE> resp = self._sess.get(url) <NEW_LINE> return resp.json
Retreive details of a server image :param server_id: numerical id of image to detail
625941be99fddb7c1c9de2a6
def minimization_loop_closing(pose, loops): <NEW_LINE> <INDENT> pose.dump_pdb('debug/before_minimize.pdb') <NEW_LINE> ft = rosetta.core.kinematics.FoldTree() <NEW_LINE> ft.add_edge(1, 14, 1) <NEW_LINE> ft.add_edge(1, 38, 2) <NEW_LINE> ft.add_edge(1, 48, 3) <NEW_LINE> ft.add_edge(1, 12, -1) <NEW_LINE> ft.add_edge(14, 13, -1) <NEW_LINE> ft.add_edge(14, 36, -1) <NEW_LINE> ft.add_edge(38, 37, -1) <NEW_LINE> ft.add_edge(38, 46, -1) <NEW_LINE> ft.add_edge(48, 47, -1) <NEW_LINE> ft.add_edge(48, 54, -1) <NEW_LINE> pose.fold_tree(ft) <NEW_LINE> rosetta.core.pose.correctly_add_cutpoint_variants(pose) <NEW_LINE> min_mover = rosetta.protocols.simple_moves.MinMover() <NEW_LINE> mm = rosetta.core.kinematics.MoveMap() <NEW_LINE> mm.set_jump(1, True) <NEW_LINE> for loop in loops: <NEW_LINE> <INDENT> for seqpos in range(loop[0], loop[1] + 1): <NEW_LINE> <INDENT> mm.set_bb(seqpos, True) <NEW_LINE> <DEDENT> <DEDENT> min_opts = rosetta.core.optimization.MinimizerOptions( "lbfgs_armijo_nonmonotone", 0.01, True ) <NEW_LINE> sfxn = rosetta.core.scoring.get_score_function() <NEW_LINE> sfxn.set_weight(rosetta.core.scoring.linear_chainbreak, 10) <NEW_LINE> min_mover.score_function(sfxn) <NEW_LINE> min_mover.movemap(mm) <NEW_LINE> min_mover.min_options(min_opts) <NEW_LINE> min_mover.apply(pose) <NEW_LINE> fast_loop_build(pose, loops)
Close the loops of a pose by minimization.
625941be1f5feb6acb0c4a68
def run_proc(proc, retcode, timeout = None): <NEW_LINE> <INDENT> _register_proc_timeout(proc, timeout) <NEW_LINE> stdout, stderr = proc.communicate() <NEW_LINE> proc._end_time = time.time() <NEW_LINE> if not stdout: <NEW_LINE> <INDENT> stdout = six.b("") <NEW_LINE> <DEDENT> if not stderr: <NEW_LINE> <INDENT> stderr = six.b("") <NEW_LINE> <DEDENT> if getattr(proc, "encoding", None): <NEW_LINE> <INDENT> stdout = stdout.decode(proc.encoding, "ignore") <NEW_LINE> stderr = stderr.decode(proc.encoding, "ignore") <NEW_LINE> <DEDENT> return _check_process(proc, retcode, timeout, stdout, stderr)
Waits for the given process to terminate, with the expected exit code :param proc: a running Popen-like object :param retcode: the expected return (exit) code of the process. It defaults to 0 (the convention for success). If ``None``, the return code is ignored. It may also be a tuple (or any object that supports ``__contains__``) of expected return codes. :param timeout: the number of seconds (a ``float``) to allow the process to run, before forcefully terminating it. If ``None``, not timeout is imposed; otherwise the process is expected to terminate within that timeout value, or it will be killed and :class:`ProcessTimedOut <plumbum.cli.ProcessTimedOut>` will be raised :returns: A tuple of (return code, stdout, stderr)
625941bed18da76e235323e7
def resize_image(self, name, framesize): <NEW_LINE> <INDENT> displayimg = self.previewtrain[name][0] <NEW_LINE> if framesize: <NEW_LINE> <INDENT> frameratio = float(framesize[0]) / float(framesize[1]) <NEW_LINE> imgratio = float(displayimg.size[0]) / float(displayimg.size[1]) <NEW_LINE> if frameratio <= imgratio: <NEW_LINE> <INDENT> scale = framesize[0] / float(displayimg.size[0]) <NEW_LINE> size = (framesize[0], int(displayimg.size[1] * scale)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> scale = framesize[1] / float(displayimg.size[1]) <NEW_LINE> size = (int(displayimg.size[0] * scale), framesize[1]) <NEW_LINE> <DEDENT> for i in range(0, 1000): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> displayimg = displayimg.resize(size, Image.ANTIALIAS) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> if i == 999: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> self.previewtrain[name][1] = ImageTk.PhotoImage(displayimg)
Resize the training preview image based on the passed in frame size
625941be94891a1f4081b9bc
def __init__(self, window): <NEW_LINE> <INDENT> egg.gui.GridLayout.__init__(self) <NEW_LINE> self._layout.setContentsMargins(0,0,0,0) <NEW_LINE> self._window = window <NEW_LINE> self.button_save = self.place_object(egg.gui.Button("Save")) <NEW_LINE> self.button_load = self.place_object(egg.gui.Button("Load")) <NEW_LINE> self.image = self.place_object(egg.pyqtgraph.ImageView(), 0,1, column_span=3, alignment=0) <NEW_LINE> self.set_column_stretch(2,10) <NEW_LINE> self.data = 0.0 <NEW_LINE> self._window.connect(self.button_save.signal_clicked, self.button_save_clicked) <NEW_LINE> self._window.connect(self.button_load.signal_clicked, self.button_load_clicked)
This object is a grid layout containing an image with save / load buttons. You must supply a window object so that it can connect buttons to actions, etc.
625941be796e427e537b04d8
def __init__(self): <NEW_LINE> <INDENT> self._current_date = 0 <NEW_LINE> self._holdings = dict() <NEW_LINE> self._members = dict()
Initializes library inventory/members and sets current date to 0
625941be7d847024c06be1cd
def __init__(self): <NEW_LINE> <INDENT> self.Platforms = None <NEW_LINE> self.LicenseIds = None <NEW_LINE> self.Offset = None <NEW_LINE> self.Limit = None
:param Platforms: 平台集合。 :type Platforms: list of str :param LicenseIds: 平台绑定的 license Id 集合。 :type LicenseIds: list of str :param Offset: 分页返回的起始偏移量,默认值:0。 :type Offset: int :param Limit: 分页返回的记录条数,默认值:10。 :type Limit: int
625941bede87d2750b85fca4
def get_coords(image): <NEW_LINE> <INDENT> boxes = [] <NEW_LINE> s3 = aws.get_s3_resource() <NEW_LINE> image_uri = image.get("imageURI") <NEW_LINE> output = { "image_uri": image_uri } <NEW_LINE> positions = image.get("positions") <NEW_LINE> single_box = image.get("single-box") is True <NEW_LINE> text_data = get_text_index(s3, image_uri) <NEW_LINE> if text_data is None: <NEW_LINE> <INDENT> return output <NEW_LINE> <DEDENT> o_width = text_data.get("width") <NEW_LINE> o_height = text_data.get("height") <NEW_LINE> canvas_width = text_data.get("canvas_width") <NEW_LINE> canvas_height = text_data.get("canvas_height") <NEW_LINE> if o_width is None or o_height is None: <NEW_LINE> <INDENT> output = { "imageURI": image_uri, "message": "no coordinates available" } <NEW_LINE> return output <NEW_LINE> <DEDENT> word_index = text_data.get("word_index") <NEW_LINE> start_index = text_data.get("start_index") <NEW_LINE> if 'width' in image: <NEW_LINE> <INDENT> width = image.get("width") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> width = canvas_width <NEW_LINE> <DEDENT> if 'height' in image: <NEW_LINE> <INDENT> height = image.get("height") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> height = canvas_height <NEW_LINE> <DEDENT> scale_w = float(width) / float(o_width) <NEW_LINE> scale_h = float(height) / float(o_height) <NEW_LINE> if word_index is None: <NEW_LINE> <INDENT> raise Exception <NEW_LINE> <DEDENT> for phrase_or_position in positions: <NEW_LINE> <INDENT> if isinstance(phrase_or_position, int): <NEW_LINE> <INDENT> idx = start_index.get(str(phrase_or_position)) <NEW_LINE> if idx is None: <NEW_LINE> <INDENT> p_boxes = [get_null_box_object(1)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> word_data = word_index[idx] <NEW_LINE> p_boxes = box_join(get_box(word_data, scale_w, scale_h), single_box=single_box) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> phrase_boxes = [] <NEW_LINE> p_boxes = None <NEW_LINE> for position in phrase_or_position: <NEW_LINE> <INDENT> idx = 
start_index.get(str(position)) <NEW_LINE> if idx is None: <NEW_LINE> <INDENT> p_boxes = [get_null_box_object(len(phrase_or_position))] <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> word_data = word_index[idx] <NEW_LINE> position_box = get_box(word_data, scale_w, scale_h) <NEW_LINE> phrase_boxes.append(position_box) <NEW_LINE> <DEDENT> <DEDENT> if not p_boxes: <NEW_LINE> <INDENT> p_boxes = box_join(phrase_boxes, single_box=single_box) <NEW_LINE> <DEDENT> <DEDENT> boxes.append(p_boxes) <NEW_LINE> <DEDENT> output['phrases'] = boxes <NEW_LINE> return output
{"positions": [[25, 31], 100, 110], "height": 768, "width": 1024, "imageURI": "http://aplaceforstuff.co.uk/x/"}
625941be99fddb7c1c9de2a7
def focusInEvent(self, event): <NEW_LINE> <INDENT> self.focused_in.emit(event) <NEW_LINE> super(CodeEdit, self).focusInEvent(event)
Overrides focusInEvent to emits the focused_in signal :param event: QFocusEvent
625941be85dfad0860c3ad6e
def test_qdel_hstry_jobs_rerun(self): <NEW_LINE> <INDENT> a = {'job_history_enable': 'True', 'job_history_duration': '5', 'job_requeue_timeout': '5', 'node_fail_requeue': '5', 'scheduler_iteration': '5'} <NEW_LINE> self.server.manager(MGR_CMD_SET, SERVER, a) <NEW_LINE> j = Job() <NEW_LINE> jid = self.server.submit(j) <NEW_LINE> self.server.expect(JOB, {'job_state': 'R'}, id=jid) <NEW_LINE> self.server.manager(MGR_CMD_SET, SERVER, {'scheduling': 'False'}) <NEW_LINE> self.mom.stop() <NEW_LINE> try: <NEW_LINE> <INDENT> self.server.deljob(jid) <NEW_LINE> <DEDENT> except PbsDeljobError as e: <NEW_LINE> <INDENT> err_msg = "could not connect to MOM" <NEW_LINE> self.assertTrue(err_msg in e.msg[0], "Did not get the expected message") <NEW_LINE> self.assertTrue(e.rc != 0, "Exit code shows success") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise self.failureException("qdel job did not return error") <NEW_LINE> <DEDENT> self.server.expect(JOB, {'job_state': 'Q'}, id=jid) <NEW_LINE> self.mom.start() <NEW_LINE> self.server.manager(MGR_CMD_SET, SERVER, {'scheduling': 'True'}) <NEW_LINE> a = {'job_state': 'F', 'substate': '92'} <NEW_LINE> self.server.expect(JOB, a, extend='x', offset=1, id=jid, interval=1)
Test rerunning a history job that was prematurely terminated due to a a downed mom.
625941be56b00c62f0f1456c
def docker_stop(container_name): <NEW_LINE> <INDENT> _logger.info('Stopping container %s', container_name) <NEW_LINE> dstop = subprocess.run(['docker', 'stop', container_name])
Stops the container named container_name
625941be29b78933be1e55c5
def execute(self, sql, parameters = ()): <NEW_LINE> <INDENT> with self.connection.cursor() as cursor: <NEW_LINE> <INDENT> cursor.execute(sql, parameters) <NEW_LINE> self.lastrowid = cursor.lastrowid <NEW_LINE> <DEDENT> self.connection.commit() <NEW_LINE> return self.lastrowid
Execute a general request - Insert, Update, Delete. Parameters ---------- sql : string parameters : set
625941bed6c5a10208143f5d
def addToDatabase(databaseValues, listOfItems, sessionId): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> conn = pymysql.connect(user=databaseValues['user'], host=databaseValues['host'], password=databaseValues['password'], database=databaseValues['name'], port=int(databaseValues['port'])) <NEW_LINE> curs = conn.cursor() <NEW_LINE> rawTimestamp = datetime.datetime.now() <NEW_LINE> timestamp = rawTimestamp.strftime('%Y-%m-%d %H:%M:%S') <NEW_LINE> timestampFractions = rawTimestamp.microsecond <NEW_LINE> print('Addition timestamp') <NEW_LINE> print(timestamp) <NEW_LINE> tempList = [] <NEW_LINE> for index in listOfItems: <NEW_LINE> <INDENT> channelId = index <NEW_LINE> measurementValue = listOfItems[index] <NEW_LINE> tempList.append((sessionId, channelId, timestamp, timestampFractions, measurementValue)) <NEW_LINE> <DEDENT> addValues = str(tempList) <NEW_LINE> addValues = addValues[1:-1] <NEW_LINE> sql1 = "Insert INTO " "measurements(fk_sessions_measurements, " "fk_channels_measurements, " "timestamp_measurements, timestampfractions_measurements, " "data_measurements) " "VALUES" + addValues <NEW_LINE> curs.execute(sql1) <NEW_LINE> conn.commit() <NEW_LINE> conn.close() <NEW_LINE> return True <NEW_LINE> <DEDENT> except TypeError as T: <NEW_LINE> <INDENT> print('add_to_database Typerror: ') <NEW_LINE> print(T) <NEW_LINE> return False <NEW_LINE> <DEDENT> except pymysql.err.Error as E: <NEW_LINE> <INDENT> print('add_to_database MySQL error: ') <NEW_LINE> print(E) <NEW_LINE> raise E <NEW_LINE> <DEDENT> except pymysql.err.IntegrityError as E2: <NEW_LINE> <INDENT> print('add_to_database MySQL IntegrityError: ') <NEW_LINE> print(E2) <NEW_LINE> raise E2
Adds measurements to the database from a list of values :param databaseValues: a python dictionary containing MySQL connection values Example: {'user': 'root', 'host': '127.0.0.1', 'password': '1234', 'name': 'databasename', 'port': '3306'} :param listOfItems: a dictionary containing channel id's as keys and measurements as values Example: {1: 25.6, 2: 22.3} :param sessionId: an int representing the id of the session that collected the measurements :return: True if something was added to the database, false otherwise
625941befb3f5b602dac35a5
def set_response_A(self, text): <NEW_LINE> <INDENT> self.response_A = text <NEW_LINE> self.button_A = Button(BLACK, self.x + 10, self.y + 120, self.width -20, 50, "A", text = self.response_A, text_color = WHITE)
Sets the text which will appear in button A
625941be57b8e32f524833ae
@app.route("/") <NEW_LINE> def home(): <NEW_LINE> <INDENT> return render_template("madlib.html")
display madlib story shell choice
625941be30bbd722463cbcd8
def process_data(path_to_event, path_to_time, time_scale=1.0): <NEW_LINE> <INDENT> if not os.path.isfile(path_to_event): <NEW_LINE> <INDENT> raise ValueError("Path {0} does not exist!".format(path_to_event)) <NEW_LINE> <DEDENT> if not os.path.isfile(path_to_time): <NEW_LINE> <INDENT> raise ValueError("Path {0} does not exist!".format(path_to_time)) <NEW_LINE> <DEDENT> events = [] <NEW_LINE> times = [] <NEW_LINE> with open(path_to_event, 'r', encoding="utf8") as f: <NEW_LINE> <INDENT> for line in f: <NEW_LINE> <INDENT> events.append([int(ii) for ii in line.split()]) <NEW_LINE> <DEDENT> <DEDENT> with open(path_to_time, 'r', encoding="utf8") as f: <NEW_LINE> <INDENT> for line in f: <NEW_LINE> <INDENT> times.append([float(ii) for ii in line.split()]) <NEW_LINE> <DEDENT> <DEDENT> num_events = np.max([np.max(event) for event in events]) <NEW_LINE> X = [] <NEW_LINE> Y = [] <NEW_LINE> for event, time in zip(events, times): <NEW_LINE> <INDENT> e = np.array(event) - 1 <NEW_LINE> e_one_hot = (np.arange(num_events) == e[...,None]).astype(float) <NEW_LINE> t = np.array(time) <NEW_LINE> t_diff = t <NEW_LINE> t_diff[1:] = (t_diff[1:] - t[:-1]) * time_scale <NEW_LINE> t_diff[0] = -1.0 <NEW_LINE> x = np.zeros((len(event)-1, num_events+1)) <NEW_LINE> x[:,:num_events] = e_one_hot[:-1] <NEW_LINE> x[:,-1] = t_diff[:-1] <NEW_LINE> y = np.zeros((len(event)-1, 2)) <NEW_LINE> y[:,0] = e[1:] <NEW_LINE> y[:,1] = t_diff[1:] <NEW_LINE> X.append(x) <NEW_LINE> Y.append(y) <NEW_LINE> <DEDENT> X = np.concatenate(X, axis=0) <NEW_LINE> Y = np.concatenate(Y, axis=0) <NEW_LINE> return X, Y, num_events
Process event and time data into ndarrays: X = [one_hot_Y_j-1 + dt_j-1], Y = [Y_j, dt_j]
625941be1f037a2d8b946114
def test_infrastructure_item_edit_out(self): <NEW_LINE> <INDENT> response = self.client.get(reverse('infrastructure_item_edit', args=[self.item.id])) <NEW_LINE> self.assertRedirects(response, reverse('user_login'))
Testing /infrastructure/item/edit/<item_id>
625941be07f4c71912b11395
def checkForUsageMessage(self, args): <NEW_LINE> <INDENT> if (len(args) < 2) or ("-h" in args) or ("--help" in args): <NEW_LINE> <INDENT> raise RuntimeError(self.USAGE_MESSAGE)
Check arguments and raise RuntimeError containing usage/help message if appropriate. If command line arguments are empty (apart from program name) or "-h" or "--help" was specified, then the exception is raised. Arguments: args -- List of command line arguments given
625941bef7d966606f6a9f16
def update(self, document): <NEW_LINE> <INDENT> document = CollectionHandler.convert_ids(document) <NEW_LINE> return self.collection_handle.update(document)
Updates the given document in database. :param document: Dictionary with _id :return: document
625941be45492302aab5e1d5
def addElementLabel(self, label): <NEW_LINE> <INDENT> if len(label.keys()) != 0: <NEW_LINE> <INDENT> self.useDefault = False
load default values only for new elements with no xmoto_label
625941be566aa707497f4482
def calculate_global_centiles_for_orgs(self, org_type): <NEW_LINE> <INDENT> extra_fields = [] <NEW_LINE> for col in self._get_col_aliases("numerator"): <NEW_LINE> <INDENT> extra_fields.append("num_" + col) <NEW_LINE> <DEDENT> for col in self._get_col_aliases("denominator"): <NEW_LINE> <INDENT> extra_fields.append("denom_" + col) <NEW_LINE> <DEDENT> extra_select_sql = "" <NEW_LINE> for f in extra_fields: <NEW_LINE> <INDENT> extra_select_sql += ", global_deciles.%s as %s" % (f, f) <NEW_LINE> <DEDENT> if self.measure.is_cost_based and self.measure.is_percentage: <NEW_LINE> <INDENT> extra_select_sql += ( ", global_deciles.cost_per_denom AS cost_per_denom" ", global_deciles.cost_per_num AS cost_per_num" ) <NEW_LINE> <DEDENT> context = {"extra_select_sql": extra_select_sql} <NEW_LINE> self.insert_rows_from_query( "global_deciles_{}s".format(org_type), self.table_name("global"), context )
Adds centiles to the already-existing centiles table
625941be96565a6dacc8f5e1
def _version_output(service): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> version_output = check_output([service, VERSION_FLAG]).decode('utf-8').strip() <NEW_LINE> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> version_output = '' <NEW_LINE> <DEDENT> return version_output
Return the output of the command `service` with the argument '--version'. Args: service (str): The service to check the version of. Returns: str: The output of the command `service` with the argument '--version', or an empty string if the command is not installed. Example: >>> _version_output('snips-nlu') 'snips-nlu 1.1.2 (0.62.3) [model_version: 0.19.0]'
625941bead47b63b2c509e95
def _deobfuscate(self): <NEW_LINE> <INDENT> hashmod = 256 <NEW_LINE> password = base64.b64decode(self.__password).decode('UTF-8') <NEW_LINE> hash_value = self._compute_hash(self.__host + self.__username) % hashmod <NEW_LINE> crypt = chr(hash_value & 0xFF) * len(password) <NEW_LINE> password_final = [] <NEW_LINE> for n in range(0, len(password)): <NEW_LINE> <INDENT> password_final.append(ord(password[n]) ^ ord(crypt[n])) <NEW_LINE> <DEDENT> decrypted_pwd = '' <NEW_LINE> for ci in password_final: <NEW_LINE> <INDENT> if ci == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> decrypted_pwd += chr(ci) <NEW_LINE> <DEDENT> return decrypted_pwd
Convert the obfuscated string to the actual password in clear text Functionality taken from the perl module VICredStore.pm since the goal was to emulate its behaviour.
625941be32920d7e50b280e2
def generate_batch(): <NEW_LINE> <INDENT> with open(f'{PAPERCUT / "config.properties"}', 'r') as config_file: <NEW_LINE> <INDENT> clean_config = config_file.read() <NEW_LINE> working_config = copy.deepcopy(clean_config) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with open(DEVICES, 'r') as device_list: <NEW_LINE> <INDENT> next(device_list) <NEW_LINE> devices_csv = csv.DictReader(device_list) <NEW_LINE> machine_info = [[line['Device'].replace('device\\', ''), line['Device groups'].replace('registration', '').strip('|')] for line in devices_csv if 'Smart' not in line['Device type']] <NEW_LINE> <DEDENT> with open(PASSWORDS, 'r') as device_passwords: <NEW_LINE> <INDENT> passwords_csv = csv.DictReader(device_passwords) <NEW_LINE> password_info = {line['Device']: line['Password'] for line in passwords_csv} <NEW_LINE> <DEDENT> print(f'\n Generating software packages for {len(machine_info)} machines...') <NEW_LINE> for name, group in tqdm(machine_info): <NEW_LINE> <INDENT> machine = Bundle(name, group) <NEW_LINE> machine.get_password(password_info) <NEW_LINE> machine.generate_config(working_config).zip_files() <NEW_LINE> with open(PAPERCUT / 'config.properties', 'w') as restore_config: <NEW_LINE> <INDENT> restore_config.write(clean_config) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> with open(f'{PAPERCUT / "config.properties"}', 'w') as config_file: <NEW_LINE> <INDENT> config_file.write(clean_config) <NEW_LINE> <DEDENT> print(' Clean config.properties file restored.')
Main function - Reads the clean 'properties.config' file from the 'papercut_software' folder and creates a working copy to use with the class methods so that a clean copy can be used each loop or restored if something crashes and then calls the methods to get all of the specific info, generate the config file, and then output the final, sorted .zip file bundles.
625941be4527f215b584c36f
def list_app_keys(self, app_id, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> return self.list_app_keys_with_http_info(app_id, **kwargs)
List App Keys # noqa: E501 Lists all API keys for a given app. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_app_keys(app_id, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str app_id: Identifies the app. (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: AppKeyListResponse If the method is called asynchronously, returns the request thread.
625941be29b78933be1e55c6
def __sum_sub_cts(self, ts_sum, ts_new, mult=1, neg=True): <NEW_LINE> <INDENT> if ts_sum is None: <NEW_LINE> <INDENT> ts_sum = np.zeros(len(self.sample_time)) <NEW_LINE> <DEDENT> start_time = ts_new[0, 0] <NEW_LINE> start_charge =ts_new[0, 1] <NEW_LINE> xx = ts_new[:, 0] - start_time <NEW_LINE> yy = ts_new[:, 1] - start_charge <NEW_LINE> try: <NEW_LINE> <INDENT> ts_spline = inter.interp1d(xx, yy) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> ts_spline = inter.interp1d(xx, yy) <NEW_LINE> <DEDENT> last_incr = 0 <NEW_LINE> for i in range(len(self.sample_time)): <NEW_LINE> <INDENT> if ts_new[0, 0] <= self.sample_time[i] <= ts_new[-1, 0]: <NEW_LINE> <INDENT> incr = ts_spline(self.sample_time[i]-ts_new[0, 0]) * mult <NEW_LINE> if incr > 0 or neg: <NEW_LINE> <INDENT> last_incr = incr <NEW_LINE> ts_sum[i] += ts_spline(self.sample_time[i]-ts_new[0, 0]) <NEW_LINE> <DEDENT> <DEDENT> elif self.sample_time[i] > ts_new[-1, 0]: <NEW_LINE> <INDENT> ts_sum[i] += last_incr <NEW_LINE> <DEDENT> <DEDENT> return ts_sum
Args: ts_sum: ts_new: mult: neg:
625941bee76e3b2f99f3a72a
def isValid(text): <NEW_LINE> <INDENT> return bool(re.search(r'\b(story)\b', text, re.IGNORECASE))
Returns True if the input is related to the news. Arguments: text -- user-input, typically transcribed speech
625941be6aa9bd52df036cb8
def test_restore_SessionState_metadata_restores_app_blob(self): <NEW_LINE> <INDENT> obj_repr = copy.copy(self.good_repr) <NEW_LINE> obj_repr['metadata']['app_blob'] = "YmxvYg==" <NEW_LINE> self.resume_fn(self.session, obj_repr) <NEW_LINE> self.assertEqual(self.session.metadata.app_blob, b"blob")
verify that _restore_SessionState_metadata() restores ``title``
625941be4a966d76dd550f22
def retype(self, newtype): <NEW_LINE> <INDENT> SupplyFeed.feed_types[self.type_].remove(self) <NEW_LINE> if not SupplyFeed.feed_types[self.type_]: <NEW_LINE> <INDENT> del SupplyFeed.feed_types[self.type_] <NEW_LINE> <DEDENT> self.type_ = newtype <NEW_LINE> if newtype not in SupplyFeed.feed_types: <NEW_LINE> <INDENT> SupplyFeed.feed_types[newtype] = [] <NEW_LINE> <DEDENT> SupplyFeed.feed_types[newtype].append(self)
Change the type of this SupplyFeed.
625941be32920d7e50b280e3
def get_edge_colors(im): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for x in range(im.size[0]): <NEW_LINE> <INDENT> result.append(im.getpixel((x, 0))) <NEW_LINE> result.append(im.getpixel((x, im.size[1] - 1))) <NEW_LINE> <DEDENT> for y in range(1, im.size[1] - 1): <NEW_LINE> <INDENT> result.append(im.getpixel((0, y))) <NEW_LINE> result.append(im.getpixel((im.size[0] - 1, y))) <NEW_LINE> <DEDENT> return result
This function will get the color values of the pixels that are located on the edge of an image :param im: The image of type (:py:class:`~PIL.Image.Image`), of which the pixel values are extracted. :returns: A list of tuples that represent the pixel values.
625941be379a373c97cfaa59
def keyPressEvent(self, event): <NEW_LINE> <INDENT> if event.key() == QtCore.Qt.Key_Delete: <NEW_LINE> <INDENT> self.delete_current_point()
Internal keypress handler
625941bed8ef3951e3243452
def testRestGetTwoPages(self): <NEW_LINE> <INDENT> command = 'test command' <NEW_LINE> rtn_vals1 = [1, 2, 3] <NEW_LINE> rtn_vals2 = [4, 5, 6] <NEW_LINE> req_resp = [{'isLastPage': False, 'nextPageStart': 10, 'values': rtn_vals1}, {'isLastPage': True, 'nextPageStart': 0, 'values': rtn_vals2}] <NEW_LINE> with patch('bb_reader.BBDirReader._request', side_effect=req_resp) as mock_request, patch('bb_reader.BBDirReader._get_repo_names', return_value=self._names): <NEW_LINE> <INDENT> reader = bb_reader.BBDirReader(self._context) <NEW_LINE> retn = reader._bb_rest_get(command) <NEW_LINE> <DEDENT> self.assertEqual(retn, rtn_vals1 + rtn_vals2) <NEW_LINE> url = '{}/{}?start='.format(self._env_dict['BB_REST_URL'], command) <NEW_LINE> mock_request.assert_has_calls([call(url + '0', return_json=True), call(url + '10', return_json=True)], any_order=True)
Test the rest request with 2 page response.
625941becad5886f8bd26eef
def views_in_group(self, group): <NEW_LINE> <INDENT> pass
return [View] Returns all open views in the given group.
625941be099cdd3c635f0b71
def process_datapoint(self, datapoint): <NEW_LINE> <INDENT> timestamp = datapoint.timestamp <NEW_LINE> if self._current_point and self._current_point.timestamp.date() != timestamp.date(): <NEW_LINE> <INDENT> self._first_timestamp = timestamp <NEW_LINE> <DEDENT> self._current_point = datapoint <NEW_LINE> if len(self.positions) == 0: <NEW_LINE> <INDENT> self.positions.append((timestamp, datapoint.C, 0)) <NEW_LINE> <DEDENT> super(PositionalStrategy, self).process_datapoint(datapoint)
Accept `datapoint` and prepare to execute next `step`. Datapoint is assumed to be OHLC bar with associated timestamp in corresponding attributes.
625941be236d856c2ad446ec
def generateSubsamples(df,save_folder, nrep = 100, frac = 0.75, seed_entropy = 237262676468864319646780408567402854442,resample=True): <NEW_LINE> <INDENT> if not resample: <NEW_LINE> <INDENT> with open(os.path.join(save_folder, 'samples.pk'), 'rb') as file_name: <NEW_LINE> <INDENT> samples = pickle.load(file_name) <NEW_LINE> <DEDENT> return samples <NEW_LINE> <DEDENT> sq1 = np.random.SeedSequence(seed_entropy) <NEW_LINE> seeds = sq1.generate_state(nrep) <NEW_LINE> samples = [df.sample(frac = frac,random_state =seeds[i]) for i in range(nrep)] <NEW_LINE> saveTo(samples,save_folder,'samples.pk') <NEW_LINE> return samples
Subsample a fraction frac of data, repeat nrep times Save the generated samples as pickled list to save/(river)/samples.pk. Parameters ---------- df: pandas DataFrame, shape (n_samples, n_nodes) The input data. Each row is an observation where some of the nodes have extreme values. frac: a number in [0,1] fraction of data to be sampled without replacement each time nrep: int number of times data to be subsampled seed_entropy: int for reproducibility of data subsampling. river: subfolder name. (Recommended to be the river name). resample: default True.Generate new samples. if False: try to load existing samples from save/(river)/samples.pk
625941be7d43ff24873a2bb3
def _disable_complete_on_space(self) -> str: <NEW_LINE> <INDENT> delay_factor = self.select_delay_factor(delay_factor=0) <NEW_LINE> time.sleep(delay_factor * 0.1) <NEW_LINE> command = "environment command-completion space false" <NEW_LINE> self.write_channel(self.normalize_cmd(command)) <NEW_LINE> time.sleep(delay_factor * 0.1) <NEW_LINE> return self.read_channel()
SR-OS tries to auto complete commands when you type a "space" character. This is a bad idea for automation as what your program is sending no longer matches the command echo from the device, so we disable this behavior.
625941be8e7ae83300e4aee1
def block_hash_file(self, radosobject): <NEW_LINE> <INDENT> hashes = [] <NEW_LINE> append = hashes.append <NEW_LINE> block_hash = self.block_hash <NEW_LINE> for block in file_sync_read_chunks(radosobject, self.blocksize, 1, 0): <NEW_LINE> <INDENT> append(block_hash(block)) <NEW_LINE> <DEDENT> return hashes
Return the list of hashes (hashes map) for the blocks in a buffered file. Helper method, does not affect store.
625941bef8510a7c17cf9610
def test_get_sun_pitch_yaw(): <NEW_LINE> <INDENT> pitch, yaw = get_sun_pitch_yaw(109, 55.3, time='2021:242') <NEW_LINE> assert np.allclose((pitch, yaw), (60.453385, 29.880125)) <NEW_LINE> pitch, yaw = get_sun_pitch_yaw(238.2, -58.9, time='2021:242') <NEW_LINE> assert np.allclose((pitch, yaw), (92.405603, 210.56582)) <NEW_LINE> pitch, yaw = get_sun_pitch_yaw(338, -9.1, time='2021:242') <NEW_LINE> assert np.allclose((pitch, yaw), (179.417797, 259.703451))
Test that values approximately match those from ORviewer. See slack discussion "ORviewer sun / anti-sun plots azimuthal Sun yaw angle"
625941be99cbb53fe6792afc
def test_102_eab_check(self): <NEW_LINE> <INDENT> payload = 'payload' <NEW_LINE> protected = None <NEW_LINE> result = (403, 'urn:ietf:params:acme:error:externalAccountRequired', 'external account binding required') <NEW_LINE> self.assertEqual(result, self.account._eab_check(protected, payload))
test external account binding payload and but no protected
625941be0383005118ecf4fa
def test_register_noname(self): <NEW_LINE> <INDENT> if self._register_device(noname=True): <NEW_LINE> <INDENT> raise TestSuiteRunningError("The server must not accept registering with no id and name")
Register a device for the current license wih no id nor name. The server should return an error
625941bebe8e80087fb20b5c
def get_transformed(self, transformers): <NEW_LINE> <INDENT> block_structure = self.get_collected() <NEW_LINE> transformers.transform(block_structure) <NEW_LINE> return block_structure
Returns the transformed Block Structure for the root_block_usage_key, getting block data from the cache and modulestore, as needed. Details: Same as the get_collected method, except the transformers' transform methods are also called. Arguments: transformers (BlockStructureTransformers) - Collection of transformers to apply. Returns: BlockStructureBlockData - A transformed block structure, starting at self.root_block_usage_key.
625941be97e22403b379ceae
def process(self, resp): <NEW_LINE> <INDENT> pass
:param resp: :return:
625941be92d797404e30409f
def bboxes_nms_fast(classes, scores, bboxes, threshold=0.45): <NEW_LINE> <INDENT> pass
Apply non-maximum selection to bounding boxes.
625941bebaa26c4b54cb1038
def save_hdf5(self, filename, force_overwrite=True): <NEW_LINE> <INDENT> with HDF5TrajectoryFile(filename, 'w', force_overwrite=force_overwrite) as f: <NEW_LINE> <INDENT> f.write(coordinates=in_units_of(self.xyz, Trajectory._distance_unit, f.distance_unit), time=self.time, cell_lengths=in_units_of(self.unitcell_lengths, Trajectory._distance_unit, f.distance_unit), cell_angles=self.unitcell_angles) <NEW_LINE> f.topology = self.topology
Save trajectory to MDTraj HDF5 format Parameters ---------- filename : str filesystem path in which to save the trajectory force_overwrite : bool, default=True Overwrite anything that exists at filename, if its already there
625941be9c8ee82313fbb68a
def get_action(self, consumer: Actor) -> Optional[ActionOrHandler]: <NEW_LINE> <INDENT> return actions.ItemAction(consumer, self.parent)
try to return the action for this item
625941be3c8af77a43ae36b3
def db_sent_updater(self, action, sender_id=None, postid=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if action == 'update': <NEW_LINE> <INDENT> day = (datetime.now(timezone.utc) + timedelta(hours=self.credential.Timezone)).day <NEW_LINE> if day != self.day: <NEW_LINE> <INDENT> self.day = day <NEW_LINE> self.db_sent.clear() <NEW_LINE> <DEDENT> <DEDENT> elif action == 'add': <NEW_LINE> <INDENT> if sender_id not in self.db_sent: <NEW_LINE> <INDENT> self.db_sent[sender_id] = [postid] <NEW_LINE> <DEDENT> else: self.db_sent[sender_id] += [postid] <NEW_LINE> <DEDENT> elif action == 'delete': <NEW_LINE> <INDENT> self.db_sent[sender_id].remove(postid) <NEW_LINE> if len(self.db_sent[sender_id]) == 0: <NEW_LINE> <INDENT> del self.db_sent[sender_id] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> pass <NEW_LINE> print(ex)
Update self.db_sent :param action: 'update','add' or 'delete' -> str :param sender_id: sender id who has sent the menfess -> str :param postid: tweet id or (sender_id, tweet id) -> str or tuple
625941be460517430c3940a1
def __navigateForward(self): <NEW_LINE> <INDENT> if len(self.__currentHistory) > 0 and self.__currentHistoryLocation < len(self.__currentHistory) - 1: <NEW_LINE> <INDENT> self.__currentHistoryLocation += 1 <NEW_LINE> url = self.__currentHistory[self.__currentHistoryLocation] <NEW_LINE> self.selectUrl(url)
Navigate through the history one step forward.
625941be6fb2d068a760efb0
def testE(self): <NEW_LINE> <INDENT> gr = LognormalRestraint(*self.all) <NEW_LINE> self.m.add_restraint(gr) <NEW_LINE> for i in range(100): <NEW_LINE> <INDENT> map(self.change_value, self.all) <NEW_LINE> e = self.m.evaluate(False) <NEW_LINE> self.assertAlmostEqual(e, self.normal_e(*self.all))
Test LognormalRestraint(23) score
625941be7b25080760e39370
def to_categorical(y): <NEW_LINE> <INDENT> y = LongTensor(y).view(-1, 1) <NEW_LINE> y_onehot = Tensor(y.size(0), 10) <NEW_LINE> y_onehot.zero_() <NEW_LINE> y_onehot.scatter_(1, y, 1) <NEW_LINE> return y_onehot
1-hot encodes a tensor
625941bec4546d3d9de72947
def swissPairings(): <NEW_LINE> <INDENT> standings = [(data[0], data[1]) for data in playerStandings()] <NEW_LINE> if len(standings) < 2: <NEW_LINE> <INDENT> raise KeyError("Looks like we dont have enough players, bring someone on board.") <NEW_LINE> <DEDENT> left = standings[0::2] <NEW_LINE> right = standings[1::2] <NEW_LINE> pairings = zip(left, right) <NEW_LINE> results = [tuple(list(sum(pairing, ()))) for pairing in pairings] <NEW_LINE> return results
Returns a list of pairs of players for the next round of a match. Assuming that there are an even number of players registered, each player appears exactly once in the pairings. Each player is paired with another player with an equal or nearly-equal win record, that is, a player adjacent to him or her in the standings. Returns: A list of tuples, each of which contains (id1, name1, id2, name2) id1: the first player's unique id name1: the first player's name id2: the second player's unique id name2: the second player's name
625941be5fdd1c0f98dc0148
def make_move(self, direction): <NEW_LINE> <INDENT> super(GameDisplay, self).make_move(direction) <NEW_LINE> self.win.queue_draw()
Carries out a move in the specified direction and queues a display update. @param direction: which direction to move in
625941bebf627c535bc130e4
def check_tie(board): <NEW_LINE> <INDENT> for list in board: <NEW_LINE> <INDENT> for symbol in list: <NEW_LINE> <INDENT> if (symbol != "X") or (symbol != "O"): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if check_win(board) == True: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
Check the game for a tie, no available locations and no winners. args: board: 3x3 table (list of lists) containing the current state of the game returns: True if there is a tie. False the board is not full yet or there is a winner
625941be0a366e3fb873e72d
def matching(pot): <NEW_LINE> <INDENT> for player in pot.players: <NEW_LINE> <INDENT> if not player.hand: <NEW_LINE> <INDENT> trips = [] <NEW_LINE> pairs = [] <NEW_LINE> for card in player.hole_cards: <NEW_LINE> <INDENT> count = player.hole_cards.count(card) <NEW_LINE> if count == 4: <NEW_LINE> <INDENT> kicker = [x for x in player.hole_cards if x != card] <NEW_LINE> player.hand = [7, card, kicker[0]] <NEW_LINE> break <NEW_LINE> <DEDENT> elif count == 3: <NEW_LINE> <INDENT> trips.append(card) <NEW_LINE> player.hole_cards = [x for x in player.hole_cards if x != card] <NEW_LINE> <DEDENT> elif count == 2: <NEW_LINE> <INDENT> pairs.append(card) <NEW_LINE> player.hole_cards = [x for x in player.hole_cards if x != card] <NEW_LINE> <DEDENT> <DEDENT> if trips: <NEW_LINE> <INDENT> if len(trips) == 2: <NEW_LINE> <INDENT> player.hand = [6, trips[0], trips[1]] <NEW_LINE> <DEDENT> elif pairs: <NEW_LINE> <INDENT> player.hand = [6, trips[0], pairs[0]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> player.hand = [3, trips[0], player.hole_cards[0], player.hole_cards[1]] <NEW_LINE> <DEDENT> <DEDENT> elif pairs: <NEW_LINE> <INDENT> if len(pairs) > 1: <NEW_LINE> <INDENT> player.hand = [2, pairs[0], pairs[1], player.hole_cards[0]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> player.hand = [1, pairs[0], player.hole_cards[0], player.hole_cards[1], player.hole_cards[2]] <NEW_LINE> <DEDENT> <DEDENT> elif not player.hand: <NEW_LINE> <INDENT> player.hand = [0] + player.hole_cards
identifies the highest value matching hands eg quads to pairs
625941bea219f33f34628883
def add_route(self, uri_template, resource, **kwargs):
    """Associate a templatized URI path with a resource.

    The template is validated, then registration is delegated to the
    configured router's ``add_route()``.

    Args:
        uri_template (str): A templatized URI. Must be a string that
            starts with ``'/'`` and contains no ``'//'``.
        resource (instance): Object whose ``on_get()``, ``on_put()``,
            etc. responders will handle matching requests.

    Keyword Args:
        Any additional keyword arguments (e.g. ``suffix``) are passed
        through to the underlying router's ``add_route()`` method.

    Raises:
        TypeError: If ``uri_template`` is not a string.
        ValueError: If ``uri_template`` is malformed.
    """
    def _validate(template):
        # Guard clauses for the three malformed-template cases.
        if not isinstance(template, compat.string_types):
            raise TypeError('uri_template is not a string')
        if not template.startswith('/'):
            raise ValueError("uri_template must start with '/'")
        if '//' in template:
            raise ValueError("uri_template may not contain '//'")

    _validate(uri_template)
    self._router.add_route(uri_template, resource, **kwargs)
Associate a templatized URI path with a resource. Falcon routes incoming requests to resources based on a set of URI templates. If the path requested by the client matches the template for a given route, the request is then passed on to the associated resource for processing. If no route matches the request, control then passes to a default responder that simply raises an instance of :class:`~.HTTPNotFound`. This method delegates to the configured router's ``add_route()`` method. To override the default behavior, pass a custom router object to the :class:`~.API` initializer. (See also: :ref:`Routing <routing>`) Args: uri_template (str): A templatized URI. Care must be taken to ensure the template does not mask any sink patterns, if any are registered. (See also: :meth:`~.add_sink`) resource (instance): Object which represents a REST resource. Falcon will pass GET requests to ``on_get()``, PUT requests to ``on_put()``, etc. If any HTTP methods are not supported by your resource, simply don't define the corresponding request handlers, and Falcon will do the right thing. Keyword Args: suffix (str): Optional responder name suffix for this route. If a suffix is provided, Falcon will map GET requests to ``on_get_{suffix}()``, POST requests to ``on_post_{suffix}()``, etc. In this way, multiple closely-related routes can be mapped to the same resource. For example, a single resource class can use suffixed responders to distinguish requests for a single item vs. a collection of those same items. Another class might use a suffixed responder to handle a shortlink route in addition to the regular route for the resource. Note: Any additional keyword arguments not defined above are passed through to the underlying router's ``add_route()`` method. The default router ignores any additional keyword arguments, but custom routers may take advantage of this feature to receive additional options when setting up routes. 
Custom routers MUST accept such arguments using the variadic pattern (``**kwargs``), and ignore any keyword arguments that they don't support.
625941bebe383301e01b53a1
def run(self):
    """Run every configured simulation scenario in sequence.

    For each scenario: announce the start, then keep running generations
    (always at least one) until ``evolve`` says to stop. ``evolve`` also
    produces the simulation states carried into the next generation, so
    data can flow from one run to the next.
    """
    for scenario in self._scenarios:
        self.scenario_start()
        generation = 0
        while True:
            self._run_one_generation(
                scenario[SIMULATOR_CLASS],
                scenario[SIMULATOR_CONFIGURATION],
                generation)
            keep_going, self._simulation_states = self.evolve(
                self._simulation_states)
            generation += 1
            if not keep_going:
                break
Run several simulation scenarios. Using the configuration: - run first scenario until condition function is satisfied - run next scenario if there is one - use a dictionary as "simulation state" where SomObject instances can pass data from one run to the next one - the simulation state should contain score information for the evolve function as well, which the should_run function could provide A scenario consists of a number of simulations run in parallel. After each round a "evolution" function can manipulate the simulation states and decide if another round of the same scenario should be run (a new generation, so to speak).
625941be26238365f5f0ed80
def get_meshfn_from_dagpath(dagpath):
    """Return an ``om.MFnMesh`` function set for the specified dagpath.

    :param dagpath: input dagpath name, resolved through
        ``get_dagpath_from_name``.
    """
    return om.MFnMesh(get_dagpath_from_name(dagpath))
return a functionset for a specified dagpath :param dagpath : input dagpath
625941beab23a570cc250096
def order_nodes(qs, **kwargs):
    """Sort a queryset (nominally of Node objects) by date and/or context.

    Similar to ``queryset.order_by()`` except more fine-grained. Keyword
    arguments, applied in order:

    - ``field``: name of a datetime column; rows with a NULL date sort last.
    - ``context``: object exposing ``locations_available``; nodes whose
      ``tag_string`` contains an available location's tag sort first.
    """
    select = {}
    order_by = []
    field = kwargs.get('field')
    if field:
        # Sort on the IS NULL flag first so NULL dates go to the end,
        # then on the actual date value.
        select['date_is_null'] = '{0} IS NULL'.format(field)
        order_by.append('date_is_null')
        order_by.append(field)
    context = kwargs.get('context')
    if context:
        locations = context.locations_available.all()
        for loc in locations:
            # One computed boolean column per available location; the '-'
            # prefix sorts matching rows (LIKE hit) before non-matching.
            # NOTE(review): loc.tag_string is interpolated directly into
            # raw SQL — a quote inside a tag string would break or inject
            # the query. Consider extra(..., select_params=...) instead.
            name = 'loc{0}'.format(loc.pk)
            select[name] = 'tag_string LIKE \':%%{0}%%:\''.format(loc.tag_string)
            order_by.append('-{0}'.format(name))
    return qs.extra(
        select=select,
        order_by=order_by
    )
Accepts a queryset (nominally of Node objects) and sorts them by context and/or date. Similar to queryset.order_by() except more fine-grained. Accepts these argument and applies them in order: - context - field (datetime)
625941be167d2b6e31218aac
def main():
    """Smoke-test calculate_ap_for_class / calculate_ap on fixed data."""
    ground_truth = [
        ['001.jpg', 1, 0, 0],
        ['002.jpg', 0, 1, 0],
        ['003.jpg', 0, 0, 1],
        ['004.jpg', 1, 0, 0],
        ['005.jpg', 1, 0, 0],
    ]
    predictions = [
        ['001.jpg', 0.5, 0.2, 0.3],
        ['002.jpg', 0.6, 0.2, 0.3],
        ['003.jpg', 0.3, 0.2, 0.3],
        ['004.jpg', 0.2, 0.2, 0.3],
        ['005.jpg', 0.9, 0.2, 0.3],
    ]
    print(calculate_ap_for_class(ground_truth, predictions, 0))
    print(calculate_ap(ground_truth, predictions, 3))
Test Module.
625941be23849d37ff7b2fa6
def on_settings_changed(self, key, udata=None):
    """Handle a settings change for ``key``.

    Intentionally a no-op here — presumably a hook/callback slot to be
    overridden or connected by subclasses; confirm against callers.
    ``udata`` is optional caller-supplied data and is currently ignored.
    """
    pass
Handle settings changes
625941befff4ab517eb2f350
def page_size(self):
    """Page size for display: :attr:`SINGLE_PAGE_SIZE` on the long edge."""
    edge = self.single_page_size
    return self.size(height=edge, width=edge, exact=True)
page size for display: :attr:`SINGLE_PAGE_SIZE` on the long edge
625941becad5886f8bd26ef0
def post(self, request: Request):
    """Handle an image upload.

    Reads the ``img`` file field from the request, stores it via
    ``upload()``, and responds with the resulting URL.
    """
    image = request.FILES.get('img')
    image_url = upload(image)
    return Response({"img_url": image_url})
上传图片 需要字段:img(文件类型)
625941beeab8aa0e5d26da70
def get_url(self):
    """Return the (actual) absolute URL, with protocol & domain.

    https://code.djangoproject.com/wiki/ReplacingGetAbsoluteUrl
    """
    current_site = Site.objects.get_current()
    return 'https://%s%s' % (current_site.domain, self.get_url_path())
Return the (actual) absolute URL, with protocol & domain. https://code.djangoproject.com/wiki/ReplacingGetAbsoluteUrl
625941be66673b3332b91fa7
def imust_get_department_data_view(request):
    """URL[GET]:/data/imust/get_department_data/

    Fetch the department data and wrap it in a success response.
    """
    return success_response(imust_get_department_data())
URL[GET]:/data/imust/get_department_data/
625941bef9cc0f698b140514