code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.id == None: <NEW_LINE> <INDENT> self.answer_data = self.question.default_SolutionData() <NEW_LINE> <DEDENT> super(CorrectAnswer, self).save(*args, **kwargs) | Add default answer_data before saving new CorrectAnswer | 625941bf4d74a7450ccd40fc |
def get_correctness_test_inputs(use_numpy, use_validation_data, with_distribution, x_train, y_train, x_predict): <NEW_LINE> <INDENT> training_epochs = 2 <NEW_LINE> global_batch_size = 64 <NEW_LINE> batch_size = global_batch_size <NEW_LINE> use_per_core_batch_size = ( with_distribution and not distributed_training_utils.global_batch_size_supported( with_distribution)) <NEW_LINE> if use_per_core_batch_size: <NEW_LINE> <INDENT> batch_size //= with_distribution.num_replicas_in_sync <NEW_LINE> <DEDENT> if use_numpy: <NEW_LINE> <INDENT> training_inputs = { 'batch_size': batch_size, 'x': x_train, 'y': y_train, 'epochs': training_epochs, 'shuffle': False, } <NEW_LINE> if use_validation_data: <NEW_LINE> <INDENT> eval_inputs = None <NEW_LINE> training_inputs['validation_data'] = (x_train, y_train) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> eval_inputs = { 'batch_size': batch_size, 'x': x_train, 'y': y_train, } <NEW_LINE> <DEDENT> predict_inputs = { 'x': np.array(x_predict, dtype=np.float32), } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> train_dataset = dataset_ops.Dataset.from_tensor_slices( (x_train, y_train)) <NEW_LINE> x = batch_wrapper( train_dataset, batch_size, with_distribution, repeat=training_epochs) <NEW_LINE> training_inputs = { 'batch_size': None, 'x': x, 'y': None, 'epochs': training_epochs, 'shuffle': False, 'steps_per_epoch': len(x_train) // global_batch_size, } <NEW_LINE> if use_validation_data: <NEW_LINE> <INDENT> eval_inputs = None <NEW_LINE> eval_dataset = dataset_ops.Dataset.from_tensor_slices( (x_train, y_train)) <NEW_LINE> x = batch_wrapper(eval_dataset, batch_size, with_distribution) <NEW_LINE> training_inputs['validation_data'] = x <NEW_LINE> training_inputs['validation_steps'] = 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> eval_inputs = { 'batch_size': None, 'x': x, 'y': None, 'steps': 20, } <NEW_LINE> <DEDENT> predict_batch_size = len(x_predict) <NEW_LINE> if use_per_core_batch_size: <NEW_LINE> <INDENT> predict_batch_size //= 
with_distribution.num_replicas_in_sync <NEW_LINE> <DEDENT> predict_dataset = dataset_ops.Dataset.from_tensor_slices(x_predict) <NEW_LINE> predict_dataset = batch_wrapper(predict_dataset, predict_batch_size, with_distribution) <NEW_LINE> predict_inputs = { 'steps': 1, 'x': predict_dataset, } <NEW_LINE> <DEDENT> return training_inputs, eval_inputs, predict_inputs | Generates the inputs for correctness check when enable Keras with DS. | 625941bf5510c4643540f324 |
def create_conf_loader(*args, **kwargs): <NEW_LINE> <INDENT> import warnings <NEW_LINE> warnings.warn( '"create_conf_loader" has been renamed to "create_config_loader".', DeprecationWarning ) <NEW_LINE> return create_config_loader(*args, **kwargs) | Create a default configuration loader.
.. deprecated:: 1.0.0b1
Use :func:`create_config_loader` instead. This function will be removed
in version 1.0.1. | 625941bfcb5e8a47e48b79e7 |
def start(start_router): <NEW_LINE> <INDENT> fwglobals.log.info("start router...") <NEW_LINE> daemon_rpc('start', start_vpp=start_router) <NEW_LINE> fwglobals.log.info("done") | Handles 'fwagent start' command.
Starts the infinite connection loop run by Fwagent in daemon mode.
See documentation on FwagentDaemon class.
:param start_router: Start router, while applying router configuration.
:returns: None. | 625941bf7d43ff24873a2bd7 |
def test_daily_working_hours_attribute_is_a_negative_number(): <NEW_LINE> <INDENT> wh = WorkingHours() <NEW_LINE> with pytest.raises(ValueError) as cm: <NEW_LINE> <INDENT> wh.daily_working_hours = -10 <NEW_LINE> <DEDENT> assert str(cm.value) == 'WorkingHours.daily_working_hours should be a positive integer ' 'value greater than 0 and smaller than or equal to 24' | testing if a ValueError will be raised when the daily_working_hours
attribute is set to a negative value | 625941bfd53ae8145f87a1ad |
def test_html(self): <NEW_LINE> <INDENT> contents_that_should_be_shown = [ (1, self.e1.name), (1, self.e2.name), (1, 'href="{}"'.format(r('core:event-create'))), ] <NEW_LINE> for count, expected in contents_that_should_be_shown: <NEW_LINE> <INDENT> with self.subTest(): <NEW_LINE> <INDENT> self.assertContains(self.response, expected, count) <NEW_LINE> <DEDENT> <DEDENT> contents_that_should_not_be_shown = [ self.e3.name, ] <NEW_LINE> for expected in contents_that_should_not_be_shown: <NEW_LINE> <INDENT> with self.subTest(): <NEW_LINE> <INDENT> self.assertNotContains(self.response, expected) | Must show only event created by user logged in. | 625941bfb830903b967e9846 |
def displayText(self, value, locale): <NEW_LINE> <INDENT> return QStyledItemDelegate.displayText(self, self.tr(self.operations[value]), locale) | displayText ( const QVariant & value, const QLocale & locale ) | 625941bf9f2886367277a7c9 |
def load_particle_data(datafile, units): <NEW_LINE> <INDENT> res = np.fromfile( datafile, dtype=[('Id', np.dtype(int)), ('Values', np.dtype(float), 11)]) <NEW_LINE> ids = res["Id"] <NEW_LINE> vals = res["Values"] <NEW_LINE> particles = { "id": ids, "r": vals[:, 0]*u.Unit(units["length"]), "phi": vals[:, 1]*u.rad, "r dot": vals[:, 2]*u.Unit(units["length"])/u.Unit(units["time"]), "phi dot": vals[:, 3]*u.rad/u.Unit(units["time"]), "r ddot": vals[:, 4]*u.Unit(units["length"])/u.Unit(units["time"])**2, "phi ddot": vals[:, 5]*u.rad/u.Unit(units["time"])**2, "mass": vals[:, 6]*u.Unit(units["mass"]), "size": vals[:, 7]*u.Unit(units["length"]), "stokes number": vals[:, 10] } <NEW_LINE> return particles | Load a variable from a fargocpt datafile for all particles.
Parameters
----------
datafile : str
Path to the datafile.
varname : str
Name of the variable.
Returns
-------
dict
Dictionary containing id, x, y and size (the particle radius). | 625941bf23849d37ff7b2fca |
def texture_update(self, *largs): <NEW_LINE> <INDENT> mrkup = self._label.__class__ is CoreMarkupLabel <NEW_LINE> self.texture = None <NEW_LINE> if (not self._label.text or (self.halign[-1] == 'y' or self.strip) and not self._label.text.strip()): <NEW_LINE> <INDENT> self.texture_size = (0, 0) <NEW_LINE> if mrkup: <NEW_LINE> <INDENT> self.refs, self._label._refs = {}, {} <NEW_LINE> self.anchors, self._label._anchors = {}, {} <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if mrkup: <NEW_LINE> <INDENT> text = self._label.text <NEW_LINE> if self.halign[-1] == 'y' or self.strip: <NEW_LINE> <INDENT> text = text.strip() <NEW_LINE> <DEDENT> self._label.text = ''.join(('[color=', get_hex_from_color(self.color), ']', text, '[/color]')) <NEW_LINE> self._label.refresh() <NEW_LINE> if self._label.texture: <NEW_LINE> <INDENT> self._label.texture.bind() <NEW_LINE> <DEDENT> self._label.text = text <NEW_LINE> self.refs = self._label.refs <NEW_LINE> self.anchors = self._label.anchors <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._label.refresh() <NEW_LINE> <DEDENT> texture = self._label.texture <NEW_LINE> if texture is not None: <NEW_LINE> <INDENT> self.texture = self._label.texture <NEW_LINE> self.texture_size = list(self.texture.size) | Force texture recreation with the current Label properties.
After this function call, the :attr:`texture` and :attr:`texture_size`
will be updated in this order. | 625941bf1f5feb6acb0c4a8d |
def inorder(root: Optional[TreeNode]) -> Generator[int, None, None]: <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> yield from inorder(root.left) <NEW_LINE> yield root.val <NEW_LINE> yield from inorder(root.right) | Return tree values in inorder | 625941bfdd821e528d63b0e4 |
def train(self, epochs = 100000): <NEW_LINE> <INDENT> f = open("log", 'w') <NEW_LINE> rewards = numpy.zeros(epochs) <NEW_LINE> disp_freq = 2000 <NEW_LINE> decay = 0.95 <NEW_LINE> replay_freq = 1 <NEW_LINE> print ("start of training") <NEW_LINE> start_time = time.time() <NEW_LINE> mini_batch_size = 32 <NEW_LINE> for epch in range(epochs): <NEW_LINE> <INDENT> self.game.board.set_state([0,0,0,0,0,0,0,0,0]) <NEW_LINE> result = self.game.run_main() <NEW_LINE> reward = result <NEW_LINE> self.update_model(self.last_act, reward, is_terminal = True) <NEW_LINE> rewards[epch] = reward <NEW_LINE> if len(self.memory) > self.exp_count + replay_freq: <NEW_LINE> <INDENT> mini_batch = random.sample(self.memory, mini_batch_size) <NEW_LINE> X_train = [] <NEW_LINE> Y_train = [] <NEW_LINE> for exp in mini_batch: <NEW_LINE> <INDENT> State, Action, Reward, New_state = exp <NEW_LINE> Q = self.model.predict(State.reshape(1,1,self.x_shape[0],self.x_shape[1]), batch_size=1) <NEW_LINE> y = Q.copy() <NEW_LINE> if New_state is not None: <NEW_LINE> <INDENT> New_Q = self.model.predict(New_state.reshape(1,1,self.x_shape[0],self.x_shape[1]), batch_size=1) <NEW_LINE> maxQ = New_Q.max() <NEW_LINE> update = Reward + self.gamma*maxQ <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> update = Reward <NEW_LINE> <DEDENT> y[0][Action] = update <NEW_LINE> X_train.append(State.reshape(1,self.x_shape[0],self.x_shape[1])) <NEW_LINE> Y_train.append(y.flatten()) <NEW_LINE> <DEDENT> self.model.fit(numpy.array(X_train), numpy.array(Y_train), batch_size=mini_batch_size, nb_epoch=1, verbose=0) <NEW_LINE> del self.memory[0:replay_freq] <NEW_LINE> <DEDENT> if epch%disp_freq == 0 and epch >= disp_freq: <NEW_LINE> <INDENT> if self.epsilon*decay < 0.1: <NEW_LINE> <INDENT> decay = 1.0 <NEW_LINE> <DEDENT> self.epsilon = self.epsilon*decay <NEW_LINE> print ("rewards at epoch %s is %s and epsiolon is %s:"%(epch,rewards[epch-disp_freq:epch].mean(), self.epsilon)) <NEW_LINE> f.write("rewards at epoch %s is %s and epsiolon is 
%s\n:"%(epch,rewards[epch-disp_freq:epch].mean(), self.epsilon)) <NEW_LINE> print ("%s epochs are done in %s"%(epch, time.time()-start_time)) <NEW_LINE> <DEDENT> <DEDENT> f.close() <NEW_LINE> open("model",'w').write(self.model.to_json()) <NEW_LINE> self.model.save_weights('model_weights.h5') | training a DQLsolver. | 625941bf3617ad0b5ed67e32 |
def send_redfish_request (serverid, uri, service_port = 0, method = "GET", data = None, encrypted = True, connect_timeout = 2): <NEW_LINE> <INDENT> access = get_blade_map_library () <NEW_LINE> user = create_string_buffer (48) <NEW_LINE> passwd = create_string_buffer (48) <NEW_LINE> status = access.get_server_rest_access (byref (user), byref (passwd)) <NEW_LINE> if (status != 0): <NEW_LINE> <INDENT> raise RuntimeError ("Failed to get server access credentials: {0}".format (status)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ip = get_hostname_by_serverid (serverid) <NEW_LINE> cmd = ["curl", "-sS", "-k", "-i", "-u", "{0}:{1}".format (user.value, passwd.value), "-X", method, "--connect-timeout", "{0}".format (connect_timeout)] <NEW_LINE> if (data): <NEW_LINE> <INDENT> cmd.extend (["-d", "{0}".format (data), "-H", "Content-Type: application/json"]) <NEW_LINE> <DEDENT> cmd.append ("{0}://{1}{2}{3}".format ("https" if (encrypted) else "http", ip, "" if (not service_port) else ":{0}".format (service_port), uri)) <NEW_LINE> response = subprocess.check_output (cmd, stderr = subprocess.STDOUT) <NEW_LINE> status, body = parse_http_response (response) <NEW_LINE> if (status >= 300): <NEW_LINE> <INDENT> raise rest_http_error (status, body) <NEW_LINE> <DEDENT> return body <NEW_LINE> <DEDENT> except subprocess.CalledProcessError as error: <NEW_LINE> <INDENT> raise RuntimeError (error.output.strip ()) | Send a Redfish request to the REST service on a system.
:param serverid: The ID for the server to send the request to.
:param uri: The resource to access.
:param service_port: The port the REST service is listening on.
:param method: The HTTP request type to execute.
:param data: The data to send with the request.
:param encrypted: Flag indicating if HTTPS should be used for the requested instead of plain
HTTP.
:param connect_timeout: The number of seconds to wait for a connection before timing out the
request.
:return The data returned from the Redfish request. | 625941bfcc40096d6159588b |
def verify_domain(self, value): <NEW_LINE> <INDENT> parsed = urlparse('//%s' % value) <NEW_LINE> if parsed.scheme or parsed.path or parsed.params or parsed.query or parsed.fragment or parsed.username or parsed.password or parsed.port: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True | verify a domain (we need this in multiple places) | 625941bf30dc7b76659018a2 |
def convert_to_mrtrix(order): <NEW_LINE> <INDENT> dim_sh = dimension(order) <NEW_LINE> conversion_matrix = np.zeros((dim_sh, dim_sh)) <NEW_LINE> for j in range(dim_sh): <NEW_LINE> <INDENT> l = sh_degree(j) <NEW_LINE> m = sh_order(j) <NEW_LINE> if m == 0: <NEW_LINE> <INDENT> conversion_matrix[j, j] = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conversion_matrix[j, j - 2*m] = np.sqrt(2) <NEW_LINE> <DEDENT> <DEDENT> return conversion_matrix | Returns the linear matrix used to convert coefficients into the mrtrix
convention for spherical harmonics.
Parameters
----------
order : int
Returns
-------
conversion_matrix : array-like, shape (dim_sh, dim_sh) | 625941bfd8ef3951e3243477 |
def get_app(self, app_id, version=None): <NEW_LINE> <INDENT> app_id = util.normalize_marathon_id_path(app_id) <NEW_LINE> if version is None: <NEW_LINE> <INDENT> path = 'v2/apps{}'.format(app_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path = 'v2/apps{}/versions/{}'.format(app_id, version) <NEW_LINE> <DEDENT> response = self._rpc.http_req(http.get, path) <NEW_LINE> if version is None: <NEW_LINE> <INDENT> return response.json().get('app') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return response.json() | Returns a representation of the requested application version. If
version is None the return the latest version.
:param app_id: the ID of the application
:type app_id: str
:param version: application version as a ISO8601 datetime
:type version: str
:returns: the requested Marathon application
:rtype: dict | 625941bf76e4537e8c3515aa |
def scaled_imagedata(imageid, size='150x150'): <NEW_LINE> <INDENT> url = scaled_imageurl(imageid, size) <NEW_LINE> http = httplib2.Http() <NEW_LINE> response, content = http.request(url, 'GET') <NEW_LINE> if str(response.status) == '200': <NEW_LINE> <INDENT> return content <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None | Returns the datasteream of a scaled image. | 625941bf30bbd722463cbcfd |
def validate_host(cert, name): <NEW_LINE> <INDENT> cn = None <NEW_LINE> for t, v in cert.get_subject().get_components(): <NEW_LINE> <INDENT> if t == b'CN': <NEW_LINE> <INDENT> cn = v <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return cn == name | Validates host name against certificate
@param cert: Certificate returned by host
@param name: Actual host name used for connection
@return: Returns true if host name matches certificate | 625941bf55399d3f055885ed |
def shell_sort(data): <NEW_LINE> <INDENT> count = len(data) <NEW_LINE> step = 2 <NEW_LINE> group = np.int(count / step) <NEW_LINE> while group > 0: <NEW_LINE> <INDENT> for i in range(group): <NEW_LINE> <INDENT> j = i + group <NEW_LINE> while j < count: <NEW_LINE> <INDENT> k = j - group <NEW_LINE> key = data[j] <NEW_LINE> while k >= 0 and key < data[k]: <NEW_LINE> <INDENT> data[k + group] = data[k] <NEW_LINE> k -= group <NEW_LINE> <DEDENT> data[k + group] = key <NEW_LINE> j += group <NEW_LINE> <DEDENT> <DEDENT> group = np.int(group / step) <NEW_LINE> <DEDENT> return data | DL.Shell sort | 625941bff7d966606f6a9f3b |
def maas(input_dim, num_classes=29, num_hiddens=1824, dropout=0.1, max_value=20): <NEW_LINE> <INDENT> input_data = Input(name='the_input', shape=(None, input_dim)) <NEW_LINE> o = input_data <NEW_LINE> o = TimeDistributed(Dense(num_hiddens))(o) <NEW_LINE> o = Activation(lambda x: relu(x, max_value= max_value), name='relu1')(o) <NEW_LINE> o = TimeDistributed(Dense(num_hiddens))(o) <NEW_LINE> o = TimeDistributed(Dense(num_hiddens))(o) <NEW_LINE> o = Activation(lambda x: relu(x, max_value= max_value), name='relu2')(o) <NEW_LINE> o = TimeDistributed(Dense(num_hiddens))(o) <NEW_LINE> o = Bidirectional(SimpleRNN(num_hiddens, return_sequences=True, activation= 'relu', kernel_initializer="he_normal", dropout=0.1))(o) <NEW_LINE> o = TimeDistributed(Dense(num_hiddens))(o) <NEW_LINE> o = Activation(lambda x: relu(x, max_value= max_value), name='relu3')(o) <NEW_LINE> o = TimeDistributed(Dense(num_hiddens))(o) <NEW_LINE> o = TimeDistributed(Dense(num_hiddens))(o) <NEW_LINE> o = Activation(lambda x: relu(x, max_value= max_value), name='relu4')(o) <NEW_LINE> o = TimeDistributed(Dense(num_hiddens))(o) <NEW_LINE> o = TimeDistributed(Dense(num_classes))(o) <NEW_LINE> y_pred = Activation('softmax', name='softmax')(o) <NEW_LINE> model = Model(inputs=input_data, outputs=y_pred) <NEW_LINE> model.output_length = lambda x: x <NEW_LINE> print(model.summary()) <NEW_LINE> return model | Maas' model.
Reference:
[1] Maas, Andrew L., et al. "Lexicon-Free Conversational Speech
Recognition with Neural Networks." HLT-NAACL. 2015. | 625941bfd53ae8145f87a1ae |
def test_is_valid_manifest_format_with_many_types_of_errors(caplog): <NEW_LINE> <INDENT> result = is_valid_manifest_format( "tests/validate_manifest_format/manifests/manifest_with_many_types_of_errors.tsv", ) <NEW_LINE> error_log = caplog.text <NEW_LINE> manifest_with_many_types_of_errors_helper(error_log) <NEW_LINE> assert result == False | Test that errors with md5, file size, url, and authz all get detected and
error logged | 625941bfd10714528d5ffc1a |
def consent_info(self, operator, ident): <NEW_LINE> <INDENT> entity = self.util.get_target(ident, restrict_to=[]) <NEW_LINE> self.check_consent_support(entity) <NEW_LINE> self.ba.can_show_consent_info(operator.get_entity_id(), entity) <NEW_LINE> consents = [] <NEW_LINE> for row in entity.list_consents(entity_id=entity.entity_id, filter_expired=False): <NEW_LINE> <INDENT> consent, consent_type = self._get_consent(int(row['consent_code'])) <NEW_LINE> consents.append({ 'consent_name': six.text_type(consent), 'consent_type': six.text_type(consent_type), 'consent_time_set': row['time_set'], 'consent_time_expire': row['expiry'], 'consent_description': row['description'], }) <NEW_LINE> <DEDENT> if not consents: <NEW_LINE> <INDENT> name = self._get_entity_name(entity.entity_id, entity.entity_type) <NEW_LINE> raise CerebrumError( "'%s' (entity_type=%s, entity_id=%s) has no consents set" % ( name, six.text_type(self.const.EntityType(entity.entity_type)), entity.entity_id)) <NEW_LINE> <DEDENT> return consents | View all set consents for a given entity. | 625941bf9b70327d1c4e0d0e |
def test_connectHTTP(self): <NEW_LINE> <INDENT> expectedHost = b'example.com' <NEW_LINE> expectedPort = 1234 <NEW_LINE> endpoint = self.agent._getEndpoint( URI.fromBytes(b'http://' + b':'.join([expectedHost, str(expectedPort).encode('charmap')]) + b'/')) <NEW_LINE> self.assertEqual(endpoint._host, expectedHost) <NEW_LINE> self.assertEqual(endpoint._port, expectedPort) <NEW_LINE> self.assertIsInstance(endpoint, TCP4ClientEndpoint) | L{Agent._getEndpoint} return a C{TCP4ClientEndpoint} when passed a
scheme of C{'http'}. | 625941bf656771135c3eb7a6 |
def ts2dt(self, ts): <NEW_LINE> <INDENT> return datetime.datetime.fromtimestamp(int(ts)) | Convert this epoch timestamp to a datetime
| 625941bfd18da76e2353240d |
def activate(self): <NEW_LINE> <INDENT> super(BotPlugin, self).activate() | Triggered on plugin activation.
Override this method if you want to do something at initialization phase
(don't forget to `super().activate()`). | 625941bfbe383301e01b53c5 |
def check_planets(self): <NEW_LINE> <INDENT> checked_list = [] <NEW_LINE> if self.mercuryCheckBox.checkState(): <NEW_LINE> <INDENT> checked_list.append(mercury) <NEW_LINE> <DEDENT> if self.venusCheckBox.checkState(): <NEW_LINE> <INDENT> checked_list.append(venus) <NEW_LINE> <DEDENT> if self.earthCheckBox.checkState(): <NEW_LINE> <INDENT> checked_list.append(earth) <NEW_LINE> <DEDENT> if self.marsCheckBox.checkState(): <NEW_LINE> <INDENT> checked_list.append(mars) <NEW_LINE> <DEDENT> if self.jupiterCheckBox.checkState(): <NEW_LINE> <INDENT> checked_list.append(jupiter) <NEW_LINE> <DEDENT> if self.saturnCheckBox.checkState(): <NEW_LINE> <INDENT> checked_list.append(saturn) <NEW_LINE> <DEDENT> if self.uranusCheckBox.checkState(): <NEW_LINE> <INDENT> checked_list.append(uranus) <NEW_LINE> <DEDENT> if self.neptuneCheckBox.checkState(): <NEW_LINE> <INDENT> checked_list.append(neptune) <NEW_LINE> <DEDENT> return checked_list | generates a list of all the planets that are checked | 625941bfa79ad161976cc07f |
def gram(listof, n): <NEW_LINE> <INDENT> listx = [] <NEW_LINE> for word in listof: <NEW_LINE> <INDENT> listx.extend(ngram(word,n)) <NEW_LINE> <DEDENT> return listx | By calling ngram(), this function returns a whole list of words
:param listof: A list of words to be sliced
:param n: how long the slice would be
:return: A list of sliced words. | 625941bf5f7d997b871749cf |
def get_spark_statement( self, session_id, statement_id, **kwargs ): <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> url = self.get_spark_statement.metadata['url'] <NEW_LINE> path_format_arguments = { 'endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True), 'sparkPoolName': self._serialize.url("self._config.spark_pool_name", self._config.spark_pool_name, 'str', skip_quote=True), 'sessionId': self._serialize.url("session_id", session_id, 'int'), 'statementId': self._serialize.url("statement_id", statement_id, 'int'), } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Accept'] = 'application/json' <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response) <NEW_LINE> <DEDENT> deserialized = self._deserialize('SparkStatement', pipeline_response) <NEW_LINE> if cls: <NEW_LINE> <INDENT> return cls(pipeline_response, deserialized, {}) <NEW_LINE> <DEDENT> return deserialized | Gets a single statement within a spark session.
:param session_id: Identifier for the session.
:type session_id: int
:param statement_id: Identifier for the statement.
:type statement_id: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SparkStatement, or the result of cls(response)
:rtype: ~azure.synapse.spark.models.SparkStatement
:raises: ~azure.core.exceptions.HttpResponseError | 625941bf283ffb24f3c5583e |
def complete_fields(self, args, prefix): <NEW_LINE> <INDENT> super(ReceivedSharesListCommand, self).__call__(args) <NEW_LINE> cli = self.ls.rshares <NEW_LINE> return cli.get_rbu().get_keys(True) | TODO | 625941bfa8370b77170527db |
def add_default_options(self, parser): <NEW_LINE> <INDENT> parser.add_argument( '-v', '--verbosity', action='count', default=1, help="set the verbosity of output" ) <NEW_LINE> parser.add_argument( '-V', '--version', action='version', version=__version__ ) <NEW_LINE> parser.add_argument( '--no-input', action='store_false', default=True, dest="input_enabled", help="Don't ask for user input. If any action would be destructive, " "an error will be raised; otherwise, default answers will be " "assumed." ) | Add the default options that exist on *all* commands
:param parser: a stub argparse parser for the command. | 625941bf187af65679ca5058 |
def __init__( self, uri, protocol_info, import_uri=None, size=None, duration=None, bitrate=None, sample_frequency=None, bits_per_sample=None, nr_audio_channels=None, resolution=None, color_depth=None, protection=None, ): <NEW_LINE> <INDENT> self.uri = uri <NEW_LINE> self.protocol_info = protocol_info <NEW_LINE> self.import_uri = import_uri <NEW_LINE> self.size = size <NEW_LINE> self.duration = duration <NEW_LINE> self.bitrate = bitrate <NEW_LINE> self.sample_frequency = sample_frequency <NEW_LINE> self.bits_per_sample = bits_per_sample <NEW_LINE> self.nr_audio_channels = nr_audio_channels <NEW_LINE> self.resolution = resolution <NEW_LINE> self.color_depth = color_depth <NEW_LINE> self.protection = protection | Args:
uri (str): value of the ``<res>`` tag, typically a URI. It
**must** be properly escaped (percent encoded) as
described in :rfc:`3986`
protocol_info (str): a string in the form a:b:c:d that
identifies the streaming or transport protocol for
transmitting the resource. A value is required. For more
information see section 2.5.2 of the `UPnP specification [
pdf]
<http://upnp.org/specs/av/UPnP-av-ConnectionManager-v1-
Service.pdf>`_
import_uri (str, optional): uri locator for resource update.
size (int, optional): size in bytes.
duration (str, optional): duration of the playback of the res
at normal speed (``H*:MM:SS:F*`` or ``H*:MM:SS:F0/F1``)
bitrate (int, optional): bitrate in bytes/second.
sample_frequency (int, optional): sample frequency in Hz.
bits_per_sample (int, optional): bits per sample.
nr_audio_channels (int, optional): number of audio channels.
resolution (str, optional): resolution of the resource (X*Y).
color_depth (int, optional): color depth in bits.
protection (str, optional): statement of protection type.
Note:
Not all of the parameters are used by Sonos. In general, only
``uri``, ``protocol_info`` and ``duration`` seem to be important. | 625941bfbf627c535bc13109 |
def forward(self, feature_vectors): <NEW_LINE> <INDENT> conv_out = self.conv(feature_vectors.permute(0, 2, 1).float()) <NEW_LINE> post_activation = self.tanh(conv_out) <NEW_LINE> if self.max_pool: <NEW_LINE> <INDENT> return post_activation.max(2)[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return post_activation.mean(2) | Output the encoding of a sentence given a batch of sequence of word embeddings
:param feature_vectors: Variable wrapping torch tensor of dimensions batch_size x sentence_length x input_size
:return: Variable wrapping a torch tensor of dimensions batch_size x output_size | 625941bfe64d504609d7477a |
def delete_cache_security_group(name, region=None, key=None, keyid=None, profile=None): <NEW_LINE> <INDENT> conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) <NEW_LINE> deleted = conn.delete_cache_security_group(name) <NEW_LINE> if deleted: <NEW_LINE> <INDENT> log.info("Deleted cache security group %s.", name) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = "Failed to delete cache security group {0}.".format(name) <NEW_LINE> log.error(msg) <NEW_LINE> return False | Delete a cache security group.
CLI example::
salt myminion boto_elasticache.delete_cache_security_group myelasticachesg 'My Cache Security Group' | 625941bfadb09d7d5db6c6cb |
def strip_sequences_at_value(sequences, value): <NEW_LINE> <INDENT> stripped = [] <NEW_LINE> for seq in sequences: <NEW_LINE> <INDENT> seq = np.array(seq) <NEW_LINE> idx = np.where(seq == value)[0] <NEW_LINE> if idx.size: <NEW_LINE> <INDENT> seq[idx[0]:] = value <NEW_LINE> <DEDENT> stripped.append(seq) <NEW_LINE> <DEDENT> return stripped | Sets all values after the first occurrence of `value` to value`.
For example:
sequences = [[0, 1, 2], [0, 1, 0, 1]]
value = 1
Returns: [[0, 1, 1], [0, 1, 1, 1]]
Args:
sequences: An iterable of integer-encoded sequences, possibly of different
length.
value: An integer value.
Returns:
A list of sequences with all values set to `value` after the first
occurrence of `value`. | 625941bf711fe17d825422ab |
def load_projects_file(): <NEW_LINE> <INDENT> return json.load(open('data/projects.json')) | Loads the projects.json file and converts it to JSON. | 625941bff7d966606f6a9f3c |
def _binary( self, other: Union["NumDict", float, int], op: Callable[[float, float], float] ) -> "NumDict": <NEW_LINE> <INDENT> _other: "NumDict" <NEW_LINE> if isinstance(other, (float, int)): <NEW_LINE> <INDENT> _other = NumDict(default=other) <NEW_LINE> <DEDENT> elif isinstance(other, NumDict): <NEW_LINE> <INDENT> _other = other <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> keys = set(self.keys()) | set(_other.keys()) <NEW_LINE> mapping = {k: op(self[k], _other[k]) for k in keys} <NEW_LINE> default: Optional[float] <NEW_LINE> if self.default is None: <NEW_LINE> <INDENT> default = None <NEW_LINE> <DEDENT> elif _other.default is None: <NEW_LINE> <INDENT> default = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> default = op(self.default, _other.default) <NEW_LINE> <DEDENT> return NumDict(mapping, default) | Apply binary op to each element of self and other.
Returns a new numdict.
If other is a constant c, acts as if other[key] = c.
If both self and other define defaults, the new default is equal to
op(self_default, other_default). Otherwise no default is defined. | 625941bf498bea3a759b99ea |
def get_fw_dict(self): <NEW_LINE> <INDENT> fw_dict = {} <NEW_LINE> if self.fw_id is None: <NEW_LINE> <INDENT> return fw_dict <NEW_LINE> <DEDENT> fw_dict = {'rules': {}, 'tenant_name': self.tenant_name, 'tenant_id': self.tenant_id, 'fw_id': self.fw_id, 'fw_name': self.fw_name, 'firewall_policy_id': self.active_pol_id, 'fw_type': self.fw_type, 'router_id': self.router_id} <NEW_LINE> if self.active_pol_id not in self.policies: <NEW_LINE> <INDENT> return fw_dict <NEW_LINE> <DEDENT> pol_dict = self.policies[self.active_pol_id] <NEW_LINE> for rule in pol_dict['rule_dict']: <NEW_LINE> <INDENT> fw_dict['rules'][rule] = self.rules[rule] <NEW_LINE> <DEDENT> return fw_dict | This API creates a FW dictionary from the local attributes. | 625941bf23e79379d52ee4a0 |
@login_required <NEW_LINE> def workflow(request): <NEW_LINE> <INDENT> groups_list = Roster.objects.filter(user=request.user).values('group') <NEW_LINE> groups = Group.objects.filter(group_id__in=groups_list) <NEW_LINE> num_groups = groups_list.count() <NEW_LINE> form = FileSearchForm(request.GET, user=request.user) <NEW_LINE> form.is_valid() <NEW_LINE> files = form.results(page=request.GET.get("page")) <NEW_LINE> num_files = File.objects.filter(uploaded_by=request.user).count() <NEW_LINE> return render(request, "users/workflow.html", { "num_groups": num_groups, "num_files": num_files, "groups": groups, "files": files, 'FileType': FileType, 'FileStatus': FileStatus, }) | Workflow page. Basically a home/profile page for users
that do not have admin access. | 625941bf99fddb7c1c9de2cd |
def encode(orig, bpe_codes, cache=None): <NEW_LINE> <INDENT> if cache is None: <NEW_LINE> <INDENT> cache = {} <NEW_LINE> <DEDENT> if orig in cache: <NEW_LINE> <INDENT> return cache[orig] <NEW_LINE> <DEDENT> word = tuple(orig) + ('</w>',) <NEW_LINE> pairs = get_pairs(word) <NEW_LINE> while True: <NEW_LINE> <INDENT> bigram = min(pairs, key = lambda pair: bpe_codes.get(pair, float('inf'))) <NEW_LINE> if bigram not in bpe_codes: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> first, second = bigram <NEW_LINE> new_word = [] <NEW_LINE> i = 0 <NEW_LINE> while i < len(word): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> j = word.index(first, i) <NEW_LINE> new_word.extend(word[i:j]) <NEW_LINE> i = j <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> new_word.extend(word[i:]) <NEW_LINE> break <NEW_LINE> <DEDENT> if word[i] == first and i < len(word)-1 and word[i+1] == second: <NEW_LINE> <INDENT> new_word.append(first+second) <NEW_LINE> i += 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_word.append(word[i]) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> <DEDENT> new_word = tuple(new_word) <NEW_LINE> word = new_word <NEW_LINE> if len(word) == 1: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pairs = get_pairs(word) <NEW_LINE> <DEDENT> <DEDENT> if word[-1] == '</w>': <NEW_LINE> <INDENT> word = word[:-1] <NEW_LINE> <DEDENT> elif word[-1].endswith('</w>'): <NEW_LINE> <INDENT> word = word[:-1] + (word[-1].replace('</w>',''),) <NEW_LINE> <DEDENT> cache[orig] = word <NEW_LINE> return word | (Subword Encoding) Encode word based on list of BPE merge operations, which are applied consecutively | 625941bf7c178a314d6ef395 |
def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.discriminator = None <NEW_LINE> DreamMakerRequest.__init__(self, *args, **kwargs) | Body35 - a model defined in Swagger | 625941bf9c8ee82313fbb6af |
def testPinDescInstances(self): <NEW_LINE> <INDENT> if self.rcpod.model == "PIC16C745": <NEW_LINE> <INDENT> self.assertEqual(len(self.rcpod.pins), 19) <NEW_LINE> <DEDENT> elif self.rcpod.model == "PIC16C765": <NEW_LINE> <INDENT> self.assertEqual(len(self.rcpod.pins), 30) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fail("Unknown model number: %r" % self.rcpod.model) <NEW_LINE> <DEDENT> seenValues = [] <NEW_LINE> for name, pin in self.rcpod.pins.iteritems(): <NEW_LINE> <INDENT> self.assertEqual(getattr(self.rcpod, name), pin, "Pin not installed") <NEW_LINE> self.assert_(isinstance(pin, pyrcpod.device.Pin), "Not a pin instance") <NEW_LINE> self.assert_(pin.value not in seenValues, "Duplicate pin value") <NEW_LINE> seenValues.append(pin.value) | verify that the set of pin descriptors for this device looks sane | 625941bfec188e330fd5a6de |
def __init__( self, *, region: str, **kwargs ): <NEW_LINE> <INDENT> super(RegionForOnlineOffline, self).__init__(**kwargs) <NEW_LINE> self.region = region | :keyword region: Required. Cosmos DB region, with spaces between words and each word
capitalized.
:paramtype region: str | 625941bf8c3a8732951582f2 |
def create_instance(c_instance): <NEW_LINE> <INDENT> return Eeboo(c_instance) | Creates and returns the Launchpad script | 625941bf2c8b7c6e89b356fc |
def do_callers(self, args): <NEW_LINE> <INDENT> return self.game.print_caller_stats() | Display caller stats | 625941bf76d4e153a657ea6a |
def cross(self, father, mother): <NEW_LINE> <INDENT> do_cross = True if random() <= self.pc else False <NEW_LINE> if not do_cross: <NEW_LINE> <INDENT> return father, mother <NEW_LINE> <DEDENT> chrom1 = father.chromsome.copy() <NEW_LINE> chrom2 = mother.chromsome.copy() <NEW_LINE> for i, (g1, g2) in enumerate(zip(chrom1, chrom2)): <NEW_LINE> <INDENT> do_exchange = True if random() < self.pe else False <NEW_LINE> if do_exchange: <NEW_LINE> <INDENT> chrom1[i], chrom2[i] = g2, g1 <NEW_LINE> <DEDENT> <DEDENT> child1, child2 = father.clone(), father.clone() <NEW_LINE> child1.init(chromsome=chrom1) <NEW_LINE> child2.init(chromsome=chrom2) <NEW_LINE> return child1, child2 | Cross chromsomes of parent using uniform crossover method. | 625941bf91af0d3eaac9b950 |
def loss(y,yhat): <NEW_LINE> <INDENT> return np.mean(np.abs(y-yhat)) | :param y: the read fares
:param yhat: the estimated fares
:return: how good is the estimateed fares | 625941bf07f4c71912b113bb |
def _TerminateProcessByPid(self, pid): <NEW_LINE> <INDENT> self._RaiseIfNotRegistered(pid) <NEW_LINE> process = self._processes_per_pid[pid] <NEW_LINE> self._TerminateProcess(process) <NEW_LINE> self._StopMonitoringProcess(process) | Terminate a process that's monitored by the engine.
Args:
pid (int): process identifier (PID).
Raises:
KeyError: if the process is not registered with and monitored by the
engine. | 625941bf23e79379d52ee4a1 |
def rest_put(self, host, suburi, request_headers, request_body, user_name, password): <NEW_LINE> <INDENT> if not isinstance(request_headers, dict): request_headers = dict() <NEW_LINE> request_headers['Content-Type'] = 'application/json' <NEW_LINE> return self._op('PUT', host, suburi, request_headers, request_body, user_name, password) | REST PUT | 625941bfd6c5a10208143f83 |
def neighbors(dice): <NEW_LINE> <INDENT> neighbors_list = [] <NEW_LINE> r = dice["row"] <NEW_LINE> c = dice["column"] <NEW_LINE> m,n = maze.shape <NEW_LINE> for pair in [(r,c+1),(r,c-1),(r-1,c),(r+1,c)]: <NEW_LINE> <INDENT> if math.floor(pair[0]/m) != 0 or math.floor(pair[1]/n) != 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif maze[pair[0],pair[1]] == "*": <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> neighbors_list.append(pair) <NEW_LINE> <DEDENT> <DEDENT> return neighbors_list | Function to get all the neighbours of the given orientation of a dice
:param dice: The dictionary containing top, north, east, row and column as keys
:return: List of all the neighbours of the given orientation of a dice | 625941bfd7e4931a7ee9de57 |
def type_convert(k): <NEW_LINE> <INDENT> types = {'process_load': 0, 'ping': 3, 'service_api': 3} <NEW_LINE> if k in types: <NEW_LINE> <INDENT> return types[k] <NEW_LINE> <DEDENT> return None | History object types to return.
Possible values:
0 - numeric float;
1 - character;
2 - log;
3 - numeric unsigned;
4 - text.
Default: 3. | 625941bfb545ff76a8913d51 |
def get_sentence_context_window(self): <NEW_LINE> <INDENT> start_boundary = re.compile(r'[A-ZА-Я]\s[.?!]') <NEW_LINE> end_boundary = re.compile(r'[.?!]\s[A-ZА-Я]') <NEW_LINE> before_cw = self.wholestring[:self.tokenposition[0].wordbeg+1] <NEW_LINE> after_cw = self.wholestring[self.tokenposition[len(self.tokenposition)-1].wordend:] <NEW_LINE> start_result = re.search(start_boundary, before_cw[::-1]) <NEW_LINE> end_result = re.search(end_boundary, after_cw) <NEW_LINE> if end_result == None: <NEW_LINE> <INDENT> self.windowposition.end = len(self.wholestring) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.windowposition.end = self.tokenposition[len(self.tokenposition)-1].wordend + end_result.start() + 1 <NEW_LINE> <DEDENT> if start_result == None: <NEW_LINE> <INDENT> self.windowposition.start = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.windowposition.start = len(before_cw) - start_result.start() - 1 | This method changes ContextWindow.windowposition to include complete
sentence which results from existing ContextWindow. If existing
ContextWindow include parts of several sentences, all of them will
be added to the ContextWindow in question. | 625941bf8a43f66fc4b53fa2 |
def wrapper_GDALVectorTranslateDestName(*args): <NEW_LINE> <INDENT> return _gdal.wrapper_GDALVectorTranslateDestName(*args) | wrapper_GDALVectorTranslateDestName(char dest, Dataset srcDS, GDALVectorTranslateOptions options,
GDALProgressFunc callback = 0, void callback_data = None) -> Dataset | 625941bf7b25080760e39395 |
def skipIf(self, expr, msg=None): <NEW_LINE> <INDENT> if expr: raise self.skipException(msg) | Skip the test if the expression is true. | 625941bfbe7bc26dc91cd53f |
def handle_authenticate(self, env, data): <NEW_LINE> <INDENT> method = env.get('REQUEST_METHOD') <NEW_LINE> if method == 'POST': <NEW_LINE> <INDENT> response = self.get_auth_tokens(env, data) <NEW_LINE> <DEDENT> elif method == 'PUT': <NEW_LINE> <INDENT> response = self.renew_auth_token(env, data) <NEW_LINE> <DEDENT> elif method == 'DELETE': <NEW_LINE> <INDENT> response = self.revoke_auth_token(env, data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> error = f"/authenticate endpoint accepts only POST, PUT, or DELETE methods. Received: {method}" <NEW_LINE> _log.warning(error) <NEW_LINE> return Response(error, status='405 Method Not Allowed', content_type='text/plain') <NEW_LINE> <DEDENT> return response | Callback for /authenticate endpoint.
Routes request based on HTTP method and returns a text/plain encoded token or error.
:param env:
:param data:
:return: Response | 625941bf2eb69b55b151c7e7 |
def testUsbReattach(self, oSession, oTxsSession, sUsbCtrl, sSpeed, sCaptureFile = None): <NEW_LINE> <INDENT> sGadgetHost, uGadgetPort = self.getGadgetParams(self.sHostname, sSpeed); <NEW_LINE> oUsbGadget = usbgadget.UsbGadget(); <NEW_LINE> reporter.log('Connecting to UTS: ' + sGadgetHost); <NEW_LINE> fRc = oUsbGadget.connectTo(30 * 1000, sGadgetHost, uPort = uGadgetPort); <NEW_LINE> if fRc is True: <NEW_LINE> <INDENT> self.oVBox.host.addUSBDeviceSource('USBIP', sGadgetHost, sGadgetHost + (':%s' % oUsbGadget.getUsbIpPort()), [], []); <NEW_LINE> fSuperSpeed = False; <NEW_LINE> if sSpeed == 'Super': <NEW_LINE> <INDENT> fSuperSpeed = True; <NEW_LINE> <DEDENT> fRc = oUsbGadget.impersonate(usbgadget.g_ksGadgetImpersonationTest, fSuperSpeed); <NEW_LINE> if fRc is True: <NEW_LINE> <INDENT> iBusId, _ = oUsbGadget.getGadgetBusAndDevId(); <NEW_LINE> fRc = self.attachUsbDeviceToVm(oSession, '0525', 'a4a0', iBusId, sCaptureFile); <NEW_LINE> if fRc is True: <NEW_LINE> <INDENT> self.sleep(3); <NEW_LINE> for iCycle in xrange (0, self.cUsbReattachCycles): <NEW_LINE> <INDENT> fRc = oUsbGadget.disconnectUsb(); <NEW_LINE> fRc = fRc and oUsbGadget.connectUsb(); <NEW_LINE> if not fRc: <NEW_LINE> <INDENT> reporter.testFailure('Reattach cycle %s failed on the gadget device' % (iCycle)); <NEW_LINE> break; <NEW_LINE> <DEDENT> self.sleep(1); <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> reporter.testFailure('Failed to impersonate test device'); <NEW_LINE> <DEDENT> oUsbGadget.disconnectFrom(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> reporter.testFailure('Failed to connect to USB gadget'); <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> reporter.testFailure('Failed to create USB device filter'); <NEW_LINE> <DEDENT> return fRc; | Tests that rapid connect/disconnect cycles work. | 625941bfc432627299f04b7f |
def before_dispatch(sample): <NEW_LINE> <INDENT> state = api.get_workflow_status_of(sample) <NEW_LINE> if state == "stored": <NEW_LINE> <INDENT> do_action_for(sample, "recover") | Event triggered before "dispatch" transition takes place for a given sample
| 625941bf1b99ca400220a9eb |
def iter_terms(self, force: bool = False) -> Iterable[Term]: <NEW_LINE> <INDENT> return iter_terms(force=force, version=self._version_or_raise) | Iterate over terms in the ontology. | 625941bf6e29344779a6254f |
def test_filter_option_chosen(self): <NEW_LINE> <INDENT> misc.extract_settings_elvis = MagicMock(return_value=True) <NEW_LINE> check_elvis.Check.filt = MagicMock(return_value=True) <NEW_LINE> misc.setting_logger = MagicMock(side_effect=MockedLogger) <NEW_LINE> sys.argv[1] = '-filter' <NEW_LINE> return self.assertTrue(check_elvis.Check) | :return: | 625941bf091ae35668666e9e |
def get_from_file(file_path: str) -> List[TaskConfig]: <NEW_LINE> <INDENT> with open(file_path, encoding='utf-8') as file: <NEW_LINE> <INDENT> file_data = json.load(file) <NEW_LINE> <DEDENT> logging.debug('Читается файл "%s"', file_path) <NEW_LINE> result_list = [] <NEW_LINE> for cfg_id, cfg_data in file_data.items(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cfg = TaskConfig(cfg_id, cfg_data) <NEW_LINE> result_list.append(cfg) <NEW_LINE> logging.debug('Загружена конфигурация %s', cfg) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logging.warning('При загрузке конфигурации (%s' ') произошла ошибка', cfg_id) <NEW_LINE> <DEDENT> <DEDENT> return result_list | Получить конфигурации из файла
:param file_path: Путь до файла | 625941bf07d97122c41787c1 |
def parser(subparsers, repo): <NEW_LINE> <INDENT> desc = 'save changes to the local repository' <NEW_LINE> commit_parser = subparsers.add_parser( 'commit', help=desc, description=( desc.capitalize() + '. ' + 'By default all tracked modified files are committed. To customize the' ' set of files to be committed use the only, exclude, and include ' 'flags')) <NEW_LINE> commit_parser.add_argument( '-m', '--message', help='Commit message', dest='m') <NEW_LINE> helpers.oei_flags(commit_parser, repo) <NEW_LINE> commit_parser.set_defaults(func=main) | Adds the commit parser to the given subparsers object. | 625941bf5f7d997b871749d0 |
def fill_Organization(): <NEW_LINE> <INDENT> base_dir = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> filename = os.path.join(base_dir, "organization_list.csv") <NEW_LINE> handle = open(filename) <NEW_LINE> reader = csv.reader(handle, delimiter=";") <NEW_LINE> Data = collections.namedtuple('Data', "id name country") <NEW_LINE> submitter = fill_DictRoles() <NEW_LINE> for row in map(Data._make, reader): <NEW_LINE> <INDENT> country = get_or_create_obj( DictCountry, label=row.country) <NEW_LINE> get_or_create_obj( Organization, name=standardize_institute_name(row.name), role=submitter, country=country) <NEW_LINE> <DEDENT> handle.close() | Fill organization table | 625941bf8a43f66fc4b53fa3 |
def admin_pay(self, drinker_name, purchase, amount): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> self.admin_cash_position.pay_purchase(user_name=drinker_name, item_name=purchase, money_amount=amount) <NEW_LINE> self._save_admin_cash_position() <NEW_LINE> return self.get_admin_state_formatted() | :param str drinker_name:
:param str purchase: can be any string, does not need to be a buy item from the Db
:param Decimal amount: money paid (the drinker/user gets this money out from the admin cash)
:return: new state, via get_admin_state_formatted
:rtype: str | 625941bf3346ee7daa2b2ca5 |
def generate(moving, fixed): <NEW_LINE> <INDENT> vol_shape = moving.shape[1:] <NEW_LINE> ndims = len(vol_shape) <NEW_LINE> zero_phi = np.zeros([moving.shape[0], *vol_shape, ndims]) <NEW_LINE> inputs = [moving, fixed] <NEW_LINE> outputs = [fixed, zero_phi] <NEW_LINE> return inputs, outputs | Generator of inputs to Voxelmorph model in single shot | 625941bf3d592f4c4ed1cfaf |
def grouper(n, iterable): <NEW_LINE> <INDENT> it = iter(iterable) <NEW_LINE> while True: <NEW_LINE> <INDENT> chunk = tuple(itertools.islice(it, n)) <NEW_LINE> if not chunk: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> yield chunk | grouper(3, 'ABCDEFG') --> 'ABC', 'DEF', 'G' | 625941bf3539df3088e2e286 |
def isNumber(self, s): <NEW_LINE> <INDENT> i = 0 <NEW_LINE> n = len(s) <NEW_LINE> while i < n and s[i] == ' ': <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> if i < n and (s[i] == '+' or s[i] == '-'): <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> isNum = False <NEW_LINE> while i < n and s[i].isdigit(): <NEW_LINE> <INDENT> i += 1 <NEW_LINE> isNum = True <NEW_LINE> <DEDENT> if (i < n and s[i] == '.'): <NEW_LINE> <INDENT> i += 1 <NEW_LINE> while i < n and s[i].isdigit(): <NEW_LINE> <INDENT> i += 1 <NEW_LINE> isNum = True <NEW_LINE> <DEDENT> <DEDENT> if isNum and i < n and s[i] == 'e': <NEW_LINE> <INDENT> i += 1 <NEW_LINE> isNum = False <NEW_LINE> if i < n and (s[i] == '+' or s[i] == '-'): <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> while i < n and s[i].isdigit(): <NEW_LINE> <INDENT> i += 1 <NEW_LINE> isNum = True <NEW_LINE> <DEDENT> <DEDENT> while i < n and s[i] == ' ': <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> return isNum and i==n | :type s: str
:rtype: bool | 625941bfbaa26c4b54cb105d |
def new_proxy(self, ip=None, port=None, protocol=None, auth_token=None): <NEW_LINE> <INDENT> data = {} <NEW_LINE> if ip: <NEW_LINE> <INDENT> data['ip'] = ip <NEW_LINE> <DEDENT> if port: <NEW_LINE> <INDENT> data['port'] = port <NEW_LINE> <DEDENT> if protocol: <NEW_LINE> <INDENT> data['protocol'] = protocol <NEW_LINE> <DEDENT> if auth_token: <NEW_LINE> <INDENT> data['auth_token'] = auth_token <NEW_LINE> <DEDENT> return self._api_request('/proxy', method='PATCH', data=data) | Point the Hub to a new proxy | 625941bf23849d37ff7b2fcb |
def cnn_predict(images,model_path): <NEW_LINE> <INDENT> predicted_label_dict=[] <NEW_LINE> predicted_logits_dict=[] <NEW_LINE> with tf.Session() as sess: <NEW_LINE> <INDENT> ckpt_path = model_path <NEW_LINE> saver = tf.train.import_meta_graph(ckpt_path + '.meta') <NEW_LINE> saver.restore(sess, ckpt_path) <NEW_LINE> inputs = tf.get_default_graph().get_tensor_by_name('inputs:0') <NEW_LINE> classes = tf.get_default_graph().get_tensor_by_name('classes:0') <NEW_LINE> logits = tf.get_default_graph().get_tensor_by_name('logits:0') <NEW_LINE> for i in range(len(images)): <NEW_LINE> <INDENT> image_batch=[images[i]] <NEW_LINE> predicted_label = sess.run(classes, feed_dict={inputs: image_batch}) <NEW_LINE> predicted_label_dict.append(predicted_label) <NEW_LINE> predicted_logits = sess.run(logits, feed_dict={inputs: image_batch}) <NEW_LINE> predicted_logits_dict.append(predicted_logits) <NEW_LINE> <DEDENT> <DEDENT> return predicted_label_dict,predicted_logits_dict | use trained-CNN to predict the cut_images
inputs:
images
model_path
outputs:
label
logits | 625941bf67a9b606de4a7df7 |
def auto_cancel_holds(dbo): <NEW_LINE> <INDENT> sql = "UPDATE animal SET IsHold = 0 WHERE IsHold = 1 AND " "HoldUntilDate Is Not Null AND " "HoldUntilDate <= %s" % db.dd(now(dbo.timezone)) <NEW_LINE> count = db.execute(dbo, sql) <NEW_LINE> al.debug("cancelled %d holds" % (count), "animal.auto_cancel_holds", dbo) | Automatically cancels holds after the hold until date value set | 625941bfaad79263cf390978 |
def get_column_def(self): <NEW_LINE> <INDENT> static = "static" if self.static else "" <NEW_LINE> db_type = self.db_type.format(self.value_type.db_type) <NEW_LINE> return '{0} {1} {2}'.format(self.cql, db_type, static) | Returns a column definition for CQL table definition | 625941bfac7a0e7691ed400c |
def _active_contributors_id(from_date, to_date, locale, product): <NEW_LINE> <INDENT> editors = (Revision.objects .filter(created__gte=from_date) .values_list('creator', flat=True).distinct()) <NEW_LINE> reviewers = (Revision.objects .filter(reviewed__gte=from_date) .values_list('reviewer', flat=True).distinct()) <NEW_LINE> if to_date: <NEW_LINE> <INDENT> editors = editors.filter(created__lt=to_date) <NEW_LINE> reviewers = reviewers.filter(reviewed__lt=to_date) <NEW_LINE> <DEDENT> if locale: <NEW_LINE> <INDENT> editors = editors.filter(document__locale=locale) <NEW_LINE> reviewers = reviewers.filter(document__locale=locale) <NEW_LINE> <DEDENT> if product: <NEW_LINE> <INDENT> editors = editors.filter( Q(document__products=product) | Q(document__parent__products=product)) <NEW_LINE> reviewers = reviewers.filter( Q(document__products=product) | Q(document__parent__products=product)) <NEW_LINE> <DEDENT> return set(list(editors) + list(reviewers)) | Return the set of ids for the top contributors based on the params.
An active KB contributor is a user that has created or reviewed a
Revision in the given time period.
:arg from_date: start date for contributions to be included
:arg to_date: end date for contributions to be included
:arg locale: (optional) locale to filter on
:arg product: (optional) only count documents for a product | 625941bf96565a6dacc8f608 |
@invoke.task <NEW_LINE> def test(context, unit=False, cov=False, ): <NEW_LINE> <INDENT> cmd = f'pytest -s --color no --disable-pytest-warnings --tb=native frame_fixtures' <NEW_LINE> if cov: <NEW_LINE> <INDENT> cmd += ' --cov=frame_fixtures --cov-report=xml' <NEW_LINE> <DEDENT> print(cmd) <NEW_LINE> context.run(cmd) | Run tests.
| 625941bfa79ad161976cc080 |
def focus_next_child(self): <NEW_LINE> <INDENT> self.focus_target().focusNextChild() | Give focus to the next widget in the focus chain.
| 625941bfa17c0f6771cbdf8e |
def setEquation(self, eqstr, ns = {}): <NEW_LINE> <INDENT> eq = equationFromString(eqstr, self._eqfactory, buildargs=True, ns=ns) <NEW_LINE> eq.name = "eq" <NEW_LINE> for par in self._eqfactory.newargs: <NEW_LINE> <INDENT> self._addParameter(par) <NEW_LINE> <DEDENT> self._eqfactory.registerOperator("eq", eq) <NEW_LINE> self._eqfactory.wipeout(self._eq) <NEW_LINE> self._eq = eq <NEW_LINE> if self.profile is not None and self._reseq is None: <NEW_LINE> <INDENT> self.setResidualEquation('chiv') <NEW_LINE> <DEDENT> return | Set the profile equation for the FitContribution.
This sets the equation that will be used when generating the residual
for this FitContribution. The equation will be usable within
setResidualEquation as "eq", and it takes no arguments.
eqstr -- A string representation of the equation. Any Parameter
registered by addParameter or setProfile, or function
registered by setCalculator, registerFunction or
registerStringFunction can be can be used in the equation
by name. Other names will be turned into Parameters of this
FitContribution.
ns -- A dictionary of Parameters, indexed by name, that are used
in the eqstr, but not registered (default {}).
Raises ValueError if ns uses a name that is already used for a
variable. | 625941bf32920d7e50b28109 |
def add_comment(self, invoice_id, comments): <NEW_LINE> <INDENT> url = base_url + invoice_id + '/comments' <NEW_LINE> data = {} <NEW_LINE> data['payment_expected_date'] = comments.get_payment_expected_date() <NEW_LINE> data['description'] = comments.get_description() <NEW_LINE> data['show_comment_to_clients'] = comments.get_show_comment_to_clients() <NEW_LINE> json_string = { 'JSONString': dumps(data) } <NEW_LINE> response = zoho_http_client.post(url, self.details, json_string) <NEW_LINE> return parser.get_comment(response) | Add comment for an invoice.
Args:
invoice_id(str): Invoice id.
comments(instance): Comments object.
Returns:
str: Success message('Comments added.'). | 625941bf24f1403a92600aa4 |
def shortkey(self): <NEW_LINE> <INDENT> players=[self.teams[0].players[0], self.teams[1].players[0], self.teams[0].players[1], self.teams[1].players[1]] <NEW_LINE> return players | In order to write quicker return an array of four players j1 j2 j3 j4 | 625941bf82261d6c526ab3d7 |
def test_cam_quad_int_rotor_2_state_rdc(self): <NEW_LINE> <INDENT> self.flags(pcs=False, quad_int=True) <NEW_LINE> self.interpreter.run(script_file=self.cam_path+'rotor_2_state.py') <NEW_LINE> self.check_chi2(0.98319606148815675) | Test the 2-state rotor frame order model of CaM (with only RDC data). | 625941bf99cbb53fe6792b22 |
def device_info(self, deviceid): <NEW_LINE> <INDENT> return self.request('GET', '/devices/%s' % (deviceid)) | Get information about a device.
:param deviceid: The device identifier.
>>> s.device_info('002C')
{
"id" : "002C",
"name" : "Labege 4",
"type" : "4d3091a05ee16b3cc86699ab",
"last" : 1343321977,
"averageSignal": 8.065601,
"averageSnr": 8.065601,
"averageRssi": -122.56,
"state": 0,
"lat" : 43.45,
"lng" : 1.54,
"computedLocation": {
"lat" : 43.45,
"lng" : 6.54,
"radius": 500
},
"activationTime": 1404096340556,
"pac": "545CB3B17AC98BA4",
"tokenType": "CONTRACT",
"contractId": "7896541254789654aedfba4c",
"tokenEnd": 1449010800000,
"preventRenewal": false
} | 625941bf0383005118ecf51f |
def get_range_logic(data, tag, brange, type): <NEW_LINE> <INDENT> minval,maxval = brange <NEW_LINE> if minval is None: <NEW_LINE> <INDENT> minval = data[tag].min() <NEW_LINE> <DEDENT> if maxval is None: <NEW_LINE> <INDENT> maxval = data[tag].max() <NEW_LINE> <DEDENT> if type == '[]': <NEW_LINE> <INDENT> logic = (data[tag] >= minval) & (data[tag] <= maxval) <NEW_LINE> <DEDENT> elif type == '[)': <NEW_LINE> <INDENT> logic = (data[tag] >= minval) & (data[tag] < maxval) <NEW_LINE> <DEDENT> elif type == '(]': <NEW_LINE> <INDENT> logic = (data[tag] > minval) & (data[tag] <= maxval) <NEW_LINE> <DEDENT> elif type == '()': <NEW_LINE> <INDENT> logic = (data[tag] > minval) & (data[tag] < maxval) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Bad range type: '%s'" % type) <NEW_LINE> <DEDENT> return logic | logic for single range set | 625941bfcb5e8a47e48b79e8 |
def update_stress(self, series: Union[FrameorSeriesUnion, ps.TimeSeries], name: str, metadata: Optional[dict] = None) -> None: <NEW_LINE> <INDENT> series, metadata = self._parse_series_input(series, metadata) <NEW_LINE> self._update_series("stresses", series, name, metadata=metadata) | Update stresses values.
Note: the 'kind' attribute of a stress cannot be updated! To update
the 'kind' delete and add the stress again.
Parameters
----------
series : Union[FrameorSeriesUnion, ps.TimeSeries]
timeseries to update stored stress with
name : str
name of the stress to update
metadata : Optional[dict], optional
optionally provide metadata, which will update
the stored metadata dictionary, by default None | 625941bf099cdd3c635f0b97 |
def _setid(self): <NEW_LINE> <INDENT> self.id = "{}.{}.{}".format( self.category.tech, self.target.name, self.name ).lower() | Set the Unique Test ID. The ID is the plugin class name in lowercase.
Args:
None
Returns:
Nothing | 625941bfe64d504609d7477b |
def _solidLocal(self, rn): <NEW_LINE> <INDENT> return ( (1-self.rotor.gamma)*rn + self.rotor.gamma )*self.rotor.solidEqui/( (1-self.rotor.gamma)*0.75 + self.rotor.gamma ) | Distribuicao de solidez ponderada pela tracao | 625941bfb830903b967e9848 |
def export(self, defn): <NEW_LINE> <INDENT> self.exports.append(defn.__name__) <NEW_LINE> return defn | Declare a function or class as exported. | 625941bf9f2886367277a7cb |
def entr_dilute_spec(x, s_v, a, delta_0, s_th_o): <NEW_LINE> <INDENT> return s_th_o + s_v + (2 * a * R * (pd.np.log(0.5 - (x + delta_0)) - pd.np.log(x + delta_0))) | :param x: Delta_delta, change in non-stoichiometric redox extent vs. a reference
:param s_v: change in the lattice vibrational entropy caused by introducing vacancies
:param a: indicates the degrees of freedom of the defects, a < 1: additional defect ordering
:param delta_0: initial non-stoichiometry at Delta_m = 0 (reference point of the mass change data,
typically T = 400 deg C, p_O2 = 0.18 bar
Delta = delta_0 + Delta_delta
:return: fit function based on the model in Bulfin et. al., doi: 10.1039/C7TA00822H | 625941bf090684286d50ec1e |
def get_seq_lenght(seq_arry, end_symbol): <NEW_LINE> <INDENT> scale_arry = np.argmax(seq_arry, axis=2) + np.sum(seq_arry, axis=2) <NEW_LINE> end_symbol_scale = np.argmax(end_symbol) + np.sum(end_symbol) <NEW_LINE> cond = (scale_arry != end_symbol_scale).astype(np.int) <NEW_LINE> lens = cond.sum(axis=1) <NEW_LINE> return lens | return an array of the length of each sequence in seq_arry
:param seq_arry: array of sequence should be shape of [array_size, max_sequence_length, size_of_symbol]
:param end_symbol: 1-D array code of the end_symbol
:return: array of shape [array_size] | 625941bf851cf427c661a44d |
def __init__(self, number_of_variables: int = 12, number_of_objectives=3): <NEW_LINE> <INDENT> super(DTLZ2, self).__init__(number_of_variables, number_of_objectives) | :param number_of_variables: number of decision variables of the problem
| 625941bf3617ad0b5ed67e34 |
def __call__(self, environ, start_response): <NEW_LINE> <INDENT> if environ['PATH_INFO'] == '/favicon.ico': <NEW_LINE> <INDENT> return self.favicon(environ, start_response) <NEW_LINE> <DEDENT> path = environ['PATH_INFO'] <NEW_LINE> if path in self._wsregistry: <NEW_LINE> <INDENT> environ['ws4py.app'] = self <NEW_LINE> environ['identity'] = self._wsregistry[environ['PATH_INFO']] <NEW_LINE> return self.ws(environ, start_response) <NEW_LINE> <DEDENT> return self.masterweb.app_routing(environ, start_response) | Good ol' WSGI application. This is a simple demo
so I tried to stay away from dependencies. | 625941bf29b78933be1e55ec |
def _get_commands_by_app(self): <NEW_LINE> <INDENT> if not hasattr(self, "_commands_by_app"): <NEW_LINE> <INDENT> (context_cmds, cmds_by_app, favourite_cmds) = self._group_commands() <NEW_LINE> cmds = favourite_cmds <NEW_LINE> for app_name in cmds_by_app.keys(): <NEW_LINE> <INDENT> for cmd in cmds_by_app[app_name]: <NEW_LINE> <INDENT> if not cmd.favourite: <NEW_LINE> <INDENT> cmds.append(cmd) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self._engine.logger.debug("Collected app commands for dynamic menu.") <NEW_LINE> self._commands_by_app = cmds <NEW_LINE> <DEDENT> return self._commands_by_app | This method returns a flattened list of registered app commands.
This is called directly as a part of the dynamic menu generation code
as houdini builds submenus when the user clicks on the top-level
Shotgun menu. This should execute quickly. | 625941bfc4546d3d9de7296d |
def calender_queue(self, month, year): <NEW_LINE> <INDENT> day = ['S', ' M', ' T', ' W', ' Th', 'F', ' S'] <NEW_LINE> days = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] <NEW_LINE> values = 1 <NEW_LINE> d = 1 <NEW_LINE> m = month <NEW_LINE> y = year <NEW_LINE> y0 = y - (14 - m) // 12 <NEW_LINE> x = y0 + y0 // 4 - y0 // 100 + y0 // 400 <NEW_LINE> m0 = m + 12 * ((14 - m) // 12) - 2 <NEW_LINE> d0 = (d + x + 31 * m0 // 12) % 7 <NEW_LINE> if Utility.isleap_year(str(year)): <NEW_LINE> <INDENT> days[1] = 29 <NEW_LINE> <DEDENT> row = 6 <NEW_LINE> column = 7 <NEW_LINE> print('Your Calender is\n') <NEW_LINE> for i in range(0, 6 + 1): <NEW_LINE> <INDENT> print(day[i], end=' ') <NEW_LINE> <DEDENT> print() <NEW_LINE> for i in range(row): <NEW_LINE> <INDENT> for j in range(column): <NEW_LINE> <INDENT> if values <= days[m - 1]: <NEW_LINE> <INDENT> if i == 0 and j < d0: <NEW_LINE> <INDENT> queue.enqueue(' ') <NEW_LINE> continue <NEW_LINE> <DEDENT> queue.enqueue(values) <NEW_LINE> values += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for i in range(row): <NEW_LINE> <INDENT> for j in range(column): <NEW_LINE> <INDENT> if queue.size() > 0: <NEW_LINE> <INDENT> x = queue.dequeue() <NEW_LINE> x1 = str(x).ljust(2) <NEW_LINE> print(x1, end=" ") <NEW_LINE> <DEDENT> <DEDENT> print() | This method is used to print calender of given month and year.
In this method calender is created using queue
:param month:month given ser
:param year: year given by year
:return: nothing | 625941bfb5575c28eb68df3a |
def get_button_start_mouse(xcoord, ycoord, field_size, start_buttons): <NEW_LINE> <INDENT> square = _get_square_mouse(xcoord, ycoord, field_size) <NEW_LINE> button_boundaries = _get_button_boundaries(start_buttons) <NEW_LINE> count = 1 <NEW_LINE> if square: <NEW_LINE> <INDENT> for boundaries in button_boundaries: <NEW_LINE> <INDENT> if boundaries[0][0] <= square[0] <= boundaries[0][1] and boundaries[1][0] <= square[1] <= boundaries[1][1]: <NEW_LINE> <INDENT> return count <NEW_LINE> <DEDENT> count += 1 <NEW_LINE> <DEDENT> <DEDENT> return False | gibt den angeklickten Knopf in den Einstellungen zurueck
:param xcoord: int; x-Koordinate des Mausklicks
:param ycoord: int; y-Koordinate des Mausklicks
:param field_size: float; size of a virtual field that is determined by the size of the window that inhabits the GUI
:param start_buttons: list[Button, Button, ...]; list that holds start buttons
:return: int; button's number of the clicked button | 625941bfcc40096d6159588d |
def fetch_recent_result(self, user, query): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user_collection = MongoUtil().get_collection(collection_name=settings.USER_DATA_COLLECTION) <NEW_LINE> results = list(user_collection.find(filter={"query": {'$regex': query, '$options': 'i'}})) <NEW_LINE> keywords = "\n".join(list(map(lambda x: x.get("query"), results))) <NEW_LINE> return keywords <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.exception(f"Error in fetching data in Mongo for user = {user} error = {str(e)}") <NEW_LINE> raise e | :param user: user for which recent result are being fetched
:param query: query string for recent searches
:return: keywords for recent searches matching query. | 625941bf50485f2cf553ccd4 |
def add_gene(self, gene): <NEW_LINE> <INDENT> self.genes.add(gene) | Adds Gene to the graph | 625941bfbe8e80087fb20b82 |
def recall_score(y_true, y_pred, average='micro', suffix=False): <NEW_LINE> <INDENT> true_entities = set(get_entities(y_true, suffix)) <NEW_LINE> pred_entities = set(get_entities(y_pred, suffix)) <NEW_LINE> nb_correct = len(true_entities & pred_entities) <NEW_LINE> nb_true = len(true_entities) <NEW_LINE> score = float(nb_correct) / nb_true if nb_true > 0 else 0 <NEW_LINE> return score | Compute the recall.
The recall is the ratio ``tp / (tp + fn)`` where ``tp`` is the number of
true positives and ``fn`` the number of false negatives. The recall is
intuitively the ability of the classifier to find all the positive samples.
The best value is 1 and the worst value is 0.
Args:
y_true : 2d array. Ground truth (correct) target values.
y_pred : 2d array. Estimated targets as returned by a tagger.
Returns:
score : float.
Example:
>>> from seqeval.metrics import recall_score
>>> y_true = [['O', 'O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]
>>> y_pred = [['O', 'O', 'B-MISC', 'I-MISC', 'I-MISC', 'I-MISC', 'O'], ['B-PER', 'I-PER', 'O']]
>>> recall_score(y_true, y_pred)
0.50 | 625941bfcc0a2c11143dcdcc |
def trinbr(n): <NEW_LINE> <INDENT> return((n*(n+1))//2) | Return the triangular number evaluated at n. | 625941bfeab8aa0e5d26da93 |
def Interface_two(date: 'date') -> list: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> End_date = input('Please enter the end date of the analysis (YYYY-MM-DD): ').split("-") <NEW_LINE> print() <NEW_LINE> if len(End_date[0]) != 4: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ending = datetime.date(int(End_date[0]), int(End_date[1]), int(End_date[2])) <NEW_LINE> if ending <= datetime.date.today() and ending > date: <NEW_LINE> <INDENT> return End_date <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> break <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print("Please try a new end date") <NEW_LINE> print() | Asks the user for an end date to his/her analysis. | 625941bf796e427e537b04ff |
def __init__(self, version, payload, account_sid, sid=None): <NEW_LINE> <INDENT> super(AddressInstance, self).__init__(version) <NEW_LINE> self._properties = { 'account_sid': payload['account_sid'], 'city': payload['city'], 'customer_name': payload['customer_name'], 'date_created': deserialize.rfc2822_datetime(payload['date_created']), 'date_updated': deserialize.rfc2822_datetime(payload['date_updated']), 'friendly_name': payload['friendly_name'], 'iso_country': payload['iso_country'], 'postal_code': payload['postal_code'], 'region': payload['region'], 'sid': payload['sid'], 'street': payload['street'], 'uri': payload['uri'], 'emergency_enabled': payload['emergency_enabled'], 'validated': payload['validated'], } <NEW_LINE> self._context = None <NEW_LINE> self._solution = { 'account_sid': account_sid, 'sid': sid or self._properties['sid'], } | Initialize the AddressInstance
:returns: twilio.rest.api.v2010.account.address.AddressInstance
:rtype: twilio.rest.api.v2010.account.address.AddressInstance | 625941bfe76e3b2f99f3a74c |
def assert_particle_pd0_data(self, data_particle, verify_values=False): <NEW_LINE> <INDENT> self.assert_data_particle_header(data_particle, WorkhorseDataParticleType.ADCP_PD0_PARSED_BEAM) <NEW_LINE> self.assert_data_particle_parameters(data_particle, self._pd0_parameters, verify_values) | Verify an adcp ps0 data particle
@param data_particle: ADCP_PS0DataParticle data particle
@param verify_values: bool, should we verify parameter values | 625941bf76e4537e8c3515ac |
def normalized(arr, axis=-1, order=2): <NEW_LINE> <INDENT> arr = arr.astype('float') <NEW_LINE> for i in range(3): <NEW_LINE> <INDENT> minval = arr[..., i].min() <NEW_LINE> maxval = arr[..., i].max() <NEW_LINE> if minval != maxval: <NEW_LINE> <INDENT> arr[..., i] -= minval <NEW_LINE> arr[..., i] *= (255.0 / (maxval - minval)) <NEW_LINE> <DEDENT> <DEDENT> return arr | Linear normalization
http://en.wikipedia.org/wiki/Normalization_%28image_processing%29 | 625941bf925a0f43d2549db0 |
def show_image(kwargs, call=None):
    """Show the details of an Aliyun image.

    Must be invoked as a function (``-f``/``--function``); raises
    SaltCloudSystemExit otherwise and SaltCloudNotFound when the image
    does not exist in the (optionally overridden) region.
    """
    if call != "function":
        # Fixed: the message previously named a non-existent "show_images".
        raise SaltCloudSystemExit(
            "The show_image function must be called with " "-f or --function"
        )

    if not isinstance(kwargs, dict):
        kwargs = {}

    location = get_location()
    if "location" in kwargs:
        location = kwargs["location"]

    params = {
        "Action": "DescribeImages",
        "RegionId": location,
        "ImageId": kwargs["image"],
    }

    ret = {}
    items = query(params=params)
    # The API signals errors with a "Code" key; an empty Image list means
    # the image id matched nothing in this region.
    if "Code" in items or not items["Images"]["Image"]:
        raise SaltCloudNotFound("The specified image could not be found.")

    log.debug("Total %s image found in Region %s", items["TotalCount"], location)

    for image in items["Images"]["Image"]:
        # Stringify every field for uniform display output.
        ret[image["ImageId"]] = {item: str(image[item]) for item in image}

    return ret
def get_obj_in_list(obj_name, obj_list):
    """Return the first object in *obj_list* whose ``.name`` equals *obj_name*.

    If no object matches, print the searched name together with the names
    present in the list and terminate the process with exit code 1.
    """
    for obj in obj_list:
        if obj.name == obj_name:
            return obj
    # Materialize the names: under Python 3 a bare map() in %-formatting
    # printed "<map object at 0x...>" instead of the actual names.
    names = [obj.name for obj in obj_list]
    print("Unable to find object by the name of %s in list:\n%s" % (obj_name, names))
    exit(1)
def padanjeLika(self): <NEW_LINE> <INDENT> tabelaHitrosti = [700, 600, 500, 400, 350, 300, 250, 225, 200, 190, 180, 170, 160, 150, 140, 130, 120, 110, 100, 95, 90, 85, 80, 75, 70, 65, 60, 55, 50, 45, 40, 35, 30, 25, 20, 15, 10, 5] <NEW_LINE> hitrost = tabelaHitrosti[self.igrica.level] <NEW_LINE> if self.preveriPremik(self.rotacija, self.x, self.y+1): <NEW_LINE> <INDENT> self.igrica.platno.update() <NEW_LINE> for i in self.gids: <NEW_LINE> <INDENT> self.igrica.platno.move(i, 0, k) <NEW_LINE> <DEDENT> self.y += 1 <NEW_LINE> self.padanjeId = self.igrica.platno.after(hitrost, self.padanjeLika) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.osveziTabelo(self.rotacija, self.x, self.y) <NEW_LINE> if self.koord[0][7] is None: <NEW_LINE> <INDENT> self.igrica.naslednjiLik(self.koord) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.padanjeId is not None: <NEW_LINE> <INDENT> self.igrica.platno.after_cancel(self.padanjeId) <NEW_LINE> <DEDENT> result = messagebox.askquestion("Konec igre", "Želite igrati ponovno?", icon='warning') <NEW_LINE> if result == 'no': <NEW_LINE> <INDENT> self.igrica.koncajIgro() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.igrica.ponovnaIgra() | Lik se spušča proti dnu | 625941bfbde94217f3682d2f |
def test_json_out():
    """Test json export of the GML format descriptor."""
    meta = get_format('GML').json
    assert meta['schema'] == ''
    assert meta['extension'] == '.gml'
    assert meta['mime_type'] == 'application/gml+xml'
    assert meta['encoding'] == ''
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.