code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def get_info(): <NEW_LINE> <INDENT> return r.info()
Return stats on Redis server.
625941be01c39578d7e74d67
def SetLambda(self, *args): <NEW_LINE> <INDENT> return _itkAreaOpeningImageFilterPython.itkAreaOpeningImageFilterIF3IF3_Superclass_SetLambda(self, *args)
SetLambda(self, double _arg)
625941be2c8b7c6e89b356ef
def test_linking(self): <NEW_LINE> <INDENT> for suffix in IMP.rmf.suffixes: <NEW_LINE> <INDENT> m = IMP.kernel.Model() <NEW_LINE> print("reading pdb") <NEW_LINE> h = IMP.atom.read_pdb(self.get_input_file_name("simple.pdb"), m, IMP.atom.NonAlternativePDBSelector()) <NEW_LINE> IMP.base.set_log_level(IMP.base.SILENT) <NEW_LINE> IMP.atom.add_bonds(h) <NEW_LINE> name = self.get_tmp_file_name("test_link" + suffix) <NEW_LINE> f = RMF.create_rmf_file(name) <NEW_LINE> IMP.rmf.add_hierarchy(f, h) <NEW_LINE> IMP.rmf.save_frame(f, "zero") <NEW_LINE> del f <NEW_LINE> f = RMF.open_rmf_file_read_only(name) <NEW_LINE> IMP.rmf.link_hierarchies(f, [h]) <NEW_LINE> IMP.rmf.load_frame(f, RMF.FrameID(0)) <NEW_LINE> res = IMP.atom.get_by_type(h, IMP.atom.RESIDUE_TYPE) <NEW_LINE> nres = IMP.atom.get_next_residue(IMP.atom.Residue(res[0]))
Test that linking hierarchies works
625941bed99f1b3c44c674c1
def reset_position_for_friends_image_details_from_voter(voter, twitter_profile_image_url_https, facebook_profile_image_url_https): <NEW_LINE> <INDENT> position_list_manager = PositionListManager() <NEW_LINE> position_manager = PositionManager() <NEW_LINE> stance_we_are_looking_for = ANY_STANCE <NEW_LINE> friends_vs_public = FRIENDS_ONLY <NEW_LINE> speaker_image_url_https = None <NEW_LINE> reset_all_position_image_urls_results = [] <NEW_LINE> if positive_value_exists(twitter_profile_image_url_https): <NEW_LINE> <INDENT> speaker_image_url_https = twitter_profile_image_url_https <NEW_LINE> <DEDENT> elif positive_value_exists(facebook_profile_image_url_https): <NEW_LINE> <INDENT> speaker_image_url_https = facebook_profile_image_url_https <NEW_LINE> <DEDENT> positions_for_voter_results = position_list_manager.retrieve_all_positions_for_voter( voter.id, voter.we_vote_id, stance_we_are_looking_for, friends_vs_public) <NEW_LINE> if positions_for_voter_results['position_list_found']: <NEW_LINE> <INDENT> friends_position_list = positions_for_voter_results['position_list'] <NEW_LINE> for position_object in friends_position_list: <NEW_LINE> <INDENT> reset_position_image_urls_results = position_manager.reset_position_image_details( position_object, speaker_image_url_https=speaker_image_url_https) <NEW_LINE> reset_all_position_image_urls_results.append(reset_position_image_urls_results) <NEW_LINE> <DEDENT> <DEDENT> results = { 'success': True, 'reset_all_position_results': reset_all_position_image_urls_results } <NEW_LINE> return results
Reset all position image urls in PositionForFriends from we vote image details :param voter: :param twitter_profile_image_url_https: :param facebook_profile_image_url_https: :return:
625941bef7d966606f6a9f2e
def best_attribute(self,X,Y): <NEW_LINE> <INDENT> th, i, g = float('-inf'), 0, -1 <NEW_LINE> for j in range(X.shape[0]): <NEW_LINE> <INDENT> cur_th, cur_g = DT.best_threshold(X[j,:],Y) <NEW_LINE> if cur_g > g: <NEW_LINE> <INDENT> g = cur_g <NEW_LINE> th = cur_th <NEW_LINE> i = j <NEW_LINE> <DEDENT> <DEDENT> '''i, th = sorted([(DT.best_threshold(X[j,:],Y)[1],(j,DT.best_threshold(X[j,:],Y)[0])) for j in range(X.shape[0])], key = lambda x: x[0], reverse = True)[0][1]''' <NEW_LINE> return i, th
Find the best attribute to split the node. The attributes have continous values (int/float). Here we use information gain to evaluate the attributes. If there is a tie in the best attributes, select the one with the smallest index. Input: X: the feature matrix, a numpy matrix of shape p by n. Each element can be int/float/string. Here n is the number data instances in the node, p is the number of attributes. Y: the class labels, a numpy array of length n. Each element can be int/float/string. Output: i: the index of the attribute to split, an integer scalar th: the threshold of the attribute to split, a float scalar
625941be23e79379d52ee493
def test_has_optimizer(self): <NEW_LINE> <INDENT> from django_static.templatetags.django_static import has_optimizer <NEW_LINE> settings.DJANGO_STATIC_YUI_COMPRESSOR = 'sure' <NEW_LINE> self.assertTrue(has_optimizer('css')) <NEW_LINE> del settings.DJANGO_STATIC_YUI_COMPRESSOR <NEW_LINE> self.assertEqual(has_optimizer('css'), bool(slimmer)) <NEW_LINE> settings.DJANGO_STATIC_YUI_COMPRESSOR = 'sure' <NEW_LINE> settings.DJANGO_STATIC_CLOSURE_COMPILER = 'sure' <NEW_LINE> self.assertTrue(has_optimizer('js')) <NEW_LINE> del settings.DJANGO_STATIC_CLOSURE_COMPILER <NEW_LINE> self.assertTrue(has_optimizer('js')) <NEW_LINE> del settings.DJANGO_STATIC_YUI_COMPRESSOR <NEW_LINE> self.assertEqual(has_optimizer('js'), bool(slimmer)) <NEW_LINE> self.assertRaises(ValueError, has_optimizer, 'uh')
test the utility function has_optimizer(type)
625941bed6c5a10208143f75
def get_config_paths(self): <NEW_LINE> <INDENT> paths = [] <NEW_LINE> conf_path = os.path.realpath( os.path.join(work_path, '..', '..', 'conf')) <NEW_LINE> if self.location is not None: <NEW_LINE> <INDENT> paths.append(self.location) <NEW_LINE> <DEDENT> if os.path.exists(conf_path): <NEW_LINE> <INDENT> paths.append(os.path.join(conf_path, self.config_filename)) <NEW_LINE> <DEDENT> if not is_windows: <NEW_LINE> <INDENT> paths.append(os.path.join(os.path.expanduser('~/.local'), 'etc', appname, self.config_filename)) <NEW_LINE> <DEDENT> if is_linux or is_bsd: <NEW_LINE> <INDENT> paths.append(os.path.join( os.environ.get('XDG_CONFIG_HOME') or os.path.expanduser( '~/.config'), appname, self.config_filename)) <NEW_LINE> if hasattr(sys, 'real_prefix') or is_bsd: <NEW_LINE> <INDENT> paths.append( os.path.join(sys.prefix, 'etc', appname, self.config_filename)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> paths.append( os.path.join('/etc', appname, self.config_filename)) <NEW_LINE> <DEDENT> <DEDENT> elif is_mac: <NEW_LINE> <INDENT> paths.append(os.path.join( os.path.expanduser('~/Library/Application Support/'), appname, self.config_filename)) <NEW_LINE> paths.append(os.path.join( sys_prefix, 'etc', appname, self.config_filename)) <NEW_LINE> <DEDENT> elif is_windows: <NEW_LINE> <INDENT> paths.append(os.path.join( os.environ.get('APPDATA'), appname, self.config_filename)) <NEW_LINE> <DEDENT> return paths
Get a list of config file paths. The list is built taking into account of the OS, priority and location. * running from source: /path/to/glances/conf * per-user install: ~/.local/etc/glances (Unix-like only) * Linux: ~/.config/glances, /etc/glances * BSD: ~/.config/glances, /usr/local/etc/glances * Mac: ~/Library/Application Support/glances, /usr/local/etc/glances * Windows: %APPDATA%\glances The config file will be searched in the following order of priority: * /path/to/file (via -C flag) * /path/to/glances/conf * user's local directory (per-user install settings) * user's home directory (per-user settings) * {/usr/local,}/etc directory (system-wide settings)
625941beff9c53063f47c121
def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('probecard-detail', args=[str(self.id)])
Returns the url to access a detail record for this probecard.
625941be435de62698dfdb78
def _get_value(self, key, func=None, split_val=None, as_boolean=False, exception_default=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if as_boolean: <NEW_LINE> <INDENT> return self.config.getboolean(key[0], key[1]) <NEW_LINE> <DEDENT> value = self.config.get(key[0], key[1]) <NEW_LINE> if split_val is not None: <NEW_LINE> <INDENT> value = value.split(split_val) <NEW_LINE> <DEDENT> if func is not None: <NEW_LINE> <INDENT> return func(value) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> except (KeyError, configparser.NoSectionError, configparser.NoOptionError) as e: <NEW_LINE> <INDENT> if exception_default is not None: <NEW_LINE> <INDENT> return exception_default <NEW_LINE> <DEDENT> raise KeyError(e)
Helper method to get a value from the config
625941be283ffb24f3c55831
def runcommand(cmd): <NEW_LINE> <INDENT> p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) <NEW_LINE> errors = p.stderr.read() <NEW_LINE> result = p.stdout.read() <NEW_LINE> ret = p.wait() <NEW_LINE> if ret != 0: <NEW_LINE> <INDENT> raise Exception(errors) <NEW_LINE> <DEDENT> return result
Returns the output of a command
625941bed7e4931a7ee9de49
def test_return_array_pointer(self): <NEW_LINE> <INDENT> input = Program([FuncDecl(Id("foo"), [], ArrayPointerType(IntType()), Block([Return(CallExpr(Id("foo"), []))])), FuncDecl(Id("main"), [], VoidType(), Block([Return(ArrayCell(CallExpr(Id("foo"), []), IntLiteral(2)))]))]) <NEW_LINE> expect = "Type Mismatch In Statement: Return(ArrayCell(CallExpr(Id(foo),[]),IntLiteral(2)))" <NEW_LINE> self.assertTrue(TestChecker.test(input, expect, 489))
int[] foo(){ return foo()[]; }; void main(){ return foo()[2]; // Error }
625941bef548e778e58cd4a9
def test_get_name(self): <NEW_LINE> <INDENT> expected_name = "CTE Tech Loan" <NEW_LINE> self.assertEquals(expected_name, cte_techloan.get_name())
Tests that the name of the endpoint is correct (logging purposes)
625941be91af0d3eaac9b943
def blame_in_xml(sbox): <NEW_LINE> <INDENT> sbox.build() <NEW_LINE> wc_dir = sbox.wc_dir <NEW_LINE> file_name = "iota" <NEW_LINE> file_path = os.path.join(wc_dir, file_name) <NEW_LINE> svntest.main.file_append(file_path, "Testing svn blame --xml\n") <NEW_LINE> expected_output = svntest.wc.State(wc_dir, { 'iota' : Item(verb='Sending'), }) <NEW_LINE> svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) <NEW_LINE> exit_code, output, error = svntest.actions.run_and_verify_svn( None, [], 'log', file_path, '--xml', '-r1:2') <NEW_LINE> date1 = None <NEW_LINE> date2 = None <NEW_LINE> for line in output: <NEW_LINE> <INDENT> if line.find("<date>") >= 0: <NEW_LINE> <INDENT> if date1 is None: <NEW_LINE> <INDENT> date1 = line <NEW_LINE> continue <NEW_LINE> <DEDENT> elif date2 is None: <NEW_LINE> <INDENT> date2 = line <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise svntest.Failure <NEW_LINE> <DEDENT> template = ['<?xml version="1.0" encoding="UTF-8"?>\n', '<blame>\n', '<target\n', ' path="' + file_path + '">\n', '<entry\n', ' line-number="1">\n', '<commit\n', ' revision="1">\n', '<author>jrandom</author>\n', '%s' % date1, '</commit>\n', '</entry>\n', '<entry\n', ' line-number="2">\n', '<commit\n', ' revision="2">\n', '<author>jrandom</author>\n', '%s' % date2, '</commit>\n', '</entry>\n', '</target>\n', '</blame>\n'] <NEW_LINE> exit_code, output, error = svntest.actions.run_and_verify_svn( None, [], 'blame', file_path, '--xml') <NEW_LINE> for i in range(0, len(output)): <NEW_LINE> <INDENT> if output[i] != template[i]: <NEW_LINE> <INDENT> raise svntest.Failure
blame output in XML format
625941beeab8aa0e5d26da84
def tcumsum(self, dtype=None, out=None): <NEW_LINE> <INDENT> dtype = self.dtype if dtype is None else dtype <NEW_LINE> return process(t=self.t, x=self.cumsum(axis=0, dtype=dtype, out=out))
Process exposing for each path and time point the cumulative sum of process values along time.
625941bebe7bc26dc91cd531
def log_request(self, r): <NEW_LINE> <INDENT> token = r.headers.get(self.header, None) <NEW_LINE> r.token = token <NEW_LINE> self.requests.append(r) <NEW_LINE> if r.token: <NEW_LINE> <INDENT> self.log.debug('[%s] %s', token or '/', r.url)
Log a request/response to the target Symfony. Extract the token from the response (from the X-Debug-Token header). Save it in the response and add the latter to self.requests for later reuse. Log it with DEBUG level if a token is found. :param r: request to log
625941be6e29344779a62541
def process_cluster(self,message): <NEW_LINE> <INDENT> COMs = [] <NEW_LINE> cluster_array = ast.literal_eval(message.data) <NEW_LINE> for cluster in cluster_array[1:]: <NEW_LINE> <INDENT> xs = [c[0] for c in cluster] <NEW_LINE> ys = [c[1] for c in cluster] <NEW_LINE> com_x, com_y = sum(xs)/len(xs), sum(ys)/len(ys) <NEW_LINE> COMs.append((com_x, com_y)) <NEW_LINE> <DEDENT> self.coms = COMs
receives an list of lists from the clustering node and processes to determine where in the map to look for the target. The first list is always a noise element and is not a clustered set of points.
625941be1b99ca400220a9dd
def trick_for_cartpole(done, reward): <NEW_LINE> <INDENT> if done: <NEW_LINE> <INDENT> return -100 <NEW_LINE> <DEDENT> return reward
trick for speed up cartpole training if done, which means agent died, set negtive reward, which help agent learn control method faster.
625941be67a9b606de4a7de8
def fix_origin(self, a, b=None): <NEW_LINE> <INDENT> if b is not None: <NEW_LINE> <INDENT> P = Point(a, b) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> P = a <NEW_LINE> <DEDENT> I = P <NEW_LINE> F = P.translate(self.Dx, self.Dy) <NEW_LINE> s = Segment(I, F) <NEW_LINE> return s
Return the segment fixed at `P`. This is the translation of `self` by `P-self`. In other words, it returns the segment which is parallel to self trough the given point. Typically it is used in the framework of affine vector.. INPUT: - ``P`` - The point on which we want to "attach" the new segment. or - two numbers that are the coordinates of the "attach" point. OUTPUT: A new segment (or vector) with initial point at `P` EXAMPLES: We can fix the origin by giving the coordinates of the new origin:: sage: from phystricks import * sage: v=AffineVector( Point(1,1),Point(2,2) ) sage: w=v.fix_origin(3,5) sage: w.I.coordinates(),w.F.coordinates() ('(3,5)', '(4,6)') We can also give a point:: sage: P=Point(-1,-pi) sage: u=w.fix_origin(P) sage: u.I.coordinates(),u.F.coordinates() ('(-1,-pi)', '(0,-pi + 1)')
625941be236d856c2ad44703
def get_table(self): <NEW_LINE> <INDENT> return self.table
Return string self.table.
625941be7b180e01f3dc472f
def test_find_items(self): <NEW_LINE> <INDENT> search_attrs = GnomeKeyring.Attribute.list_new() <NEW_LINE> (result, items) = GnomeKeyring.find_items_sync( GnomeKeyring.ItemType.GENERIC_SECRET, search_attrs) <NEW_LINE> self.assertEqual(result, GnomeKeyring.Result.OK) <NEW_LINE> print('(no attributes: %i matches) ' % len(items), end='', file=sys.stderr) <NEW_LINE> for item in items: <NEW_LINE> <INDENT> self.assertNotEqual(item.keyring, '') <NEW_LINE> for attr in GnomeKeyring.Attribute.list_to_glist(item.attributes): <NEW_LINE> <INDENT> self.assertTrue(attr.type in (GnomeKeyring.AttributeType.STRING, GnomeKeyring.AttributeType.UINT32)) <NEW_LINE> self.assertEqual(type(attr.name), type('')) <NEW_LINE> self.assertGreater(len(attr.name), 0) <NEW_LINE> if attr.type == GnomeKeyring.AttributeType.STRING: <NEW_LINE> <INDENT> self.assertEqual(type(attr.get_string()), type('')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assertTrue(isinstance(attr.get_uint32()), long) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> GnomeKeyring.Attribute.list_append_string(search_attrs, 'unknown!_attr', '') <NEW_LINE> (result, items) = GnomeKeyring.find_items_sync( GnomeKeyring.ItemType.GENERIC_SECRET, search_attrs) <NEW_LINE> self.assertEqual(result, GnomeKeyring.Result.NO_MATCH) <NEW_LINE> self.assertEqual(len(items), 0)
find_items_sync()
625941bea4f1c619b28aff6c
def test_surrogate_escape(self): <NEW_LINE> <INDENT> result = uuid('scm', name="Mishal\udcc5 Pytasz") <NEW_LINE> self.assertEqual(result, '625166bdc2c4f1a207d39eb8d25315010babd73b')
Check if no errors are raised for invalid UTF-8 chars
625941be293b9510aa2c31c5
def execute_query(self): <NEW_LINE> <INDENT> self.output = requests.get( url = self.request_url, params = self.request_params )
This method should be the same for every interface. Run a pre-defined query with appropriate error handling.
625941be5fc7496912cc38ab
def test_map_aggregate_sortop(self): <NEW_LINE> <INDENT> stmts = [CREATE_STMT2, "CREATE AGGREGATE a1 (integer) (" "SFUNC = f1, STYPE = integer, SORTOP = >)"] <NEW_LINE> dbmap = self.to_map(stmts) <NEW_LINE> expmap = {'sfunc': 'f1', 'stype': 'integer', 'sortop': 'pg_catalog.>'} <NEW_LINE> assert dbmap['schema public']['aggregate a1(integer)'] == expmap
Map an aggregate with a SORTOP
625941bef8510a7c17cf9628
@APP.route(api_v0_1('hello'), methods=['GET', 'POST']) <NEW_LINE> def hello(): <NEW_LINE> <INDENT> return jsonify(with_metadata("Hello, world!"))
Hello world endpoint
625941bed8ef3951e324346a
def find_volume_id_2(onclick): <NEW_LINE> <INDENT> m = ONCLICK_PATTERN.search(onclick) <NEW_LINE> if not m: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> if m.group("_id"): <NEW_LINE> <INDENT> return m.group("_id") <NEW_LINE> <DEDENT> return ""
Find book id from the given string. The string actually is javascript function. Regular expression version is slower than string find.
625941be5510c4643540f318
def update(self, timestamp, msg, *args): <NEW_LINE> <INDENT> self.last_msg = msg <NEW_LINE> self.last_seen = timestamp <NEW_LINE> if msg == 'HELLO': <NEW_LINE> <INDENT> self.status = 'Initializing' <NEW_LINE> self.first_seen = timestamp <NEW_LINE> self.py_version = args[0] <NEW_LINE> self.abi = args[1] <NEW_LINE> self.platform = args[2] <NEW_LINE> <DEDENT> elif msg == 'SLEEP': <NEW_LINE> <INDENT> self.status = 'Waiting for jobs' <NEW_LINE> <DEDENT> elif msg == 'BYE': <NEW_LINE> <INDENT> self.terminated = True <NEW_LINE> self.status = 'Terminating' <NEW_LINE> <DEDENT> elif msg == 'BUILD': <NEW_LINE> <INDENT> self.status = 'Building {} {}'.format(args[0], args[1]) <NEW_LINE> <DEDENT> elif msg == 'SEND': <NEW_LINE> <INDENT> self.status = 'Transferring file' <NEW_LINE> <DEDENT> elif msg == 'DONE': <NEW_LINE> <INDENT> self.status = 'Cleaning up after build'
Update the slave's state from an incoming reply message. :param datetime.datetime timestamp: The time at which the message was originally sent. :param str msg: The message itself. :param *args: Any arguments sent with the message.
625941be507cdc57c6306c02
def set_path(self, config): <NEW_LINE> <INDENT> self.content_path = os.path.join(config.provenance_path, GIT_DATABASE_DIR) <NEW_LINE> self.base_path = os.path.abspath(config.base_path)
Set content path
625941be96565a6dacc8f5fa
def make_text_object(text, font, color): <NEW_LINE> <INDENT> surf = font.render(text, True, color) <NEW_LINE> return surf, surf.get_rect()
Creates a surf and rect from a text.
625941be046cf37aa974cc77
def test_get_data_bad_download_on_unsupported_dataset_address_scheme_error(self): <NEW_LINE> <INDENT> product = EOProduct( self.provider, self.eoproduct_props, productType=self.product_type ) <NEW_LINE> product.driver = mock.MagicMock(spec_set=NoDriver()) <NEW_LINE> product.driver.get_data_address.side_effect = UnsupportedDatasetAddressScheme <NEW_LINE> mock_downloader = mock.MagicMock( spec_set=Download( provider=self.provider, config=config.PluginConfig.from_mapping({"extract": False}), ) ) <NEW_LINE> mock_downloader.download.return_value = None <NEW_LINE> mock_authenticator = mock.MagicMock( spec_set=Authentication( provider=self.provider, config=config.PluginConfig.from_mapping({}) ) ) <NEW_LINE> product.register_downloader(mock_downloader, mock_authenticator) <NEW_LINE> self.assertRaises(DownloadError, product.download) <NEW_LINE> data, band = self.execute_get_data(product, give_back=("band",)) <NEW_LINE> self.assertEqual(product.driver.get_data_address.call_count, 1) <NEW_LINE> product.driver.get_data_address.assert_called_with(product, band) <NEW_LINE> self.assertIsInstance(data, xr.DataArray) <NEW_LINE> self.assertEqual(data.values.size, 0)
If downloader doesn't return the downloaded file path, get_data must return an empty array
625941be099cdd3c635f0b8a
def kronecker_graphs(i_matrix, n): <NEW_LINE> <INDENT> m = len(i_matrix) <NEW_LINE> k = int(math.log(n, m)) <NEW_LINE> for i in range(1, k): <NEW_LINE> <INDENT> size = m ** (i + 1) <NEW_LINE> if i == 1: <NEW_LINE> <INDENT> it_matrix = i_matrix <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> it_matrix = iter_matrix <NEW_LINE> <DEDENT> iter_matrix = [([0] * size) for si in range(size)] <NEW_LINE> for iter_i in range(len(it_matrix)): <NEW_LINE> <INDENT> for iter_j in range(len(it_matrix)): <NEW_LINE> <INDENT> if it_matrix[iter_i][iter_j] == 1: <NEW_LINE> <INDENT> for ii in range(m): <NEW_LINE> <INDENT> for jj in range(m): <NEW_LINE> <INDENT> iter_n = int(m) <NEW_LINE> iter_matrix[iter_i * iter_n + ii][iter_j * iter_n + jj] = i_matrix[ii][jj] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> it_matrix = iter_matrix <NEW_LINE> <DEDENT> if len(it_matrix) < n: <NEW_LINE> <INDENT> final_matrix = [([0] * n) for si in range(n)] <NEW_LINE> for i in range(len(it_matrix)): <NEW_LINE> <INDENT> for j in range(len(it_matrix)): <NEW_LINE> <INDENT> final_matrix[i][j] = it_matrix[i][j] <NEW_LINE> <DEDENT> <DEDENT> it_matrix = final_matrix <NEW_LINE> <DEDENT> return it_matrix
:param i_matrix: init matrix of begin. :param n: iter num of the net work
625941be0a366e3fb873e745
def _generateSpecial_ColourMapOpts_negativeCmap( overlayList, displayCtx, source, longArg): <NEW_LINE> <INDENT> return _generateColourMap(longArg, source.negativeCmap)
Generates arguments for the :attr:`.ColourMapOpts.negativeCmap` argument.
625941be3617ad0b5ed67e26
def get_ds_state(self) -> Tuple[List, List]: <NEW_LINE> <INDENT> processors_prefix_X = 0 <NEW_LINE> processors_in_def_state = 0 <NEW_LINE> X = self.find_cons_state(self.com_pref_states( self.number_of_nodes - 3 * self.number_of_byzantine)) <NEW_LINE> if X[0] == -1: <NEW_LINE> <INDENT> return X <NEW_LINE> <DEDENT> is_default_prefix = X[2] <NEW_LINE> for replica_structure in self.rep: <NEW_LINE> <INDENT> if(replica_structure.is_rep_state_default()): <NEW_LINE> <INDENT> processors_in_def_state += 1 <NEW_LINE> continue <NEW_LINE> <DEDENT> if self.check_new_X_prefix(replica_structure.get_id(), X[0], is_default_prefix): <NEW_LINE> <INDENT> processors_prefix_X += 1 <NEW_LINE> <DEDENT> <DEDENT> if ((self.number_of_nodes - 3 * self.number_of_byzantine) <= processors_prefix_X < (self.number_of_nodes - 2 * self.number_of_byzantine) and ((processors_prefix_X + processors_in_def_state) >= (self.number_of_nodes - self.number_of_byzantine))): <NEW_LINE> <INDENT> return X <NEW_LINE> <DEDENT> return (-1, [], False)
Method description. Returns a prefix if suggested by at least 2f+1 and at most 3f+1 processors, and if there exists another set with the default replica state and these two sets adds up to at least 4f+1 processors. State machine specific method.
625941be91f36d47f21ac41d
def get_failed(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Returns a list of failed jobs. May return ``None`` if this is not supported by the backend.
625941be596a8972360899f1
def hasMeasures(self): <NEW_LINE> <INDENT> sel = FreeCADGui.Selection.getSelection() <NEW_LINE> if not sel: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for o in sel: <NEW_LINE> <INDENT> if not o.isDerivedFrom("App::MeasureDistance"): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
checks if only measurements objects are selected
625941bedd821e528d63b0d8
def apply(self, expr, replacements, evaluation): <NEW_LINE> <INDENT> new_expr = expr.copy() <NEW_LINE> replacements = replacements.get_sequence() <NEW_LINE> for replacement in replacements: <NEW_LINE> <INDENT> if not replacement.has_form('Rule', 2) and not replacement.has_form('RuleDelayed', 2): <NEW_LINE> <INDENT> evaluation.message('ReplacePart', 'reps', Expression('List', *replacements)) <NEW_LINE> return <NEW_LINE> <DEDENT> position = replacement.leaves[0] <NEW_LINE> replace = replacement.leaves[1] <NEW_LINE> if position.has_form('List', None): <NEW_LINE> <INDENT> position = position.leaves <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> position = [position] <NEW_LINE> <DEDENT> for index, pos in enumerate(position): <NEW_LINE> <INDENT> value = pos.get_int_value() <NEW_LINE> if value is None: <NEW_LINE> <INDENT> position = None <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> position[index] = value <NEW_LINE> <DEDENT> <DEDENT> if position is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if replacement.get_head_name() == 'RuleDelayed': <NEW_LINE> <INDENT> replace_value = replace.evaluate(evaluation) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> replace_value = replace <NEW_LINE> <DEDENT> set_part(new_expr, position, replace_value) <NEW_LINE> <DEDENT> except PartError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return new_expr
ReplacePart[expr_, {replacements___}]
625941be92d797404e3040b7
def mean_iou(pred, target, num_classes, batch=None): <NEW_LINE> <INDENT> i, u = intersection_and_union(pred, target, num_classes, batch) <NEW_LINE> iou = i.to(torch.float) / u.to(torch.float) <NEW_LINE> iou[torch.isnan(iou)] = 1 <NEW_LINE> iou = iou.mean(dim=-1) <NEW_LINE> return iou
Computes the mean intersection over union score of predictions. Args: pred (LongTensor): The predictions. target (LongTensor): The targets. num_classes (int): The number of classes. batch (LongTensor): The assignment vector which maps each pred-target pair to an example. :rtype: :class:`Tensor`
625941bed8ef3951e324346b
def getPerceptionSize(self): <NEW_LINE> <INDENT> return self.size
Assumes perception is square. This is the length of one edge
625941bed164cc6175782c7b
def to_dict(self): <NEW_LINE> <INDENT> ordered_groups = list(self.group_order) <NEW_LINE> groups_in_stack = list(self.parameter_groups.keys()) <NEW_LINE> ordered_groups += [g for g in groups_in_stack if g not in ordered_groups] <NEW_LINE> ordered_groups = [g for g in ordered_groups if g in groups_in_stack] <NEW_LINE> self.metadata.update( { "AWS::CloudFormation::Interface": { "ParameterGroups": [ { "Label": {"default": group}, "Parameters": self.parameter_groups[group], } for group in ordered_groups ], "ParameterLabels": dict( [ (parameter, {"default": label}) for parameter, label in self.parameter_labels.items() ] ), } } ) <NEW_LINE> return super(InterfaceTemplate, self).to_dict()
Overwrite 'AWS::CloudFormation::Interface' key in self.metadata (if any) with the groups and labels defined via add_parameter(), and then call super().to_dict().
625941be44b2445a33931fc5
def nll2hdf5(nll_name, h5_name): <NEW_LINE> <INDENT> h5 = H5NllSingleGrid(h5_name, nll_name) <NEW_LINE> del h5
Translates NLL files to hdf5 format. :param nll_name: NLL filename without extension :param h5_name: hdf5 filename
625941be26068e7796caec08
def strip_output(nb): <NEW_LINE> <INDENT> nb.metadata.pop('signature', None) <NEW_LINE> for cell in _cells(nb): <NEW_LINE> <INDENT> if 'outputs' in cell: <NEW_LINE> <INDENT> cell['outputs'] = [] <NEW_LINE> <DEDENT> if 'prompt_number' in cell: <NEW_LINE> <INDENT> cell['prompt_number'] = None <NEW_LINE> <DEDENT> if 'execution_count' in cell: <NEW_LINE> <INDENT> cell['execution_count'] = None <NEW_LINE> <DEDENT> <DEDENT> return nb
strip the outputs from a notebook object
625941be50485f2cf553ccc6
def create_snapshot(self, wtd_name, snapshot_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> wt_disk = self._get_wt_disk(wtd_name) <NEW_LINE> snap_id = self._conn_wmi.WT_Snapshot.Create(WTD=wt_disk.WTD)[0] <NEW_LINE> wt_snap = self._conn_wmi.WT_Snapshot(Id=snap_id)[0] <NEW_LINE> wt_snap.Description = snapshot_name <NEW_LINE> wt_snap.put() <NEW_LINE> <DEDENT> except exceptions.x_wmi as wmi_exc: <NEW_LINE> <INDENT> err_msg = _('Failed to create snapshot. ' 'WT Disk name: %(wtd_name)s ' 'Snapshot name: %(snapshot_name)s') <NEW_LINE> raise exceptions.ISCSITargetWMIException( err_msg % dict(wtd_name=wtd_name, snapshot_name=snapshot_name), wmi_exc=wmi_exc)
Driver entry point for creating a snapshot.
625941be293b9510aa2c31c6
def movesToChessboard(self, board): <NEW_LINE> <INDENT> row_direction = self.process_one_direction(board) <NEW_LINE> if row_direction == -1: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> col_direction = self.process_one_direction(zip(*board)) <NEW_LINE> if col_direction == -1: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> return row_direction + col_direction
:type board: List[List[int]] :rtype: int
625941bee5267d203edcdbcd
def follow_inputs(self, node, num, space=""): <NEW_LINE> <INDENT> val = [] <NEW_LINE> top = space == "" <NEW_LINE> if num == 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> val.append("{}{} {} {}".format(space, node.type, node.name, self.get_shape(node.name + ":0"))) <NEW_LINE> space += " " <NEW_LINE> for j in node.inputs: <NEW_LINE> <INDENT> val.extend(self.follow_inputs(j, num - 1, space)) <NEW_LINE> <DEDENT> if top: <NEW_LINE> <INDENT> print("\n".join(reversed(val))) <NEW_LINE> print() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return val
Follow inputs for (helpful for debugging).
625941beeab8aa0e5d26da85
def _call_GET(self, chunk, headers): <NEW_LINE> <INDENT> resp = None <NEW_LINE> parsed = urlparse(chunk.get('real_url', chunk['url'])) <NEW_LINE> try: <NEW_LINE> <INDENT> with WatchdogTimeout(self.watchdog, self.connection_timeout, ConnectionTimeout): <NEW_LINE> <INDENT> conn = io.http_connect( parsed.netloc, 'GET', parsed.path, headers) <NEW_LINE> <DEDENT> with WatchdogTimeout(self.watchdog, self.read_timeout, ChunkReadTimeout): <NEW_LINE> <INDENT> resp = conn.getresponse() <NEW_LINE> <DEDENT> if resp.status != 200: <NEW_LINE> <INDENT> self.logger.warning('Invalid GET response from %s: %s %s', chunk, resp.status, resp.reason) <NEW_LINE> resp = None <NEW_LINE> <DEDENT> <DEDENT> except (SocketError, Timeout) as err: <NEW_LINE> <INDENT> self.logger.error('ERROR fetching %s: %s', chunk, err) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.logger.exception('ERROR fetching %s', chunk) <NEW_LINE> <DEDENT> return resp
Call GET on the chunk's real URL. :returns: the response object (ready to read data)
625941bebde94217f3682d21
def get_experiment_dataset_dir(self): <NEW_LINE> <INDENT> dataset_dir = self._get_experiment_dir() / 'datasets' <NEW_LINE> if not dataset_dir.exists(): <NEW_LINE> <INDENT> dataset_dir.mkdir(parents=True, exist_ok=True) <NEW_LINE> <DEDENT> return dataset_dir
:return: A directory for storing experiment-specific dataset configs.
625941befff4ab517eb2f368
def getattr(self, obj, attribute): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = getattr(obj, attribute) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return obj[attribute] <NEW_LINE> <DEDENT> except (TypeError, LookupError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.is_safe_attribute(obj, attribute, value): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> return self.unsafe_undefined(obj, attribute) <NEW_LINE> <DEDENT> return self.undefined(obj=obj, name=attribute)
Subscribe an object from sandboxed code and prefer the attribute. The attribute passed *must* be a bytestring.
625941becad5886f8bd26f08
def GetStatAttribute(self): <NEW_LINE> <INDENT> return self._GetStatAttribute()
Retrieves a stat attribute. Returns: StatAttribute: a stat attribute or None if not available.
625941bed58c6744b4257b8e
def complement(seq): <NEW_LINE> <INDENT> COMPLEMENT_TRANS = str.maketrans('TAGCtagc', 'ATCGATCG') <NEW_LINE> return seq.translate(COMPLEMENT_TRANS)
complement seq
625941bef9cc0f698b14052b
def delete_type(self, namespace_id, type_id): <NEW_LINE> <INDENT> if namespace_id is None: <NEW_LINE> <INDENT> raise TypeError <NEW_LINE> <DEDENT> if type_id is None: <NEW_LINE> <INDENT> raise TypeError <NEW_LINE> <DEDENT> response = requests.delete( self.url + self.__typesPath.format(tenant_id=self.tenantId, namespace_id=namespace_id, type_id=type_id), headers=self.__sds_headers()) <NEW_LINE> if response.status_code < 200 or response.status_code >= 300: <NEW_LINE> <INDENT> response.close() <NEW_LINE> raise SdsError("Failed to delete SdsType, " "{type_id}. {status}:{reason}". format(type_id=type_id, status=response.status_code, reason=response.text)) <NEW_LINE> <DEDENT> response.close()
Tells SDS Service to delete the type specified by 'type_id'
625941be97e22403b379cec7
def getVIMFrameRate(self): <NEW_LINE> <INDENT> if not self.proxy: <NEW_LINE> <INDENT> self.proxy = ALProxy("ALVideoDevice") <NEW_LINE> <DEDENT> return self.proxy.getVIMFrameRate()
:returns int:
625941be3cc13d1c6d3c72a9
def _is_writable(path): <NEW_LINE> <INDENT> return os.access(str(path), os.W_OK)
Tests whether path exists and is writable. >>> Path('/usr/bin/python').is_writable() False
625941bf91f36d47f21ac41e
def set_ConsumerKey(self, value): <NEW_LINE> <INDENT> super(FriendshipsLookupInputSet, self)._set_input('ConsumerKey', value)
Set the value of the ConsumerKey input for this Choreo. ((required, string) The API Key (or Consumer Key) provided by Twitter.)
625941bf4428ac0f6e5ba71f
def graficar_intensidad(self, x1=0, y1=0, x2=0, y2=0): <NEW_LINE> <INDENT> print ('La animación requiere la configuración: %matplolib qt') <NEW_LINE> print ('Puede volver al modo inline tipeando %matplolib inline') <NEW_LINE> if x1==0 & x2==0 & y1==0 & y2==0: <NEW_LINE> <INDENT> x0 = self._x0 <NEW_LINE> y0 = self._y0 <NEW_LINE> x1 = self._x1 <NEW_LINE> y1 = self._y1 <NEW_LINE> x2 = self._x2 <NEW_LINE> y2 = self._y2 <NEW_LINE> <DEDENT> fig1 = plt.figure() <NEW_LINE> eje_x, eje_y = self.perfil_intensidad(x0, y0, x1, y1, x2, y2) <NEW_LINE> subplot = fig1.add_subplot(111,xlim=(eje_x[0],eje_x[-1]),ylim=(0,255)) <NEW_LINE> perfil, = subplot.plot([],[]) <NEW_LINE> perfil.set_data(eje_x, eje_y) <NEW_LINE> def init(): <NEW_LINE> <INDENT> perfil.set_data([],[]) <NEW_LINE> return perfil, <NEW_LINE> <DEDENT> def animate(i): <NEW_LINE> <INDENT> eje_x, eje_y = self.perfil_intensidad() <NEW_LINE> perfil.set_data(eje_x, eje_y) <NEW_LINE> return perfil, <NEW_LINE> <DEDENT> anim = animation.FuncAnimation(fig1, animate, init_func=init, interval=30, blit=True) <NEW_LINE> plt.show() <NEW_LINE> return anim
Método para analizar una linea de la imagen levantando el perfil de intensidades de la misma en forma dinámica (ANIMACIÓN) Parámetros ---------- <coords> : coordenadas del análisis (por defecto las calibradas) *algunas pueden ser ignoradas en función de respetar la dirección de analisis configurada* ej: si la dirección es horizontal la coordenada y2 es irrelevante
625941bfd268445f265b4d9c
def alarm_arm_away(self, code=None): <NEW_LINE> <INDENT> self._client.arm_away(self._location_id)
Send arm away command.
625941bf627d3e7fe0d68d7c
@reroute <NEW_LINE> def new(name): <NEW_LINE> <INDENT> return None
Creates a new visual layer :param name: Name of hte layer to create :type name: str :return: Host Specific
625941bfadb09d7d5db6c6bf
def parse_project_type(headers, gef_type): <NEW_LINE> <INDENT> if isinstance(headers, dict): <NEW_LINE> <INDENT> if gef_type == "cpt": <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return first_header_value(headers, "PROJECTID", index=1) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return first_header_value(headers, "PROJECTID") <NEW_LINE> <DEDENT> <DEDENT> elif gef_type == "bore": <NEW_LINE> <INDENT> return first_header_value(headers, "PROJECTID") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if gef_type == "cpt": <NEW_LINE> <INDENT> return parse_regex_cast(r"PROJECTID[\s=a-zA-Z,]*([\w-]+)", headers, str, 1) <NEW_LINE> <DEDENT> elif gef_type == "bore": <NEW_LINE> <INDENT> return parse_regex_cast(r"#PROJECTID+[^a-zA-Z]+([\w-]+)", headers, str, 1)
Function that returns the project type as an int. :param headers:(Union[Dict,str]) Dictionary or string of headers. :param gef_type: (str) String from which the gef type is given. :return: Project type number.
625941bf956e5f7376d70d9d
def peekMax(self): <NEW_LINE> <INDENT> while self.maxHeap and self.toPop_heap.get(self.maxHeap[0], 0): <NEW_LINE> <INDENT> x = heapq.heappop(self.maxHeap) <NEW_LINE> self.toPop_heap[x] -= 1 <NEW_LINE> <DEDENT> return -self.maxHeap[0][0]
:rtype: int
625941bf63f4b57ef000104e
def phi(self, x, y, z): <NEW_LINE> <INDENT> res = swdlib.swd_api_phi(self.obj, x, y, z) <NEW_LINE> return res
Calculates the velocity potential at the actual location. It is assumed that the current time has been set using the method :meth:`update_time`. Parameters ---------- x, y, z : float Position as defined in the application program [m] Returns ------- float Velocity potential at (x,y,z) [m^2/s] Raises ------ None Examples -------- >>> print("potential at (x,y,z) = ", swd.phi(x,y,z))
625941bf507cdc57c6306c03
def kind(self): <NEW_LINE> <INDENT> return self._bytes2str(self.__pairs[-1][0])
Return the kind of the entity referenced. This is the kind from the last (kind, id) pair.
625941bf711fe17d8254229e
def FindItem(self, name, index=None): <NEW_LINE> <INDENT> pass
FindItem(self: GH_IReader, name: str, index: int) -> GH_Item Finds the first item that matches the given name and index. Only items with index qualifiers are considered. Name comparisons are not case-sensitive. name: Name of item to search for. index: Index of item to search for. If less than zero, then FindItem(string name) is called instead. Returns: The item that matches the given name and index, or null of no matching item could be found. FindItem(self: GH_IReader, name: str) -> GH_Item Finds the first item that matches the given name. Only items without index qualifiers are considered. Name comparisons are not case-sensitive. name: Name of item to search for. Returns: The item that matches the given name, or null of no matching item could be found.
625941bf56ac1b37e6264102
def plot_marginal_latlon(filename, ds, origins, **kwargs): <NEW_LINE> <INDENT> _check(ds) <NEW_LINE> ds = ds.copy() <NEW_LINE> if issubclass(type(ds), DataArray): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> elif issubclass(type(ds), DataFrame): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> _plot_latlon(filename, da, origins, **kwargs)
Plots likelihood versus hypocenter location .. rubric :: Input arguments ``filename`` (`str`): Name of output image file ``ds`` (`DataArray` or `DataFrame`): Data structure containing moment tensors and corresponding misfit values ``origins`` (`list` of `Origin` objects) Origin objects corresponding to different hypocenters .. rubric :: Optional input arguments For optional argument descriptions, `see here <mtuq.graphics._plot_depth.html>`_
625941bf8e71fb1e9831d6d9
def new_rev_tum_aux(self, dni, tipo, v_so, v_new): <NEW_LINE> <INDENT> if not all([dni.get(),tipo.get()]): <NEW_LINE> <INDENT> messagebox.showerror(title='Error', message='Alguno de los campos está vacío') <NEW_LINE> <DEDENT> elif self.h.comprueba_nomalta(dni.get(),'P') == False: <NEW_LINE> <INDENT> messagebox.showinfo(title='Paciente no existente', message='El paciente con este número de historial clínico no existe') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ruta='NT' <NEW_LINE> ing=None <NEW_LINE> self.h.onco(dni.get(),ing,tipo.get(),ruta) <NEW_LINE> messagebox.showinfo(title='Guardado', message='Diagnóstico guardado') <NEW_LINE> messagebox.showinfo(title='En desarrollo', message='Las siguientes secciones de Citación e Informes se desarrollarán en futuras versiones.') <NEW_LINE> v_so.destroy() <NEW_LINE> v_new.destroy()
Auxiliar function to be able to send messageboxes
625941bfec188e330fd5a6d2
def get(self, **kwargs): <NEW_LINE> <INDENT> if self.closed: <NEW_LINE> <INDENT> raise exceptions.TranslatorMorseError( "get operation on closed translator" ) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> result = self._output_queue.get(**kwargs) <NEW_LINE> <DEDENT> except Queue.Empty: <NEW_LINE> <INDENT> raise exceptions.TranslatorMorseError("empty queue") <NEW_LINE> <DEDENT> self._output_queue.task_done() <NEW_LINE> return result
Retrieve and return from the processed items a new item.
625941bf009cb60464c632e2
@prettytensor.Register <NEW_LINE> def l1_regression( input_layer, target, name=PROVIDED, loss_weight=None, per_example_weights=None): <NEW_LINE> <INDENT> target = _convert_and_assert_tensors_compatible(input_layer, target) <NEW_LINE> return apply_regression(input_layer, functions.l1_regression_loss, target, [], name='%s_loss' % name, loss_weight=loss_weight, per_example_weights=per_example_weights)
Applies an L1 Regression (Sum of Absolute Error) to the target.
625941bf6fece00bbac2d66b
def go_to(self, dir_name): <NEW_LINE> <INDENT> if self.is_dir(dir_name): <NEW_LINE> <INDENT> return self.__class__(self.path_to(dir_name), permissions=self.__permissions) <NEW_LINE> <DEDENT> raise OSError( 'Directory "{}" does not exist at path "{}"'.format( dir_name, self.__path, ), )
Go to directory. Will be created new instance of self class on base path of dir_name. :param dir_name: name of directory :raises: OSError :return: WorkSpace
625941bfa934411ee37515c1
def __deserialize(self, data, klass): <NEW_LINE> <INDENT> if data is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if type(klass) == str: <NEW_LINE> <INDENT> if klass.startswith('list['): <NEW_LINE> <INDENT> sub_kls = re.match('list\[(.*)\]', klass).group(1) <NEW_LINE> return [self.__deserialize(sub_data, sub_kls) for sub_data in data] <NEW_LINE> <DEDENT> if klass.startswith('dict('): <NEW_LINE> <INDENT> sub_kls = re.match('dict\(([^,]*), (.*)\)', klass).group(2) <NEW_LINE> return {k: self.__deserialize(v, sub_kls) for k, v in iteritems(data)} <NEW_LINE> <DEDENT> if klass in ['int', 'float', 'str', 'bool', 'bytearray', "date", 'datetime', "object"]: <NEW_LINE> <INDENT> klass = eval(klass) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> klass = eval('models.' + klass) <NEW_LINE> <DEDENT> <DEDENT> if klass in [int, float, str, bool, bytearray]: <NEW_LINE> <INDENT> return self.__deserialize_primitive(data, klass) <NEW_LINE> <DEDENT> elif klass == object: <NEW_LINE> <INDENT> return self.__deserialize_object(data) <NEW_LINE> <DEDENT> elif klass == date: <NEW_LINE> <INDENT> return self.__deserialize_date(data) <NEW_LINE> <DEDENT> elif klass == datetime: <NEW_LINE> <INDENT> return self.__deserialize_datatime(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.__deserialize_model(data, klass)
Deserializes dict, list, str into an object. :param data: dict, list or str. :param klass: class literal, or string of class name. :return: object.
625941bf26068e7796caec09
def test_checkPreconditions_deferred(self): <NEW_LINE> <INDENT> resource = TestResource() <NEW_LINE> request = SimpleRequest(Site(resource), "HOOKUPS", "/") <NEW_LINE> def checkResponse(response): <NEW_LINE> <INDENT> self.assertEquals(response, responsecode.NO_CONTENT) <NEW_LINE> <DEDENT> d = resource.renderHTTP(request) <NEW_LINE> d.addCallback(checkResponse)
RenderMixin.checkPreconditions() checkPreconditions() returns a deferred
625941bf01c39578d7e74d69
@pytest.mark.parametrize("destination", control_explorer_accordions) <NEW_LINE> def test_control_explorer_tree(control_explorer_view, destination, appliance): <NEW_LINE> <INDENT> navigate_to(appliance.server, 'ControlExplorer') <NEW_LINE> accordion_name = destination.lower().replace(" ", "_") <NEW_LINE> accordion = getattr(control_explorer_view, accordion_name) <NEW_LINE> accordion.tree.click_path("All {}".format(destination))
This test checks the accordion of Control/Explorer. Steps: * Open each accordion tab and click on top node of the tree.
625941bf8a43f66fc4b53f96
def dtype_from_ctypes_type(t): <NEW_LINE> <INDENT> if issubclass(t, _ctypes.Array): <NEW_LINE> <INDENT> return _from_ctypes_array(t) <NEW_LINE> <DEDENT> elif issubclass(t, _ctypes._Pointer): <NEW_LINE> <INDENT> raise TypeError("ctypes pointers have no dtype equivalent") <NEW_LINE> <DEDENT> elif issubclass(t, _ctypes.Structure): <NEW_LINE> <INDENT> return _from_ctypes_structure(t) <NEW_LINE> <DEDENT> elif issubclass(t, _ctypes.Union): <NEW_LINE> <INDENT> raise NotImplementedError( "conversion from ctypes.Union types like {} to dtype" .format(t.__name__)) <NEW_LINE> <DEDENT> elif isinstance(t._type_, str): <NEW_LINE> <INDENT> return dtype_from_ctypes_scalar(t) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError( "Unknown ctypes type {}".format(t.__name__))
Construct a dtype object from a ctypes type
625941bf0a50d4780f666dbe
def ABphot(fileTableList,**kwargs): <NEW_LINE> <INDENT> photA = get_phot_table(fileTableList[0],apPos=[1406,1039],**kwargs) <NEW_LINE> photB = get_phot_table(fileTableList[1],apPos=[827,822],**kwargs) <NEW_LINE> comb = comb_phot([photA,photB],table_names=['A','B']) <NEW_LINE> return comb
Does the A side photometry and B side photometry, then combines the results
625941bf7c178a314d6ef389
def update(self, changes): <NEW_LINE> <INDENT> for (a, b), change in changes.iteritems(): <NEW_LINE> <INDENT> if change != 0: <NEW_LINE> <INDENT> self.upsert_node(a, change) <NEW_LINE> self.upsert_node(b, change) <NEW_LINE> <DEDENT> <DEDENT> return self.median()
Update the node-cache.
625941bf29b78933be1e55df
def send_restore_password_link(self): <NEW_LINE> <INDENT> if not self.restore_code: <NEW_LINE> <INDENT> self.generate_restore_code() <NEW_LINE> <DEDENT> context = { "speaker": self, } <NEW_LINE> send_template_email( subject=_("[PyConES 2016] Establece tu contraseña"), from_email="PyConES 2016 <contacto2016@es.pycon.org>", to=self.user.email, template_name="emails/speakers/restore_email.html", context=context )
Sends email with link to restore password.
625941bf2ae34c7f2600d060
def solve_problem(instance, relax=True, _round=False, decompose=False, _epsilon=0.01, _alpha0=5000.0, _rho=0.92, _nar=10): <NEW_LINE> <INDENT> print("Solving problem...") <NEW_LINE> start = time.time() <NEW_LINE> if not decompose: <NEW_LINE> <INDENT> problem, (u, v, p, theta, w, z, e) = create_formulation(instance, relax=relax) <NEW_LINE> problem.solve() <NEW_LINE> l_k = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> problem, l_k = solve_with_subgradient(instance, _epsilon=_epsilon, _alpha0=_alpha0, _rho=_rho, _nar=_nar) <NEW_LINE> evolve_and_fix(problem) <NEW_LINE> <DEDENT> exec_time = time.time() - start <NEW_LINE> obj = problem.objective.value() <NEW_LINE> print("Solve time: %f s" % exec_time) <NEW_LINE> if problem.status == pulp.constants.LpStatusOptimal: <NEW_LINE> <INDENT> print("Value of the objective: %f" % obj) <NEW_LINE> <DEDENT> if _round: <NEW_LINE> <INDENT> heuristic_start = time.time() <NEW_LINE> evolve_and_fix(problem) <NEW_LINE> print("Rounding time: %f s" % (time.time() - heuristic_start)) <NEW_LINE> obj = problem.objective.value() <NEW_LINE> print("Value of the objective: %f" % obj) <NEW_LINE> <DEDENT> n_violated, groups_n_violated = problem.constraints_violated() <NEW_LINE> print("Number of violated constraints: %i" % n_violated) <NEW_LINE> for group in groups_n_violated.keys(): <NEW_LINE> <INDENT> if groups_n_violated[group][0] > 0: <NEW_LINE> <INDENT> print("Number of violated constraints of group %s: %i / %i" % ( group, groups_n_violated[group][0], groups_n_violated[group][1])) <NEW_LINE> <DEDENT> <DEDENT> if problem.is_integer_solution() and n_violated == 0: <NEW_LINE> <INDENT> print("Found feasible solution to the original primal problem.") <NEW_LINE> print("Value of the objective: %f" % problem.objective.value()) <NEW_LINE> <DEDENT> total_time = time.time() - start <NEW_LINE> with open("solution.txt", "w") as f: <NEW_LINE> <INDENT> f.write("Problem status: %i\n" % problem.status) <NEW_LINE> f.write("Value of the objective: %f\n" % 
problem.objective.value()) <NEW_LINE> for variable in problem.variables(): <NEW_LINE> <INDENT> f.write("%s = %s\n" % (str(variable.name), str(variable.varValue))) <NEW_LINE> <DEDENT> <DEDENT> return obj, total_time, n_violated, l_k
Solve a SUC problem instance given instance constants. Returns the value of the objective function, the total execution time, the number of violated constraints (if there are any), and the value of the lagrangian dual (if lagrangian decomposition is used). Args: instance (SUPInstance): Constants of the problem instance relax (bool, optional): Whether to solve the linear relaxation of the problem _round (bool, optional): Whether to apply rounding heuristic at the end of the process _decompose (bool, optional): Whether to solve the lagrangian decomposition of the problem using subgradient method _lambda (float, optional): Constant control parameter for the dynamic steplength _epsilon (float, optional): Convergence threshold of subgradient method _alpha0 (float, optional): Initial subgradient steplength _nar (float, int): Number of subgradient iterations without primal recovery
625941bfd6c5a10208143f77
def press_printscreen(): <NEW_LINE> <INDENT> press_key(win32con.VK_SNAPSHOT)
Presses the print screen key.
625941bf435de62698dfdb7a
def add_product_to_cart(self, product_id): <NEW_LINE> <INDENT> self.customer_cart.append(product_id)
Function adds product to cart with product id
625941bffbf16365ca6f60ed
def copy(self): <NEW_LINE> <INDENT> result = Message( time = self.time, interval = self.interval, aspect = self.aspect, location = self._location._location, state = self.state, severity = self.severity, comment = self.comment ) <NEW_LINE> for val in self: <NEW_LINE> <INDENT> result[val] = self[val].copy() <NEW_LINE> <DEDENT> return result
Return a deep copy of the message instance.
625941bfe76e3b2f99f3a73f
def fetch_content(self, what_if_fail, ca_certs_path=""): <NEW_LINE> <INDENT> self.content_uri = self._addon_data.content_url <NEW_LINE> shutil.rmtree(self.CONTENT_DOWNLOAD_LOCATION, ignore_errors=True) <NEW_LINE> self.CONTENT_DOWNLOAD_LOCATION.mkdir(parents=True, exist_ok=True) <NEW_LINE> fetching_thread_name = self._fetch_files( self.content_uri_scheme, self.content_uri_path, self.CONTENT_DOWNLOAD_LOCATION, ca_certs_path, what_if_fail) <NEW_LINE> return fetching_thread_name
Initiate fetch of the content into an appropriate directory Args: what_if_fail: Callback accepting exception as an argument that should handle them in the calling layer. ca_certs_path: Path to the HTTPS certificate file
625941bfe8904600ed9f1e59
def test_get_loosers_tie(self): <NEW_LINE> <INDENT> loosers = elections.getLoosers(['abc', 'acb'], CANDIDATES) <NEW_LINE> self.assertItemsEqual(['b', 'c'], loosers)
Should correctly identified tied loosing candidates.
625941bf0fa83653e4656eeb
def _set_up_fields(self, user): <NEW_LINE> <INDENT> pass
Set up the form fields.
625941bf1b99ca400220a9df
def licenseKeyFormatting(self, S, K): <NEW_LINE> <INDENT> outoutstr = [] <NEW_LINE> index = 0 <NEW_LINE> for c in S[::-1]: <NEW_LINE> <INDENT> if c != '-': <NEW_LINE> <INDENT> index += 1 <NEW_LINE> outoutstr.append(c.upper()) <NEW_LINE> if index % (K) == 0: <NEW_LINE> <INDENT> outoutstr.append('-') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(outoutstr) > 0 and outoutstr[-1] == '-': <NEW_LINE> <INDENT> outoutstr.pop() <NEW_LINE> <DEDENT> return ''.join(outoutstr[::-1])
:type S: str :type K: int :rtype: str
625941bf379a373c97cfaa72
def _get_percents(self): <NEW_LINE> <INDENT> if self._percents is not None: <NEW_LINE> <INDENT> return self._percents <NEW_LINE> <DEDENT> from weblate.trans.models.translation import Translation <NEW_LINE> result = Translation.objects.get_percents(language=self) <NEW_LINE> self._percents = result <NEW_LINE> return result
Returns percentages of translation status.
625941bfd486a94d0b98e074
def set_subscription(self, subscribed, ignored): <NEW_LINE> <INDENT> url = self._build_url('subscription', base_url=self._api) <NEW_LINE> sub = {'subscribed': subscribed, 'ignored': ignored} <NEW_LINE> json = self._json(self._put(url, data=dumps(sub)), 200) <NEW_LINE> return Subscription(json, self) if json else None
Set the user's subscription for this thread :param bool subscribed: (required), determines if notifications should be received from this thread. :param bool ignored: (required), determines if notifications should be ignored from this thread. :returns: :class;`Subscription <Subscription>`
625941bf442bda511e8be34b
def width(self): <NEW_LINE> <INDENT> return sm.max_width(self.bStations)
Calculates the bankfull width given a certain elevation.
625941bffff4ab517eb2f369
def orc( self, path: PathOrPaths, mergeSchema: Optional[bool] = None, pathGlobFilter: Optional[Union[bool, str]] = None, recursiveFileLookup: Optional[Union[bool, str]] = None, modifiedBefore: Optional[Union[bool, str]] = None, modifiedAfter: Optional[Union[bool, str]] = None, ) -> "DataFrame": <NEW_LINE> <INDENT> self._set_opts( mergeSchema=mergeSchema, pathGlobFilter=pathGlobFilter, modifiedBefore=modifiedBefore, modifiedAfter=modifiedAfter, recursiveFileLookup=recursiveFileLookup, ) <NEW_LINE> if isinstance(path, str): <NEW_LINE> <INDENT> path = [path] <NEW_LINE> <DEDENT> return self._df(self._jreader.orc(_to_seq(self._spark._sc, path)))
Loads ORC files, returning the result as a :class:`DataFrame`. .. versionadded:: 1.5.0 Parameters ---------- path : str or list Other Parameters ---------------- Extra options For the extra options, refer to `Data Source Option <https://spark.apache.org/docs/latest/sql-data-sources-orc.html#data-source-option>`_ in the version you use. .. # noqa Examples -------- >>> df = spark.read.orc('python/test_support/sql/orc_partitioned') >>> df.dtypes [('a', 'bigint'), ('b', 'int'), ('c', 'int')]
625941bfd18da76e23532402
def test_all_muscles_have_wormbaseID(self): <NEW_LINE> <INDENT> muscles = self.qctx(Worm)().muscles() <NEW_LINE> for muscle_object in muscles: <NEW_LINE> <INDENT> self.assertNotEqual(muscle_object.wormbaseID(), '')
This test verifies that every muscle has a Wormbase ID.
625941bfab23a570cc2500af
def test_without_test_cookie(self): <NEW_LINE> <INDENT> validate_test_cookie(self.form, self.request) <NEW_LINE> self.assertIn('submit', self.form.errors) <NEW_LINE> self.assertEqual(self.form.errors['submit'], ['Cookies must be enabled.'])
Testing validate_test_cookie without test cookie set
625941bf6aa9bd52df036cd2
def intersect_line(self,line): <NEW_LINE> <INDENT> if self==line: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> elif self.is_parallel(line): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._intersect_line_skew(line)
Returns the intersection of this line with the supplied line. :param line: A line. :type line: Line2D or Line3D :return: Returns a line (this line) if lines are collinear. Returns None (i.e. no intersection) if lines are parallel. For 2D, returns a point if lines are skew. For 3D, returns either None or a point if lines are skew. :rtype: None, Point2D, Point3D, Line2D, Line3D. :Example: .. code-block:: python # 2D example >>> l1 = Line2D(Point2D(0,0), Vector2D(1,0)) >>> l2 = Line2D(Point2D(0,0), Vector2D(0,1)) >>> result = l.intersect_line(l2) >>> print(result) Point2D(0,0) # 3D example >>> l1 = Line3D(Point3D(0,0,0), Vector3D(1,0,0)) >>> l2 = Line3D(Point3D(0,0,1), Vector3D(1,0,0)) >>> result = l1.intersect_line(l2) >>> print(result) None .. seealso:: `<https://geomalgorithms.com/a05-_intersect-1.html>`_
625941bf498bea3a759b99df
def __enter__(self) -> list[State]: <NEW_LINE> <INDENT> self.now = dt_util.utcnow() <NEW_LINE> return self.states
Record time from which to track changes.
625941bfbe7bc26dc91cd534
def __bool__(self): <NEW_LINE> <INDENT> return self._x != 0 or self._y != 0
bool operator for Python 3.
625941bf63b5f9789fde7014
def find_torrent(self, info_hash): <NEW_LINE> <INDENT> return(common.find_torrent(info_hash, self.get_torrents()))
Frontend for rtorrent.common.find_torrent
625941bf67a9b606de4a7deb
def sign_message(data, api_key): <NEW_LINE> <INDENT> dataJson = json.dumps(data) <NEW_LINE> message = base64.b64encode(dataJson) <NEW_LINE> timestamp = int(time.time()) <NEW_LINE> shared_secret = get_private_key(api_key) <NEW_LINE> sig = hmac.new(shared_secret, '{m} {ts}'.format(m=message, ts=timestamp), hashlib.sha1).hexdigest() <NEW_LINE> return '{m}, {sig}, {ts}'.format(m=message, sig=sig, ts=timestamp)
Create a signed message This is only required for testing.
625941bf566aa707497f449c
def io_axis_indices(coordmap, axis_id, fix0=True): <NEW_LINE> <INDENT> in_dims = list(coordmap.function_domain.coord_names) <NEW_LINE> out_dims = list(coordmap.function_range.coord_names) <NEW_LINE> in_dim, out_dim, is_str = None, None, False <NEW_LINE> if isinstance(axis_id, int): <NEW_LINE> <INDENT> in_dim = axis_id if axis_id >=0 else len(in_dims) + axis_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if axis_id in in_dims: <NEW_LINE> <INDENT> in_dim = in_dims.index(axis_id) <NEW_LINE> <DEDENT> elif axis_id in out_dims: <NEW_LINE> <INDENT> out_dim = out_dims.index(axis_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AxisError('No input or output dimension with name (%s)' % axis_id) <NEW_LINE> <DEDENT> is_str = True <NEW_LINE> <DEDENT> if out_dim is None: <NEW_LINE> <INDENT> out_dim = axmap(coordmap, 'in2out', fix0=fix0)[in_dim] <NEW_LINE> if (is_str and axis_id in out_dims and out_dim != out_dims.index(axis_id)): <NEW_LINE> <INDENT> raise AxisError('Input and output axes with the same name but ' 'the axes do not appear to correspond') <NEW_LINE> <DEDENT> <DEDENT> elif in_dim is None: <NEW_LINE> <INDENT> in_dim = axmap(coordmap, 'out2in', fix0=fix0)[out_dim] <NEW_LINE> <DEDENT> return in_dim, out_dim
Return input and output axis index for id `axis_id` in `coordmap` Parameters ---------- cm : class:`AffineTransform` Affine coordinate map instance axis_id : int or str If int, gives index of *input* axis. Can be negative, so that -2 refers to the second from last input axis. If str, gives name of input *or* output axis. If `axis_id` is a str, it must be unambiguous - if the named axis exists in both input and output, and they do not correspond, raises a AxisError. See Raises section for checks fix0: bool, optional Whether to fix potential 0 column / row in affine Returns ------- in_index : None or int index of input axis that corresponds to `axis_id` out_index : None or int index of output axis that corresponds to `axis_id` Raises ------ AxisError: if `axis_id` is a str and does not match any input or output coordinate names. AxisError: if the named `axis_id` exists in both input and output, and they do not correspond. Examples -------- >>> aff = [[0, 1, 0, 10], [1, 0, 0, 11], [0, 0, 1, 12], [0, 0, 0, 1]] >>> cmap = AffineTransform('ijk', 'xyz', aff) >>> io_axis_indices(cmap, 0) (0, 1) >>> io_axis_indices(cmap, 1) (1, 0) >>> io_axis_indices(cmap, -1) (2, 2) >>> io_axis_indices(cmap, 'j') (1, 0) >>> io_axis_indices(cmap, 'y') (0, 1)
625941bf4a966d76dd550f3c
def eiFilt(autom, state, replacement): <NEW_LINE> <INDENT> autom.addFilter('eiFilt state:{} replacement:{}'.format( state, replacement)) <NEW_LINE> assert state >= 0 and state < autom.stateCount <NEW_LINE> assert replacement >= 0 and replacement <= autom.stateCount <NEW_LINE> if replacement == autom.stateCount: <NEW_LINE> <INDENT> autom.stateCount = replacement + 1 <NEW_LINE> <DEDENT> l = autom.aut <NEW_LINE> width = autom.width <NEW_LINE> height = autom.height <NEW_LINE> for i in range(height): <NEW_LINE> <INDENT> if l[i][0] == state and l[i][width-1] == state: <NEW_LINE> <INDENT> q = queue.Queue() <NEW_LINE> l[i][0] = replacement <NEW_LINE> l[i][width-1] = replacement <NEW_LINE> q.put((i, 0)) <NEW_LINE> q.put((i, width-1)) <NEW_LINE> while not q.empty(): <NEW_LINE> <INDENT> (x, y) = q.get() <NEW_LINE> if x > 0 and l[x-1][y] == state: <NEW_LINE> <INDENT> l[x-1][y] = replacement <NEW_LINE> q.put((x-1, y)) <NEW_LINE> <DEDENT> if x < height-1 and l[x+1][y] == state: <NEW_LINE> <INDENT> l[x+1][y] = replacement <NEW_LINE> q.put((x+1, y)) <NEW_LINE> <DEDENT> if l[x][(y-1)%width] == state: <NEW_LINE> <INDENT> l[x][(y-1)%width] = replacement <NEW_LINE> q.put((x, (y-1)%width)) <NEW_LINE> <DEDENT> if l[x][(y+1)%width] == state: <NEW_LINE> <INDENT> l[x][(y+1)%width] = replacement <NEW_LINE> q.put((x, (y+1)%width))
Edge island filter. Replaces all islands of "state" that wrap around the left and right edges of autom.
625941bfbd1bec0571d9055d
def get_provided(self, id_): <NEW_LINE> <INDENT> for provider in self.root.providers: <NEW_LINE> <INDENT> if hasattr(provider, 'ns_provide'): <NEW_LINE> <INDENT> provided = provider.ns_provide(id_) <NEW_LINE> if provided: <NEW_LINE> <INDENT> if provided == 'USE_EXISTING': <NEW_LINE> <INDENT> w = self.top().find_by_id(id_) <NEW_LINE> if w: <NEW_LINE> <INDENT> if not hasattr(w, '_ns_id'): <NEW_LINE> <INDENT> w._ns_id = id_ <NEW_LINE> <DEDENT> return w <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not hasattr(provided, '_ns_id'): <NEW_LINE> <INDENT> provided._ns_id = id_ <NEW_LINE> <DEDENT> return provided <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return None
IMPORTANT: nested_splitter should set the _ns_id attribute *only* if the provider doesn't do it itself. That allows the provider to encode state information in the id. Also IMPORTANT: nested_splitter should call all providers for each id_, not just providers which previously advertised the id_. E.g. a provider which advertises leo_bookmarks_show may also be the correct provider for leo_bookmarks_show:4532.234 - let the providers decide in ns_provide().
625941bf4527f215b584c389
def __init__(self, type=None): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> self.displayName = _u("") <NEW_LINE> self.pattern = _u("")
Initialize the object.
625941bfec188e330fd5a6d3
def sur_setp(bigB,varb): <NEW_LINE> <INDENT> vvb = varb.diagonal() <NEW_LINE> n_eq = len(bigB.keys()) <NEW_LINE> bigK = np.zeros((n_eq,1),dtype=np.int_) <NEW_LINE> for r in range(n_eq): <NEW_LINE> <INDENT> bigK[r] = bigB[r].shape[0] <NEW_LINE> <DEDENT> b = sur_dict2mat(bigB) <NEW_LINE> se = np.sqrt(vvb) <NEW_LINE> se.resize(len(se),1) <NEW_LINE> t = np.divide(b,se) <NEW_LINE> tp = stats.norm.sf(abs(t))*2 <NEW_LINE> surinf = np.hstack((se,t,tp)) <NEW_LINE> surinfdict = sur_mat2dict(surinf,bigK) <NEW_LINE> return surinfdict
Utility to compute standard error, t and p-value Parameters ---------- bigB : dictionary of regression coefficient estimates, one vector by equation varb : variance-covariance matrix of coefficients Returns ------- surinfdict : dictionary with standard error, t-value, and p-value array, one for each equation
625941bf097d151d1a222d8b
def enhancedSort(liste, comparateur, ordre): <NEW_LINE> <INDENT> return sorted(liste, key=operator.attrgetter(comparateur), reverse=ordre)
Trie une liste d'objets selon le comparateur. Entree : La liste Le/les attributs de l'objet servant de comparateur(s) (str) Ordre de tri (True: décroissant / False: croissant) Sortie : La liste de dictionnaires triée.
625941bfac7a0e7691ed4000
def test_POST_bulk_feature(self): <NEW_LINE> <INDENT> c = Client() <NEW_LINE> c.login(username='admin', password='admin') <NEW_LINE> response = c.get(self.url) <NEW_LINE> formset = response.context['formset'] <NEW_LINE> POST_data = self._POST_data_from_formset(formset) <NEW_LINE> POST_data['form-0-BULK'] = 'yes' <NEW_LINE> POST_data['form-1-BULK'] = 'yes' <NEW_LINE> POST_data['bulk_action'] = 'feature' <NEW_LINE> POST_response = c.post(self.url, POST_data) <NEW_LINE> self.assertStatusCodeEquals(POST_response, 302) <NEW_LINE> self.assertEqual(POST_response['Location'], 'http://%s%s?successful' % ( 'testserver', self.url)) <NEW_LINE> video1 = Video.objects.get(pk=POST_data['form-0-id']) <NEW_LINE> self.assertTrue(video1.last_featured is not None) <NEW_LINE> video2 = Video.objects.get( pk=POST_data['form-1-id']) <NEW_LINE> self.assertTrue(video2.last_featured is not None)
A POST request to the bulk_edit view with a valid formset and a POST['bulk_action'] of 'feature' should feature the videos with the bulk option checked.
625941bf379a373c97cfaa73
def main():
    """Train and evaluate every algorithm in ALGORITHMS on an 80/20
    train/test split of the input data, then write the accuracies to
    the results table.
    """
    algs = ALGORITHMS.keys()
    results_table = []
    # Only emit the header row once, when the results file does not exist yet.
    if not os.path.exists("results.txt"):
        results_table.append(results_header(results_table, algs))
    args = get_args()
    train_data = get_data(args, 1)
    # The label is the last column of each row; split it off the features.
    train_col = [row[-1] for row in train_data]
    for row in train_data:
        del row[-1]
    train, test, train_result, test_result = train_test_split(
        train_data, train_col, test_size=0.2
    )
    dataset = [train, train_result, test, test_result]
    acc = []
    for model in ALGORITHMS.values():
        model.fit(dataset[0], dataset[1])
        result = model.predict(dataset[2])
        accuracy = check(result, dataset[2], dataset[3])
        # Truncate to 5 decimal places (equivalent to the original
        # multiply / int / float / divide sequence).
        accuracy = int(accuracy * 100000) / 100000
        acc.append(accuracy)
    results_table = add_to_list(
        results_table, args[1], [len(train_data), len(train_data[0])], "80/20", acc
    )
    write_table(results_table)
Train and evaluate every algorithm in ALGORITHMS on an 80/20 train/test split of the input data and record the resulting accuracies in the results table.
625941bfd8ef3951e324346c
@misc.raise_privileges
def split_partition(device_path, partition_path, new_size_in_mb):
    """Shrink a partition and split it in two.

    ALERT: The filesystem must be resized before trying this!

    Parameters
    ----------
    device_path : str
        Path of the disk device (e.g. /dev/sda).
    partition_path : str
        Path of the partition to split (e.g. /dev/sda1).
    new_size_in_mb : int
        Desired size in MB of the first of the two resulting partitions.

    Returns
    -------
    bool or None
        False when the partition is mounted (nothing is changed);
        otherwise the new layout is committed to disk.
    """
    disk_dic = get_devices()
    disk = disk_dic[device_path]
    part_dic = get_partitions(disk)
    part = part_dic[partition_path]

    sec_size = disk.sectorSize
    logging.debug("Sec size: %d", sec_size)
    units = 1000000  # bytes per MB, used for all size conversions

    # Capture the old geometry BEFORE deleting the partition; the
    # original code read it afterwards, from a deleted partition object.
    start_sector = part.geometry.start
    # BUG FIX: was `part.gemotry.end` (typo), which raised AttributeError.
    old_end_sector = part.geometry.end
    old_length = part.geometry.length
    old_size_in_mb = old_length * sec_size / units

    if check_mounted(part):
        print(partition_path + ' is mounted, unmount first')
        return False
    delete_partition(disk, part)

    # First (shrunk) partition occupies the start of the old region.
    new_length = int(new_size_in_mb * units / sec_size)
    new_end_sector = start_sector + new_length
    my_geometry = geom_builder(disk, start_sector, new_end_sector, new_size_in_mb)
    logging.debug("create_partition %s", my_geometry)
    create_partition(disk, 0, my_geometry)

    # Second partition fills the remainder of the original space.
    new_size_in_mb = old_size_in_mb - new_size_in_mb
    start_sector = new_end_sector + 1
    end_sector = old_end_sector
    my_geometry = geom_builder(disk, start_sector, end_sector, new_size_in_mb)
    logging.debug("create_partition %s", my_geometry)
    create_partition(disk, 0, my_geometry)

    finalize_changes(disk)
Shrinks partition and splits it in two. ALERT: The filesystem must be resized before trying this!
625941bf85dfad0860c3ad89