code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def redeem_voucher(self, id): <NEW_LINE> <INDENT> if not self.in_sandbox: <NEW_LINE> <INDENT> raise NotSupportedInProduction() <NEW_LINE> <DEDENT> return self._get('/vouchers/' + id + '/redeem')
Redeems a voucher :param String id The ID of a specific voucher
625941bfd6c5a10208143f7a
def as_sql(self, qn, connection): <NEW_LINE> <INDENT> lhs, lhs_params = self.process_lhs(qn, connection) <NEW_LINE> rhs, rhs_params = self.process_rhs(qn, connection) <NEW_LINE> params = lhs_params + rhs_params <NEW_LINE> lookup_data = self.get_lookup_data() <NEW_LINE> operator = lookup_data['operator'] <NEW_LINE> lookup = lookup_data['lookup_func'](self.lookup_name) <NEW_LINE> path_elem = lookup_data['path_elem'] <NEW_LINE> return "%s %s %s %s %s" % (lhs, path_elem, lookup, operator, rhs), params
Looks up the json path as string
625941bff8510a7c17cf962d
def find_files_and_time(postpath='.'): <NEW_LINE> <INDENT> assert isdir(expanduser(postpath)), "find_files_and_time: The input must be a directory." <NEW_LINE> files_h5 = glob.glob(postpath+'/post*.h5') ; files_h5.sort() <NEW_LINE> files_xmf = glob.glob(postpath+'/post*.xmf') ; files_xmf.sort() <NEW_LINE> nfilemax = len(files_h5) <NEW_LINE> time = np.zeros(nfilemax) <NEW_LINE> try: <NEW_LINE> <INDENT> for i in range(0,nfilemax): <NEW_LINE> <INDENT> file_xmf = open(files_xmf[i],'r') <NEW_LINE> lines = file_xmf.readlines() <NEW_LINE> time[i] = np.double(lines[5][18:30]) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print("find_files_and_time: Problem with reading in the time from the .xmf files.") <NEW_LINE> raise <NEW_LINE> <DEDENT> return files_h5, files_xmf, time
Creates a list of the HDF5 and xmf files in a directory, and reads in the time from the xmf files into an array. Sample usage ------------ files_h5, files_xmf, time = find_files_and_time('.') Notes ----- The time calculation is sensitive to the format of the xmf files. If that format changes, then this routine will have to be modified.
625941bf60cbc95b062c6474
def save(self,filename,overwrite=False,save_format='h5'): <NEW_LINE> <INDENT> self.model.save_weights(filename,overwrite=overwrite,save_format=save_format)
save方法是用于保存网络模型到特定文件
625941bf3c8af77a43ae36d0
def parse_image_meta(img_meta): <NEW_LINE> <INDENT> ori_shape = img_meta[0:3] <NEW_LINE> img_shape = img_meta[3:6] <NEW_LINE> pad_shape = img_meta[6:9] <NEW_LINE> scale_factor = img_meta[9] <NEW_LINE> flip = img_meta[10] <NEW_LINE> return { 'ori_shape': ori_shape.astype(np.int32), 'img_shape': img_shape.astype(np.int32), 'pad_shape': pad_shape.astype(np.int32), 'scale_factor': scale_factor.astype(np.float32), 'flip': flip.astype(np.bool), }
Parses an array that contains image attributes to its components. Args --- meta: [11] Returns --- a dict of the parsed values.
625941bfd10714528d5ffc12
def event_info(self): <NEW_LINE> <INDENT> event_dict = self.event_dict() <NEW_LINE> event_string = ( 'M %(mmi)s %(date)s %(time)s ' '%(latitude-name)s: %(latitude-value)s ' '%(longitude-name)s: %(longitude-value)s ' '%(depth-name)s: %(depth-value)s%(depth-unit)s ' '%(located-label)s %(distance)s%(distance-unit)s ' '%(bearing-compass)s ' '%(direction-relation)s %(place-name)s') % event_dict <NEW_LINE> return event_string
Get a short paragraph describing the event. :return: A string describing the event e.g. 'M 5.0 26-7-2012 2:15:35 Latitude: 0°12'36.00"S Longitude: 124°27'0.00"E Depth: 11.0km Located 2.50km SSW of Tondano' :rtype: str
625941bfa8370b77170527d2
def get_angle(self): <NEW_LINE> <INDENT> return 0
Return the angle [deg] of these axes relative to the *c*, *a* axes.
625941bf851cf427c661a444
def _connection(self): <NEW_LINE> <INDENT> requests_session = requests.Session() <NEW_LINE> retries = Retry(total=self.retries, connect=self.retries, read=self.retries, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504]) <NEW_LINE> if sys.platform in ['darwin']: <NEW_LINE> <INDENT> requests_session.trust_env = False <NEW_LINE> <DEDENT> requests_session.mount('http://', HTTPAdapter(max_retries=retries)) <NEW_LINE> return requests_session
Enables retries to cope with CoreNLP server boot-up latency. See: http://stackoverflow.com/a/35504626 Create a new object per connection to make multiprocessing threadsafe. :return:
625941bfbe383301e01b53bd
def _integral_points_PALP(self): <NEW_LINE> <INDENT> if not self.is_compact(): <NEW_LINE> <INDENT> raise ValueError('can only enumerate points in a compact polyhedron') <NEW_LINE> <DEDENT> lp = self.lattice_polytope(True) <NEW_LINE> try: <NEW_LINE> <INDENT> del lp._points <NEW_LINE> del lp._npoints <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self.is_lattice_polytope(): <NEW_LINE> <INDENT> return list(lp.points()) <NEW_LINE> <DEDENT> return [p for p in lp.points() if self.contains(p)]
Return the integral points in the polyhedron using PALP. This method is for testing purposes and will eventually be removed. OUTPUT: The list of integral points in the polyhedron. If the polyhedron is not compact, a ``ValueError`` is raised. EXAMPLES:: sage: Polyhedron(vertices=[(-1,-1),(1,0),(1,1),(0,1)])._integral_points_PALP() [M(-1, -1), M(0, 1), M(1, 0), M(1, 1), M(0, 0)] sage: Polyhedron(vertices=[(-1/2,-1/2),(1,0),(1,1),(0,1)]).lattice_polytope(True).points() M(-1, -1), M(-1, 0), M( 0, -1), M( 1, 1), M( 0, 1), M( 1, 0), M( 0, 0) in 2-d lattice M sage: Polyhedron(vertices=[(-1/2,-1/2),(1,0),(1,1),(0,1)])._integral_points_PALP() [M(1, 1), M(0, 1), M(1, 0), M(0, 0)]
625941bff548e778e58cd4ae
def widgets(self): <NEW_LINE> <INDENT> label_title = Label(self, text='CFIR - Date de notification & confinement') <NEW_LINE> label_title.grid(row=0, column=0, columnspan=2, pady=10) <NEW_LINE> """Date de notification""" <NEW_LINE> label_date = Label(self, text='Date de notification (JJ/MM/AAAA) :') <NEW_LINE> self.entry_date = Entry(self, justify='center') <NEW_LINE> self.var_error = StringVar() <NEW_LINE> self.error = Entry(self, bd=0, bg='#EFEFEF', textvariable=self.var_error, fg='red') <NEW_LINE> label_date.grid(row=1, column=0, padx=10, pady=10) <NEW_LINE> self.entry_date.grid(row=1, column=1) <NEW_LINE> self.error.grid(row=2, column=0) <NEW_LINE> """Bouton 'Calculer'""" <NEW_LINE> btn_calculate = Button(self, text='Calculer', command=self.calculate_date) <NEW_LINE> btn_calculate.grid(row=2, column=1) <NEW_LINE> """Calcul de la date de notification hors confinement""" <NEW_LINE> label_date2 = Label(self, text='Date de notification hors confinement :') <NEW_LINE> self.var_entry_date2 = StringVar() <NEW_LINE> entry_date2 = Entry(self, justify='center', bd=0, bg='#EFEFEF', textvariable=self.var_entry_date2) <NEW_LINE> label_date2.grid(row=3, column=0) <NEW_LINE> entry_date2.grid(row=3, column=1, padx=10, pady=10) <NEW_LINE> """Bouton pour quitter l'application""" <NEW_LINE> btn_exit = Button(self, text='Quitter', command=self.quit) <NEW_LINE> btn_exit.grid(row=4, column=0, columnspan=2, pady=10)
Configuration des widgets
625941bf4527f215b584c38c
def test_base_block_default_terminals(self): <NEW_LINE> <INDENT> default_input = Terminal.get_default_terminal_on_class( Block, TerminalType.input) <NEW_LINE> default_output = Terminal.get_default_terminal_on_class( Block, TerminalType.output) <NEW_LINE> self.assertIsInstance(default_input, Terminal) <NEW_LINE> self.assertIsInstance(default_output, Terminal) <NEW_LINE> self.assertEqual(default_input.id, DEFAULT_TERMINAL) <NEW_LINE> self.assertEqual(default_output.id, DEFAULT_TERMINAL)
Asserts that the base block has default terminals
625941bf5f7d997b871749c7
def retrieve_data(self, hatrac_store): <NEW_LINE> <INDENT> raise NotImplementedError()
Download raw CSV pointcloud data from Hatrac object store and register it. Registered pointclouds are saved to self.n1, self.n2
625941bfd18da76e23532405
def get_copybutton_path(): <NEW_LINE> <INDENT> import easydev <NEW_LINE> try: <NEW_LINE> <INDENT> packagedir = easydev.__path__[0] <NEW_LINE> packagedir = os.path.realpath(pj(packagedir, "share")) <NEW_LINE> os.listdir(packagedir) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> packagedir = easydev.__path__[0] <NEW_LINE> packagedir = os.path.realpath(pj(packagedir, "..", "share")) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise IOError("could not find data directory") <NEW_LINE> <DEDENT> <DEDENT> return pj(packagedir, "copybutton.js")
Return the path where the to find the copybutton javascript Copy the copybutton.js javascript in the share directory of easydev so that it is accesible by all packages that required it by typing: .. doctest:: >>> from easydev import get_copybutton_path >>> p = get_copybutton_path() It can then be added with a Sphinx configuration file:: jscopybutton_path = easydev.copybutton.get_copybutton_path()
625941bf15fb5d323cde0a3e
def underlying(self, **config): <NEW_LINE> <INDENT> return self._parse_response(attribute='underlying', **config)
Return the underlying symbol for the contract.
625941bf76e4537e8c3515a3
def drop_least_imp_cols(rf_model, df, n): <NEW_LINE> <INDENT> importance = rf_model.feature_importances_ <NEW_LINE> indices = np.argsort(importance) <NEW_LINE> indices_to_drop = indices[:n] <NEW_LINE> cols_to_drop = np.array(df.columns)[indices_to_drop] <NEW_LINE> df = df.drop(columns=cols_to_drop) <NEW_LINE> return df, cols_to_drop
Drop the n least important features from rf model.
625941bf442bda511e8be34e
def pre_popup_menu_callback(self, *args): <NEW_LINE> <INDENT> state, sensitive = self.shell.props.shell_player.get_playing() <NEW_LINE> if not state: <NEW_LINE> <INDENT> sensitive = False <NEW_LINE> <DEDENT> self.popup.set_sensitive('ev_compact_add_to_playing_menu_item', sensitive) <NEW_LINE> self.popup.set_sensitive('ev_compact_play_next_track_menu_item', sensitive) <NEW_LINE> if not self.external_plugins: <NEW_LINE> <INDENT> self.external_plugins = CreateExternalPluginMenu("ev_compact_entryview", 6, self.popup) <NEW_LINE> self.external_plugins.create_menu('entryview_compact_popup_menu') <NEW_LINE> <DEDENT> self.playlist_menu_item_callback()
Callback when the popup menu is about to be displayed
625941bfa8ecb033257d3001
def test_variants(self): <NEW_LINE> <INDENT> for r, t in self._allRows(): <NEW_LINE> <INDENT> l = langtag(r['Lang_Id']) <NEW_LINE> if t.vars is None and l.vars is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if sorted(t.vars) != sorted(l.vars): <NEW_LINE> <INDENT> self.fail("{Lang_Id} and {likely_subtag} have different variants".format(**r)) <NEW_LINE> <DEDENT> for v in t.vars: <NEW_LINE> <INDENT> if v not in self.iana.variant: <NEW_LINE> <INDENT> self.fail("{likely_subtag} has bad variant {0}".format(v, **r))
Test that all variants are in IANA
625941bfc4546d3d9de72964
def EbookIterator(*args, **kwargs): <NEW_LINE> <INDENT> from calibre.ebooks.oeb.iterator.book import EbookIterator <NEW_LINE> return EbookIterator(*args, **kwargs)
For backwards compatibility
625941bf187af65679ca5050
def addAtHead(self, val): <NEW_LINE> <INDENT> node = ListNode(val) <NEW_LINE> self.size += 1 <NEW_LINE> node.next = self.head <NEW_LINE> self.head = node
Add a node of value val before the first element of the linked list. After the insertion, the new node will be the first node of the linked list. :type val: int :rtype: void
625941bfad47b63b2c509eb2
def clean_subconstituency_id(self): <NEW_LINE> <INDENT> subconstituency_id = self.cleaned_data['subconstituency_id'] <NEW_LINE> try: <NEW_LINE> <INDENT> subconstituency = SubConstituency.objects.get(id=subconstituency_id) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise forms.ValidationError(_('Subconstituency does not exist.')) <NEW_LINE> <DEDENT> self.cleaned_data['_subconstituency'] = subconstituency <NEW_LINE> return subconstituency_id
Ensure that the subconstituency_id refers to an existing subconstituency
625941bf10dbd63aa1bd2ad8
def directory_entries_equal(dir1, dir2): <NEW_LINE> <INDENT> dir1_paths, dir2_paths = map(extract_directory_paths, map(ensure_trailing_slashes_in_directory, (dir1, dir2))) <NEW_LINE> return dir1_paths == dir2_paths
Compare two directory entries, considered equal if only the path items match.
625941bf5e10d32532c5ee59
def get_name_info(self): <NEW_LINE> <INDENT> return { "name": self.get_name(), "shortdesc":"", "longdesc": "", "parameters": [], "actions": [], }
Get structured agent's info, only name is populated
625941bf4a966d76dd550f3f
def tankShot (targetBox, obstacleBox, x0, y0, v, theta, g = 9.8): <NEW_LINE> <INDENT> angle= getNumberInput("Give an angle:", [0, np.Inf]) <NEW_LINE> velocity= getNumberInput("Give a velocity:", [0, np.Inf]) <NEW_LINE> (x,y)= trajectory (x0,y0,velocity, angle,g = 9.8) <NEW_LINE> (x,y)= endTrajectoryAtIntersection (x,y,obstacleBox) <NEW_LINE> if firstInBox(x,y,targetBox) >=0: <NEW_LINE> <INDENT> return firstInBox(x,y,targetBox)
executes one tank shot parameters ---------- targetBox : tuple (left,right,bottom,top) location of the target obstacleBox : tuple (left,right,bottom,top) location of the central obstacle x0,y0 :floats origin of the shot v : float velocity of the shot theta : float angle of the shot g : float accel due to gravity (default 9.8) returns -------- int code: 0 = miss, 1 = hit hit if trajectory intersects target box before intersecting obstacle box draws the truncated trajectory in current plot window
625941bfac7a0e7691ed4003
def test_medias_method_with_grappelli(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import grappelli <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> settings.INSTALLED_APPS += ('grappelli', ) <NEW_LINE> self.assertIn('grappelli', settings.INSTALLED_APPS) <NEW_LINE> admin = BandAdmin(Band, self.site) <NEW_LINE> medias = admin.media <NEW_LINE> self.assertTrue(len(medias._css) > 0) <NEW_LINE> self.assertIn('all', medias._css) <NEW_LINE> self.assertTrue(len(medias._css['all']) == 1) <NEW_LINE> self.assertIn('grappelli', medias._css['all'][0])
Tests if the right css ile is triggered when grappelli is installed.
625941bf15baa723493c3ea6
def __init__(self, components=[]): <NEW_LINE> <INDENT> self.compo = components
Initializes an instance of the Vector class. components: a list of components coordinates. It starts with x, y, z, ... and so on.
625941bfd486a94d0b98e077
def getFt(self, b, h): <NEW_LINE> <INDENT> return self.Ft
Return the value of Ft. :param b: section width. :param h: section depth
625941bf23e79379d52ee498
def search_code_keyword_in_note(note, keyword_set, all_code_area): <NEW_LINE> <INDENT> result_list = list() <NEW_LINE> for each_keyword in keyword_set: <NEW_LINE> <INDENT> found = False <NEW_LINE> for each_code in all_code_area: <NEW_LINE> <INDENT> for i, each_line in enumerate(each_code.splitlines()): <NEW_LINE> <INDENT> if str(each_keyword).lower() in each_line.lower(): <NEW_LINE> <INDENT> result_list.append(const.SEARCH_RESULT_INFO(each_keyword, each_line, i + 1)) <NEW_LINE> found = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not found: <NEW_LINE> <INDENT> result_list.clear() <NEW_LINE> <DEDENT> <DEDENT> return result_list
在某篇特定笔记中查找所有代码块, 如果查询的所有关键词都存在, 则加入结果列表 :param note: note 实例 :param keyword_set: set(), 用户查询关键词集合 :param all_code_area: list(), 包含了该笔记中所有的代码块 :return: list(), 搜索结果
625941bf460517430c3940be
def _build_encoder_cell(self, hparams): <NEW_LINE> <INDENT> return model_helper.create_rnn_cell( num_units=hparams.num_units, num_layers=hparams.num_layers, keep_prob=hparams.keep_prob)
Build a multi-layer RNN cell that can be used by encoder.
625941bf5fcc89381b1e15ef
def contact_create_update(contact_object, raise_errors=False, log_events=True, log_transitions=True): <NEW_LINE> <INDENT> cs = contact_synchronizer.ContactSynchronizer( log_events=log_events, log_transitions=log_transitions, raise_errors=raise_errors, ) <NEW_LINE> cs.event('run', contact_object) <NEW_LINE> outputs = list(cs.outputs) <NEW_LINE> del cs <NEW_LINE> logger.info('contact_synchronizer(%r) finished with %d outputs', contact_object, len(outputs)) <NEW_LINE> if not outputs or not outputs[-1] or isinstance(outputs[-1], Exception): <NEW_LINE> <INDENT> if outputs and isinstance(outputs[-1], Exception): <NEW_LINE> <INDENT> logger.error('contact_synchronizer(%r) failed with: %r', contact_object, outputs[-1]) <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> logger.info('contact_synchronizer(%r) OK', contact_object) <NEW_LINE> return True
If `epp_id` field is empty, creates a new Contact or Registrant on back-end. Otherwise update existing object from `contact_object` info. Returns False if error happened, or raise Exception if `raise_errors` is True,
625941bf4428ac0f6e5ba724
def set_size(self, text=""): <NEW_LINE> <INDENT> self.size = text
set size of the monster object
625941bf4c3428357757c25c
def set_due_complete(self): <NEW_LINE> <INDENT> self._set_due_complete(True)
Set due complete :return: None
625941bf6fb2d068a760efcd
def __getitem__(self, index: int) -> Tuple[Any, Any]: <NEW_LINE> <INDENT> curr_index = index <NEW_LINE> for x in range(self.total): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> path, target = self.samples[curr_index] <NEW_LINE> sample = self.loader(path) <NEW_LINE> break <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> curr_index = np.random.randint(0, self.total) <NEW_LINE> <DEDENT> <DEDENT> if self.transform is not None: <NEW_LINE> <INDENT> sample = self.transform(sample) <NEW_LINE> <DEDENT> if self.target_transform is not None: <NEW_LINE> <INDENT> target = self.target_transform(target) <NEW_LINE> <DEDENT> return sample, target
Args: index (int): Index Returns: tuple: (sample, target) where target is class_index of the target class.
625941bf21bff66bcd684887
def freeze_freeze(sbox): <NEW_LINE> <INDENT> sbox.build(create_wc=False, read_only=True) <NEW_LINE> second_repo_dir, _ = sbox.add_repo_path('backup') <NEW_LINE> svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", sbox.repo_dir, second_repo_dir) <NEW_LINE> if svntest.main.is_fs_type_fsx() or (svntest.main.is_fs_type_fsfs() and svntest.main.options.server_minor_version < 9): <NEW_LINE> <INDENT> svntest.actions.run_and_verify_svnadmin([], None, 'setuuid', second_repo_dir) <NEW_LINE> <DEDENT> svntest.actions.run_and_verify_svnadmin(None, [], 'freeze', '--', sbox.repo_dir, svntest.main.svnadmin_binary, 'freeze', '--', second_repo_dir, sys.executable, '-c', 'True') <NEW_LINE> arg_file = sbox.get_tempname() <NEW_LINE> svntest.main.file_write(arg_file, "%s\n%s\n" % (sbox.repo_dir, second_repo_dir)) <NEW_LINE> svntest.actions.run_and_verify_svnadmin(None, [], 'freeze', '-F', arg_file, '--', sys.executable, '-c', 'True')
svnadmin freeze svnadmin freeze (some-cmd)
625941bfbe383301e01b53be
def test_messages_destroy(self): <NEW_LINE> <INDENT> client = app.test_client() <NEW_LINE> result = client.delete('/messages/14?user_id=1', follow_redirects=True) <NEW_LINE> self.assertEqual(result.status_code, 404) <NEW_LINE> deleted_message = Message.query.filter(Message.id == 14).first() <NEW_LINE> self.assertEqual(deleted_message, None)
test delete message
625941bf01c39578d7e74d6d
def _parent_choices(self): <NEW_LINE> <INDENT> policies = self.api.list_policies() <NEW_LINE> choices = [('', '----------')] <NEW_LINE> choices.extend( [(p['id'], p['name']) for p in policies if not p['inherits_from']]) <NEW_LINE> return choices
Get a list of parent choices used for inheritance
625941bf796e427e537b04f6
def change_states_of(): <NEW_LINE> <INDENT> pass
This gets called when a poll has finished. It returns a dictionary with proposal uid as key and new state as value. Like: {'<uid>':'approved', '<uid>', 'denied'} It's not required to do, but if it isn't done, the proposals won't change state and you have to do it manually
625941bf7047854f462a133f
def bad_key(): <NEW_LINE> <INDENT> {}['bar']
When run, produces a KeyError.
625941bf07f4c71912b113b3
def update_table_description( self, table_id, description, project_id=bq_default_project(), dataset_id=bq_default_dataset() ): <NEW_LINE> <INDENT> table_ref = self.get_table_ref(dataset_id=dataset_id, table_id=table_id, project_id=project_id) <NEW_LINE> table = self.client.get_table(table_ref) <NEW_LINE> if table.description == description: <NEW_LINE> <INDENT> logging.info("No changes to table description required") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> table.description = description <NEW_LINE> self.client.update_table(table, ["description"]) <NEW_LINE> <DEDENT> except exceptions.BadRequest as error: <NEW_LINE> <INDENT> raise error
Performs a table update to fill in description for the table. Parameters: table_id (string): BigQuery table ID. description (string): The descriptive text to describe the content of the table. project_id (string): BigQuery project ID. dataset_id (string): BigQuery dataset ID. Raises: BadRequest if the update fails.
625941bf9f2886367277a7c2
def rankdata(a, method='average'): <NEW_LINE> <INDENT> if method not in ('average', 'min', 'max', 'dense', 'ordinal'): <NEW_LINE> <INDENT> raise ValueError('unknown method "{0}"'.format(method)) <NEW_LINE> <DEDENT> arr = np.ravel(np.asarray(a)) <NEW_LINE> algo = 'mergesort' if method == 'ordinal' else 'quicksort' <NEW_LINE> sorter = np.argsort(arr, kind=algo) <NEW_LINE> inv = np.empty(sorter.size, dtype=np.intp) <NEW_LINE> inv[sorter] = np.arange(sorter.size, dtype=np.intp) <NEW_LINE> if method == 'ordinal': <NEW_LINE> <INDENT> return inv + 1 <NEW_LINE> <DEDENT> arr = arr[sorter] <NEW_LINE> obs = np.r_[True, arr[1:] != arr[:-1]] <NEW_LINE> dense = obs.cumsum()[inv] <NEW_LINE> if method == 'dense': <NEW_LINE> <INDENT> return dense <NEW_LINE> <DEDENT> count = np.r_[np.nonzero(obs)[0], len(obs)] <NEW_LINE> if method == 'max': <NEW_LINE> <INDENT> return count[dense] <NEW_LINE> <DEDENT> if method == 'min': <NEW_LINE> <INDENT> return count[dense - 1] + 1 <NEW_LINE> <DEDENT> return .5 * (count[dense] + count[dense - 1] + 1)
Assign ranks to data, dealing with ties appropriately. Ranks begin at 1. The `method` argument controls how ranks are assigned to equal values. See [1]_ for further discussion of ranking methods. Parameters ---------- a : array_like The array of values to be ranked. The array is first flattened. method : str, optional The method used to assign ranks to tied elements. The options are 'average', 'min', 'max', 'dense' and 'ordinal'. 'average': The average of the ranks that would have been assigned to all the tied values is assigned to each value. 'min': The minimum of the ranks that would have been assigned to all the tied values is assigned to each value. (This is also referred to as "competition" ranking.) 'max': The maximum of the ranks that would have been assigned to all the tied values is assigned to each value. 'dense': Like 'min', but the rank of the next highest element is assigned the rank immediately after those assigned to the tied elements. 'ordinal': All values are given a distinct rank, corresponding to the order that the values occur in `a`. The default is 'average'. Returns ------- ranks : ndarray An array of length equal to the size of `a`, containing rank scores. References ---------- .. [1] "Ranking", http://en.wikipedia.org/wiki/Ranking Examples -------- >>> from scipy.stats import rankdata >>> rankdata([0, 2, 3, 2]) array([ 1. , 2.5, 4. , 2.5]) >>> rankdata([0, 2, 3, 2], method='min') array([ 1, 2, 4, 2]) >>> rankdata([0, 2, 3, 2], method='max') array([ 1, 3, 4, 3]) >>> rankdata([0, 2, 3, 2], method='dense') array([ 1, 2, 3, 2]) >>> rankdata([0, 2, 3, 2], method='ordinal') array([ 1, 2, 4, 3])
625941bf30c21e258bdfa3ce
def get_engage_group(character): <NEW_LINE> <INDENT> engagegroup = [character] <NEW_LINE> for key in character.db.Combat_Range: <NEW_LINE> <INDENT> if character.db.Combat_Range[key] == 0 and not character.db.Combat_Range[key] in engagegroup and key != character: <NEW_LINE> <INDENT> engagegroup.append(key) <NEW_LINE> <DEDENT> <DEDENT> return engagegroup
Returns a list of the other characters this character is engaged with, including themself.
625941bf24f1403a92600a9b
def alive(self): <NEW_LINE> <INDENT> if not self.stack.parseInner(self.rxPacket): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> data = self.rxPacket.data <NEW_LINE> body = self.rxPacket.body.data <NEW_LINE> remote = self.stack.remotes[self.reid] <NEW_LINE> if not remote.joined: <NEW_LINE> <INDENT> remote.refresh(alive=None) <NEW_LINE> emsg = "Alivent {0}. Must be joined first\n".format(self.stack.name) <NEW_LINE> console.terse(emsg) <NEW_LINE> self.stack.incStat('unjoined_alive_attempt') <NEW_LINE> self.nack(kind=raeting.pcktKinds.unjoined) <NEW_LINE> return <NEW_LINE> <DEDENT> if not remote.allowed: <NEW_LINE> <INDENT> remote.refresh(alive=None) <NEW_LINE> emsg = "Alivent {0}. Must be allowed first\n".format(self.stack.name) <NEW_LINE> console.terse(emsg) <NEW_LINE> self.stack.incStat('unallowed_alive_attempt') <NEW_LINE> self.nack(kind=raeting.pcktKinds.unallowed) <NEW_LINE> return <NEW_LINE> <DEDENT> if not remote.validRsid(self.sid): <NEW_LINE> <INDENT> emsg = "Stale sid '{0}' in packet\n".format(self.sid) <NEW_LINE> console.terse(emsg) <NEW_LINE> self.stack.incStat('stale_sid_message_attempt') <NEW_LINE> self.remove() <NEW_LINE> return <NEW_LINE> <DEDENT> if self.reid not in self.stack.remotes: <NEW_LINE> <INDENT> msg = "Invalid remote destination estate id '{0}'\n".format(self.reid) <NEW_LINE> console.terse(emsg) <NEW_LINE> self.stack.incStat('invalid_remote_eid') <NEW_LINE> self.remove() <NEW_LINE> return <NEW_LINE> <DEDENT> body = odict() <NEW_LINE> packet = packeting.TxPacket(stack=self.stack, kind=raeting.pcktKinds.ack, embody=body, data=self.txData) <NEW_LINE> try: <NEW_LINE> <INDENT> packet.pack() <NEW_LINE> <DEDENT> except raeting.PacketError as ex: <NEW_LINE> <INDENT> console.terse(str(ex) + '\n') <NEW_LINE> self.stack.incStat("packing_error") <NEW_LINE> self.remove(self.rxPacket.index) <NEW_LINE> return <NEW_LINE> <DEDENT> self.transmit(packet) <NEW_LINE> console.concise("Alivent {0}. 
Do ack alive at {1}\n".format(self.stack.name, self.stack.store.stamp)) <NEW_LINE> remote.refresh(alive=True) <NEW_LINE> self.remove() <NEW_LINE> console.concise("Alivent {0}. Done at {1}\n".format( self.stack.name, self.stack.store.stamp)) <NEW_LINE> self.stack.incStat("alive_complete")
Process alive packet
625941bf236d856c2ad44709
def str_2_bool(bool_str): <NEW_LINE> <INDENT> lower_str = bool_str.lower() <NEW_LINE> if 'true'.startswith(lower_str) or 'yes'.startswith(lower_str): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif 'false'.startswith(lower_str) or 'no'.startswith(lower_str): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ToyzError( "'{0}' did not match a boolean expression " " (true/false, yes/no, t/f, y/n)".format(bool_str))
Case independent function to convert a string representation of a boolean (``'true'``/``'false'``, ``'yes'``/``'no'``) into a ``bool``. This is case insensitive, and will also accept part of a boolean string (``'t'``/``'f'``, ``'y'``/``'n'``). Raises a :py:class:`toyz.utils.errors.ToyzError` if an invalid expression is entered.
625941bf656771135c3eb79f
def _make_mock_pubsub_message(task_args): <NEW_LINE> <INDENT> msg = mock.create_autospec(message.Message) <NEW_LINE> msg.data = _make_task_bytes(task_args) <NEW_LINE> msg.message_id = str(uuid.uuid4()) <NEW_LINE> return msg
Returns a mock pubsub message. Args: task_args: Proto message to use as args to the task to put in the pubsub message.
625941bf6e29344779a62547
def split_dictionary(train_path, valid_path=None, test_path=None): <NEW_LINE> <INDENT> source_dic = codecs.open(train_path, "r", "utf-8").readlines() <NEW_LINE> train_dic, valid_dic, test_dic = [], [], [] <NEW_LINE> if valid_path: <NEW_LINE> <INDENT> valid_dic = codecs.open(valid_path, "r", "utf-8").readlines() <NEW_LINE> <DEDENT> if test_path: <NEW_LINE> <INDENT> test_dic = codecs.open(test_path, "r", "utf-8").readlines() <NEW_LINE> <DEDENT> dic = collect_pronunciations(source_dic) <NEW_LINE> for i, word in enumerate(dic): <NEW_LINE> <INDENT> for pronunciations in dic[word]: <NEW_LINE> <INDENT> if i % 20 == 0 and not valid_path: <NEW_LINE> <INDENT> valid_dic.append(word + ' ' + pronunciations) <NEW_LINE> <DEDENT> elif (i % 20 == 1 or i % 20 == 2) and not test_path: <NEW_LINE> <INDENT> test_dic.append(word + ' ' + pronunciations) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> train_dic.append(word + ' ' + pronunciations) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return train_dic, valid_dic, test_dic
Split source dictionary to train, validation and test sets.
625941bf7d847024c06be1ec
def sampleBeatHunterLikelihood(signal, begin, end, referencies, RR_estimat, index, **kwargs): <NEW_LINE> <INDENT> from numpy import mean, correlate, sum, median, diff,array <NEW_LINE> percentil = 0.80 <NEW_LINE> referencies[0] = getPercentil(referencies[0],referencies[1], percentil) <NEW_LINE> alRescate = [correlate(signal[begin:end], model_beat, mode = 'valid') for m, model_beat in enumerate(referencies[0])] <NEW_LINE> alRescate = mean(alRescate, 0) <NEW_LINE> pics = array(_buscaPics(alRescate, int(round(float(end-begin+1)/RR_estimat + 2)), 15, left = 30, right = 30))+10 <NEW_LINE> candidats = _esBatec(signal[begin:end], pics, referencies[0], index) <NEW_LINE> return candidats
referencies[0] media del grupo referencies[1] quantitat elements representats
625941bfd486a94d0b98e078
def test_pretty_changes(self): <NEW_LINE> <INDENT> self.assertEqual( changes.pretty_change({'action': 'DELETE'}), 'Deleted') <NEW_LINE> self.assertEqual( changes.pretty_change({'action': 'RESERVE'}), 'Reserved') <NEW_LINE> self.assertEqual( changes.pretty_change({'action': 'KEEP'}), 'Mentioned but not modified') <NEW_LINE> self.assertEqual( changes.pretty_change({'action': 'DESIGNATE', 'destination': ['123', '43', 'a', '2']}), 'Moved to 123-43-a-2') <NEW_LINE> node = {'text': 'Some Text'} <NEW_LINE> change = {'action': 'PUT', 'node': node} <NEW_LINE> self.assertEqual( changes.pretty_change(change), 'Modified: Some Text') <NEW_LINE> change['action'] = 'POST' <NEW_LINE> self.assertEqual( changes.pretty_change(change), 'Added: Some Text') <NEW_LINE> node['title'] = 'A Title' <NEW_LINE> self.assertEqual( changes.pretty_change(change), 'Added (title: A Title): Some Text') <NEW_LINE> change['action'] = 'PUT' <NEW_LINE> self.assertEqual( changes.pretty_change(change), 'Modified (title: A Title): Some Text') <NEW_LINE> change['field'] = '[title]' <NEW_LINE> self.assertEqual( changes.pretty_change(change), 'Title changed to: A Title') <NEW_LINE> del node['title'] <NEW_LINE> change['field'] = '[a field]' <NEW_LINE> self.assertEqual( changes.pretty_change(change), 'A Field changed to: Some Text')
Verify the output for a variety of "changes"
625941bf4f6381625f114970
def update(self, dict_or_items=None): <NEW_LINE> <INDENT> if dict_or_items is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if isinstance(dict_or_items, dict) and not isinstance(dict_or_items, OrderedDict): <NEW_LINE> <INDENT> if not set(self.keys()) == set(dict_or_items.keys()): <NEW_LINE> <INDENT> raise TypeError('undefined order, cannot get items from dict') <NEW_LINE> <DEDENT> items = dict_or_items.items() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> items = dict_or_items <NEW_LINE> <DEDENT> super(OrderedDictionary, self).update(items)
Update from another OrderedDict or dict or sequence of (key, value) pairs. If provided an unordered dictionary, it is only accepted if the existing keys already exist.
625941bf7b180e01f3dc4735
def maj(self): <NEW_LINE> <INDENT> if self.commande: <NEW_LINE> <INDENT> self.commande.nom_categorie = self.nom_categorie <NEW_LINE> self.commande.aide_courte = self._aide_courte <NEW_LINE> self.commande.aide_longue = str(self.aide_longue) <NEW_LINE> <DEDENT> for cle, couple in self.etats.items(): <NEW_LINE> <INDENT> refus = couple[0] <NEW_LINE> visible = couple[1] <NEW_LINE> actions = couple[2] <NEW_LINE> if cle in importeur.perso.etats: <NEW_LINE> <INDENT> etat = importeur.perso.etats[cle] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> etat = importeur.perso.ajouter_etat(cle) <NEW_LINE> <DEDENT> etat.msg_refus = refus.capitalize() <NEW_LINE> if not etat.msg_refus.endswith((".", "?", "!")): <NEW_LINE> <INDENT> etat.msg_refus += "." <NEW_LINE> <DEDENT> etat.msg_visible = visible.lower().strip(" .?!") <NEW_LINE> etat.act_autorisees = actions.split(" ")
Mise à jour de la commande dynamique.
625941bf38b623060ff0ad21
def get_callgraph(self): <NEW_LINE> <INDENT> cmd = [self.OBJDUMP_BIN, '-d'] + self._objdump_opts + [self.binary] <NEW_LINE> f = sp.run(cmd, stdout=sp.PIPE, check=True).stdout <NEW_LINE> g = defaultdict(set) <NEW_LINE> rc = re.compile(r'<(.*?)>:') <NEW_LINE> rb = re.compile(r'<([^+>]+)') <NEW_LINE> current = None <NEW_LINE> for l in f.decode('ascii').split('\n'): <NEW_LINE> <INDENT> m = rc.search(l) <NEW_LINE> if m: <NEW_LINE> <INDENT> current = m.group(1) <NEW_LINE> g[current] <NEW_LINE> continue <NEW_LINE> <DEDENT> fields = l.split('\t', 2) <NEW_LINE> if len(fields) < 2: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ia = fields[-1].split() <NEW_LINE> instr = ia[0] <NEW_LINE> trg = ia[-1] <NEW_LINE> if self._is_branch(instr) and '<' in trg: <NEW_LINE> <INDENT> target = rb.search(trg).group(1) <NEW_LINE> if current != target: <NEW_LINE> <INDENT> g[current].add(target) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return g
Return adjacency list of all functions. Leaves are included. Self-references are excluded. Only direct calls are accounted for, indirect calls and storing a pointer to a function are not accounted for.
625941bf6aa9bd52df036cd6
def run_forever(self): <NEW_LINE> <INDENT> return self.gw.run()
Run the client (e.g. the :class:`GatewayClient`) in the current greenlet.
625941bfb5575c28eb68df31
def updateValues(self, station, sweeptparameters=None): <NEW_LINE> <INDENT> def recursiveUpdataValues(qc_inst, data, sweeptparameters): <NEW_LINE> <INDENT> for key, val in data.items(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> inst_param = qc_inst.parameters[key] <NEW_LINE> value = inst_param.get_latest() <NEW_LINE> try: <NEW_LINE> <INDENT> floatvalue = float(value) <NEW_LINE> if val.get('annotationformatter'): <NEW_LINE> <INDENT> valuestr = val.get( 'annotationformatter').format(floatvalue) <NEW_LINE> <DEDENT> elif floatvalue > 1000 or floatvalue < 0.1: <NEW_LINE> <INDENT> valuestr = "{:.2e}".format(floatvalue) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> valuestr = "{:.2f}".format(floatvalue) <NEW_LINE> <DEDENT> if inst_param in sweeptparameters: <NEW_LINE> <INDENT> val['update'] = True <NEW_LINE> <DEDENT> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> valuestr = str(value) <NEW_LINE> <DEDENT> val['value'] = valuestr <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> subinst = qc_inst.submodules[key] <NEW_LINE> recursiveUpdataValues(subinst, val, sweeptparameters) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for instrument, parameters in self._data.items(): <NEW_LINE> <INDENT> if instrument == 'font': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> qc_inst = station.components[instrument] <NEW_LINE> recursiveUpdataValues(qc_inst, parameters, sweeptparameters)
Update the data with actual voltages from the QDac
625941bf8c3a8732951582eb
def addBefore(self, new_element, node): <NEW_LINE> <INDENT> previous = None <NEW_LINE> current = self.head <NEW_LINE> while current.value != node.value and current.next: <NEW_LINE> <INDENT> previous = current <NEW_LINE> current = current.next <NEW_LINE> <DEDENT> if current.value == node.value: <NEW_LINE> <INDENT> if self.head == node: <NEW_LINE> <INDENT> new_element.next = self.head <NEW_LINE> self.head = new_element <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_element.next = previous.next <NEW_LINE> previous.next = new_element
add new element/item before a position in the Linked List Arguments: - new_element: an object that reference to a new element to be added - node: an object that reference to an object that tells the method to where place the new element/item
625941bfd7e4931a7ee9de50
def test_buckets(): <NEW_LINE> <INDENT> h1 = HistogramBucketDict(guesslogbuckets(0, 500, numbuckets=10)) <NEW_LINE> for x in range(0, 600): <NEW_LINE> <INDENT> h1.add(x) <NEW_LINE> <DEDENT> h2 = HistogramBucketDict(guesslogbuckets(0, 500, numbuckets=30)) <NEW_LINE> for x in range(0, 600): <NEW_LINE> <INDENT> h2.add(x) <NEW_LINE> <DEDENT> h3 = HistogramBucketDict(uniformbuckets(0, 500, numbuckets=10)) <NEW_LINE> for x in range(0, 600): <NEW_LINE> <INDENT> h3.add(x) <NEW_LINE> <DEDENT> h4 = HistogramBucketDict(uniformbuckets(0, 500, numbuckets=30)) <NEW_LINE> for x in range(0, 600): <NEW_LINE> <INDENT> h4.add(x) <NEW_LINE> <DEDENT> print("===") <NEW_LINE> print(h1) <NEW_LINE> print("===") <NEW_LINE> print(h2) <NEW_LINE> print("===") <NEW_LINE> print(h3) <NEW_LINE> print("===") <NEW_LINE> print(h4) <NEW_LINE> print("===") <NEW_LINE> gnuplot_histograms([h1, h2, h3, h4], ['log 10', 'log 30', 'uniform 10', 'uniform 30'])
Test HistogramBucketDict and gnuplot output.
625941bf56ac1b37e6264107
def perform_augmentation(self, sequence_id, coordinates_1, coordinates_2): <NEW_LINE> <INDENT> sequence_boundary = self.sequence_boundaries[sequence_id] <NEW_LINE> result = augment_pair_coordinates(coordinates_1, coordinates_2, randomize_positions=self.randomize_positions, mean_scale_factor=self.mean_scale_factor, p_horizontal_flip=self.p_horizontal_flip ) <NEW_LINE> return result
Augments coordinates for training Arguments: coordinates -- all x coordinates followed by all y coordinates
625941bf63b5f9789fde7018
def returnLatAndLong(self, receivedJson): <NEW_LINE> <INDENT> return receivedJson["entry"][0]["messaging"][0].get("message").get("attachments")[0].get("payload").get("coordinates")
get the latitude and longitude received from location quick reply Arguments: receivedJson {json} -- received json Returns: json -- latitude and longitude
625941bf23849d37ff7b2fc3
def testInterfaceFunctions(self): <NEW_LINE> <INDENT> ap = KMCAnalysisPlugin() <NEW_LINE> self.assertTrue( hasattr(ap, "setup") ) <NEW_LINE> self.assertTrue( hasattr(ap, "finalize") ) <NEW_LINE> self.assertTrue( hasattr(ap, "registerStep") ) <NEW_LINE> ap.setup("time-dummy", "step-dummy", "config-dummy") <NEW_LINE> ap.registerStep("time-dummy", "step-dummy", "config-dummy") <NEW_LINE> ap.finalize()
Test that we can construct a derrived class.
625941bf5510c4643540f31d
def main(arguments): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) <NEW_LINE> parser.add_argument('infile', help='Input file', action=check_ext({'ipv4', 'ipv6'})) <NEW_LINE> parser.add_argument("outfile", help="Output file", type=argparse.FileType("w")) <NEW_LINE> parser.add_argument('blacklist_file', help='Blacklist file',) <NEW_LINE> parser.add_argument("ip_type", choices=['ipv4', 'ipv6']) <NEW_LINE> args = parser.parse_args(arguments) <NEW_LINE> infile = args.infile <NEW_LINE> outfile = args.outfile <NEW_LINE> ip_type = args.ip_type <NEW_LINE> blacklist_file = args.blacklist_file <NEW_LINE> shuf_file = parse_input_file(infile) <NEW_LINE> datafile = split_data(shuf_file, ip_type, blacklist_file) <NEW_LINE> live_hosts = live_host_check(datafile, ip_type) <NEW_LINE> general_service_discovery(live_hosts, outfile, ip_type)
Input files should be structured like this: IPv4 files : ip:port extension : .ipv4 IPv6 files : ip.port extension : .ipv6
625941bf92d797404e3040bc
def generate_json(json_schema): <NEW_LINE> <INDENT> dict_ = {} <NEW_LINE> for key, value in json_schema.items(): <NEW_LINE> <INDENT> generator_func = "fake." + value + "()" <NEW_LINE> dict_[key] = eval(generator_func) <NEW_LINE> <DEDENT> return json.dumps(dict_)
Function to generate json.
625941bf596a8972360899f6
def OnLoadCoordFromFile(self, event): <NEW_LINE> <INDENT> dlg = wx.FileDialog(self, message="Choose a file", defaultDir=os.getcwd(), defaultFile="", wildcard="Text Files (.txt, .dat)|*.txt;*.dat", style=wx.OPEN | wx.CHANGE_DIR) <NEW_LINE> if dlg.ShowModal() == wx.ID_OK: <NEW_LINE> <INDENT> path = dlg.GetPath() <NEW_LINE> dataFile = open(path, 'r') <NEW_LINE> coord = [] <NEW_LINE> for (i, line) in enumerate(dataFile): <NEW_LINE> <INDENT> coordLine = line.split() <NEW_LINE> coord += [[float(coordLine[0]) * self.coordFactor, float(coordLine[1]) * self.coordFactor, float(coordLine[2]) * self.coordFactor]] <NEW_LINE> <DEDENT> self.labelCoordMean[self.curLabel], self.labelCoordStd[self.curLabel] = AverageCoordCalc(coord) <NEW_LINE> self.UpdateInputTable() <NEW_LINE> self.inputAccepted = False <NEW_LINE> self.statusbar.SetStatusText('Coordinates of label no. ' + str(self.curLabel + 1) + ' were imported from data file! Click "Accept" button after all input data has been entered.') <NEW_LINE> <DEDENT> event.Skip()
Loads label coordinates from data file
625941bf15fb5d323cde0a3f
def get(self, session, **filters): <NEW_LINE> <INDENT> model = session.query(self.model_class).filter_by(**filters).first() <NEW_LINE> if not model: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return model.to_data_model()
Retrieves an entity from the database. :param session: A Sql Alchemy database session. :param filters: Filters to decide which entity should be retrieved. :returns: octavia.common.data_model
625941bf82261d6c526ab3cf
def get_store_dsn(self, filter_password=False): <NEW_LINE> <INDENT> return self._build_dsn(self.dbname, filter_password=filter_password)
Returns a uri representing the current database settings. It's used by the orm to connect to a database. :param filter_password: if the password should be filtered out :returns: a string like postgresql://username@localhost/dbname
625941bf99cbb53fe6792b1a
def __setitem__(self, k, v): <NEW_LINE> <INDENT> self.root = self._put_to_node(self.root, k, v)
>>> s = BSTKVStore() >>> s[2] = 'b' >>> s.root Node(2, 'b') >>> s[1] = 'a' >>> s.root.left Node(1, 'a') >>> s[3] = 'c' >>> s.root.right Node(3, 'c')
625941bfcb5e8a47e48b79e1
def GetCommandArgs(self): <NEW_LINE> <INDENT> return self.command + self.common_args + self.test_args
Return the complete command-line arguments as a list.
625941bf7d43ff24873a2bd1
def all_completed(futures): <NEW_LINE> <INDENT> group = FutureGroup(futures) <NEW_LINE> return group.all_completed
Return a new Future that completes when all the futures in the 'futures' list given as argument are completed. The new Futures returns the list of results, or sets 'OneOrMoreFuturesFailed' if one or more futures failed.
625941bf167d2b6e31218ac9
def __init__(self): <NEW_LINE> <INDENT> object.__init__(self) <NEW_LINE> self.level = None <NEW_LINE> self.section = None <NEW_LINE> self.floor_empty = None <NEW_LINE> self.floor_rock = None <NEW_LINE> self.wall_empty = None <NEW_LINE> self.wall_ground = None <NEW_LINE> self.rng = random.Random()
Default constructor
625941bf9f2886367277a7c3
def random_list(my_list, list_size): <NEW_LINE> <INDENT> while len(my_list) < list_size: <NEW_LINE> <INDENT> number = random.randint(0, list_size) <NEW_LINE> my_list.append(number) <NEW_LINE> <DEDENT> return my_list
Generate a list with random numbers. Args: my_list (list): empty list to be filled list_size (int): value to determine size of list Returns: my_list (list): list of random value with size determined by list_size value
625941bfde87d2750b85fcc3
def random_mini_batches(X, Y, mini_batch_size): <NEW_LINE> <INDENT> m = X.shape[0] <NEW_LINE> mini_batches = [] <NEW_LINE> permutation = list(np.random.permutation(m)) <NEW_LINE> shuffled_X = X[permutation,:,:,:] <NEW_LINE> shuffled_Y = Y[permutation,:] <NEW_LINE> num_complete_minibatches = math.floor(m/mini_batch_size) <NEW_LINE> for k in range(0, num_complete_minibatches): <NEW_LINE> <INDENT> mini_batch_X = shuffled_X[k * mini_batch_size : k * mini_batch_size + mini_batch_size,:,:,:] <NEW_LINE> mini_batch_Y = shuffled_Y[k * mini_batch_size : k * mini_batch_size + mini_batch_size,:] <NEW_LINE> mini_batch = (mini_batch_X, mini_batch_Y) <NEW_LINE> mini_batches.append(mini_batch) <NEW_LINE> <DEDENT> if m % mini_batch_size != 0: <NEW_LINE> <INDENT> mini_batch_X = shuffled_X[num_complete_minibatches * mini_batch_size : m,:,:,:] <NEW_LINE> mini_batch_Y = shuffled_Y[num_complete_minibatches * mini_batch_size : m,:] <NEW_LINE> mini_batch = (mini_batch_X, mini_batch_Y) <NEW_LINE> mini_batches.append(mini_batch) <NEW_LINE> <DEDENT> return mini_batches
Creates a list of random minibatches from (X, Y) Arguments: X -- input data, of shape (input size, number of examples) (m, Hi, Wi, Ci) Y -- true "label" vector (containing 0 if cat, 1 if non-cat), of shape (1, number of examples) (m, n_y) mini_batch_size - size of the mini-batches, integer Returns: mini_batches -- list of synchronous (mini_batch_X, mini_batch_Y)
625941bf1f5feb6acb0c4a87
def model(pDict, lamSqArr_m2): <NEW_LINE> <INDENT> pArr1 = pDict["fracPol1"] * np.ones_like(lamSqArr_m2) <NEW_LINE> pArr2 = pDict["fracPol2"] * np.ones_like(lamSqArr_m2) <NEW_LINE> quArr1 = pArr1 * np.exp( 2j * (np.radians(pDict["psi01_deg"]) + pDict["RM1_radm2"] * lamSqArr_m2)) <NEW_LINE> quArr2 = pArr2 * np.exp( 2j * (np.radians(pDict["psi02_deg"]) + pDict["RM2_radm2"] * lamSqArr_m2)) <NEW_LINE> quArr = (quArr1 + quArr2) * np.exp(-2.0 * pDict["sigmaRM_radm2"]**2.0 * lamSqArr_m2**2.0) <NEW_LINE> return quArr
Two separate Faraday components, averaged within same telescope beam (i.e., unresolved), with a common Burn depolarisation term.
625941bf91f36d47f21ac423
def test_solar_irradiance_atlanta_georgia(): <NEW_LINE> <INDENT> loc = Location(latitude=33.64, longitude=84.43, timezone=-5) <NEW_LINE> sol = SolarDesignDay( location=loc, timeperiod=DESIGN_DAYS, tau_b=np.array( ( 0.334, 0.324, 0.355, 0.383, 0.379, 0.406, 0.440, 0.427, 0.388, 0.358, 0.354, 0.335, ) ), tau_d=np.array( ( 2.614, 2.580, 2.474, 2.328, 2.324, 2.270, 2.202, 2.269, 2.428, 2.514, 2.523, 2.618, ) ), ) <NEW_LINE> paper_beam = np.array((884, 932, 922, 904, 907, 877, 846, 852, 875, 879, 847, 861)) <NEW_LINE> paper_diffuse = np.array((77, 87, 105, 127, 129, 136, 144, 133, 109, 92, 84, 73)) <NEW_LINE> assert np.allclose( np.round(sol._beam_normal_irradiance[:, 12]), paper_beam, rtol=0, atol=2 ) <NEW_LINE> assert np.allclose( np.round(sol._diffuse_horizontal_irradiance[:, 12]), paper_diffuse, rtol=0, atol=1, )
Test the calculation of solar irradiance.
625941bf97e22403b379cecc
def setUp(self): <NEW_LINE> <INDENT> self.component_manager = core.ComponentManager() <NEW_LINE> self.component = TestDriver(self.component_manager)
sets up the environment
625941bf596a8972360899f7
def nvmlSystemGetTopologyGpuSet(cpuNumber): <NEW_LINE> <INDENT> c_count = c_uint(0) <NEW_LINE> fn = _nvmlGetFunctionPointer("nvmlSystemGetTopologyGpuSet") <NEW_LINE> ret = fn(cpuNumber, byref(c_count), None) <NEW_LINE> if ret != NVML_SUCCESS: <NEW_LINE> <INDENT> raise NVMLError(ret) <NEW_LINE> <DEDENT> print(c_count.value) <NEW_LINE> device_array = c_nvmlDevice_t * c_count.value <NEW_LINE> c_devices = device_array() <NEW_LINE> ret = fn(cpuNumber, byref(c_count), c_devices) <NEW_LINE> _nvmlCheckReturn(ret) <NEW_LINE> return list(c_devices[0:c_count.value])
/** * Retrieve the set of GPUs that have a CPU affinity with the given CPU number * For all products. * Supported on Linux only. * * @param cpuNumber The CPU number * @param count When zero, is set to the number of matching GPUs such that \a deviceArray * can be malloc'd. When non-zero, \a deviceArray will be filled with \a count * number of device handles. * @param deviceArray An array of device handles for GPUs found with affinity to \a cpuNumber * * @return * - \ref NVML_SUCCESS if \a deviceArray or \a count (if initially zero) has been set * - \ref NVML_ERROR_INVALID_ARGUMENT if \a cpuNumber, or \a count is invalid, or \a deviceArray is NULL with a non-zero \a count * - \ref NVML_ERROR_NOT_SUPPORTED if the device or OS does not support this feature * - \ref NVML_ERROR_UNKNOWN an error has occurred in underlying topology discovery */ nvmlReturn_t DECLDIR nvmlSystemGetTopologyGpuSet
625941bfdd821e528d63b0de
def setup_platform(hass, config, add_devices, discovery_info=None): <NEW_LINE> <INDENT> entity_id = config.get(CONF_ENTITY_ID) <NEW_LINE> start = config.get(CONF_START) <NEW_LINE> end = config.get(CONF_END) <NEW_LINE> duration = config.get(CONF_DURATION) <NEW_LINE> name = config.get(CONF_NAME) <NEW_LINE> unit = config.get(CONF_UNIT_OF_MEASUREMENT) <NEW_LINE> for template in [start, end]: <NEW_LINE> <INDENT> if template is not None: <NEW_LINE> <INDENT> template.hass = hass <NEW_LINE> <DEDENT> <DEDENT> add_devices([HistoryAverageSensor(hass, entity_id, start, end, duration, name, unit)]) <NEW_LINE> return True
Set up the HistoryAverage sensor.
625941bf8a349b6b435e80a7
def exp(x, x_min=EXP_MIN, x_max=EXP_MAX): <NEW_LINE> <INDENT> from numpy import exp, clip <NEW_LINE> x_min = max(x_min, EXP_MIN) <NEW_LINE> x_max = min(x_max, EXP_MAX) <NEW_LINE> return exp(clip(x, x_min, x_max))
Save version of exp, clips argument such that overflow does not occur. @param x: input @type x: numpy array or float or int @param x_min: lower value for clipping @type x_min: float @param x_max: upper value for clipping @type x_max: float
625941bf090684286d50ec16
def status(self, which): <NEW_LINE> <INDENT> stats = nest.GetConnections(self.pre_neuron, synapse_model = self.synapse_model) <NEW_LINE> return nest.GetStatus(stats, [which])[0][0]
Get synapse parameter status.
625941bf99cbb53fe6792b1b
def start(self, output, motion_output=None): <NEW_LINE> <INDENT> self.frame = PiVideoFrame( index=0, frame_type=None, frame_size=0, video_size=0, split_size=0, timestamp=0, complete=False, ) <NEW_LINE> if motion_output is not None: <NEW_LINE> <INDENT> self._open_output(motion_output, PiVideoFrameType.motion_data) <NEW_LINE> <DEDENT> super(PiVideoEncoder, self).start(output)
Extended to initialize video frame meta-data tracking.
625941bfe76e3b2f99f3a744
@pytest.mark.parametrize("data", getdata(data_file, "test_get_hashrate")) <NEW_LINE> def test_get_hashrate(data): <NEW_LINE> <INDENT> res = get_hashrate(data["reqdata"]) <NEW_LINE> verify_code_msg(res, data["expdata"])
获取账户实时算力
625941bf3c8af77a43ae36d2
def show(self, **kwargs): <NEW_LINE> <INDENT> return self.api_request(self._get_method_fullname("show"), kwargs)
Shows the details for the specified auth audit. **Inputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` True | ``default:`` None :param id: The internal NetMRI identifier of this user audit log information. :type id: Integer **Outputs** | ``api version min:`` None | ``api version max:`` None | ``required:`` False | ``default:`` None :return auth_audit: The auth audit identified by the specified id. :rtype auth_audit: AuthAudit
625941bf0a366e3fb873e74c
def _check_crs(left, right, allow_none=False): <NEW_LINE> <INDENT> if allow_none: <NEW_LINE> <INDENT> if not left.crs or not right.crs: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> if not left.crs == right.crs: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True
Check if the projection of both arrays is the same. If allow_none is True, empty CRS is treated as the same.
625941bf9b70327d1c4e0d08
def __init__(self, filename, directory): <NEW_LINE> <INDENT> super().__init__(filename, directory) <NEW_LINE> self.attribute_name = 'sub_experiment_id' <NEW_LINE> self.attribute_visibility = 'global' <NEW_LINE> self.attribute_type = 'c'
Initialise the class :param str filename: The basename of the file to process. :param str directory: The directory that the file is currently in.
625941bfd4950a0f3b08c285
def includeme(config): <NEW_LINE> <INDENT> settings = config.get_settings() <NEW_LINE> config.include('pyramid_tm') <NEW_LINE> session_factory = get_session_factory(get_engine(settings)) <NEW_LINE> config.registry['dbsession_factory'] = session_factory <NEW_LINE> config.add_request_method( lambda r: get_tm_session(session_factory, r.tm), 'dbsession', reify=True )
Initialize the model for a Pyramid app. Activate this setup using ``config.include('myproject.models')``.
625941bf004d5f362079a269
def extractProjectPrefix(files): <NEW_LINE> <INDENT> commonprefix = os.path.commonprefix(files) <NEW_LINE> if not commonprefix or not commonprefix.endswith("."): <NEW_LINE> <INDENT> print(red("Can't extract project name from files: %s" % ", ".join(files), bold=True)) <NEW_LINE> print(red("Please ensure that all files have a common filename and only differ in their extension!", bold=True)) <NEW_LINE> print(red("Example: MyBoard.top, MyBoard.bot, ...", bold=True)) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> return commonprefix[:-1]
Extract a common project prefix from all files in a directory Fails & exits if no such prefix is found Example: [ABC.top, ABC.bot] => "ABC"
625941bff548e778e58cd4b0
def init(self): <NEW_LINE> <INDENT> if not super(IFCTEXTLITERALWITHEXTENT,self).init(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> arg = self.args.getNext() <NEW_LINE> if not isUnset(arg): <NEW_LINE> <INDENT> self.Extent= spfToId(arg) <NEW_LINE> <DEDENT> arg = self.args.getNext() <NEW_LINE> if not isUnset(arg): <NEW_LINE> <INDENT> self.BoxAlignment= fromSPF(arg) <NEW_LINE> <DEDENT> return True
get every argument's value and parse inverses
625941bf6e29344779a62548
def tau_plus_minus(self): <NEW_LINE> <INDENT> ct = self.cartan_type() <NEW_LINE> L,R = ct.index_set_bipartition() <NEW_LINE> return self.tau_epsilon_operator_on_almost_positive_roots(L), self.tau_epsilon_operator_on_almost_positive_roots(R)
Returns the `\tau^+` and `\tau^-` piecewise linear operators on ``self`` Those operators are induced by the bipartition `\{L,R\}` of the simple roots of ``self``, and stabilize the almost positive roots. Namely, `\tau_+` fixes the negative simple roots `\alpha_i` for `i` in `R`, and acts otherwise by: .. math:: \tau_+( \beta ) = (\prod_{i \in L} s_i) (\beta) `\tau_-` acts analogously, with `L` and `R` interchanged. Those operators are used to construct the associahedron, a polytopal realization of the cluster complex (see :class:`Associahedron`). .. seealso:: :meth:`tau_epsilon_operator_on_almost_positive_roots` EXAMPLES: We explore the example of [CFZ]_ Eq.(1.3):: sage: S = RootSystem(['A',2]).root_lattice() sage: taup, taum = S.tau_plus_minus() sage: for beta in S.almost_positive_roots(): print beta, ",", taup(beta), ",", taum(beta) -alpha[1] , alpha[1] , -alpha[1] alpha[1] , -alpha[1] , alpha[1] + alpha[2] alpha[1] + alpha[2] , alpha[2] , alpha[1] -alpha[2] , -alpha[2] , alpha[2] alpha[2] , alpha[1] + alpha[2] , -alpha[2]
625941bf3cc13d1c6d3c72af
def MakeChangeLog(self, commit): <NEW_LINE> <INDENT> final = [] <NEW_LINE> process_it = self.get('process_log', '').split(',') <NEW_LINE> process_it = [item.strip() for item in process_it] <NEW_LINE> need_blank = False <NEW_LINE> for change in sorted(self.changes, reverse=True): <NEW_LINE> <INDENT> out = [] <NEW_LINE> for this_commit, text in self.changes[change]: <NEW_LINE> <INDENT> if commit and this_commit != commit: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if 'uniq' not in process_it or text not in out: <NEW_LINE> <INDENT> out.append(text) <NEW_LINE> <DEDENT> <DEDENT> line = 'Changes in v%d:' % change <NEW_LINE> have_changes = len(out) > 0 <NEW_LINE> if 'sort' in process_it: <NEW_LINE> <INDENT> out = sorted(out) <NEW_LINE> <DEDENT> if have_changes: <NEW_LINE> <INDENT> out.insert(0, line) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out = [line + ' None'] <NEW_LINE> <DEDENT> if need_blank: <NEW_LINE> <INDENT> out.insert(0, '') <NEW_LINE> <DEDENT> final += out <NEW_LINE> need_blank = have_changes <NEW_LINE> <DEDENT> if self.changes: <NEW_LINE> <INDENT> final.append('') <NEW_LINE> <DEDENT> return final
Create a list of changes for each version. Return: The change log as a list of strings, one per line Changes in v4: - Jog the dial back closer to the widget Changes in v3: None Changes in v2: - Fix the widget - Jog the dial etc.
625941bfd58c6744b4257b94
def run(Model, module_size=0, option="random", scipy=False): <NEW_LINE> <INDENT> print("Reading data...") <NEW_LINE> y, X, tag = readData(config.TRAIN_FILE, return_scipy=scipy) <NEW_LINE> testY, testX, _ = readData(config.TEST_FILE, return_scipy=scipy) <NEW_LINE> train = TrainWrapper(Model, config.LIBLINEAR_TRAINING_OPTIONS) <NEW_LINE> print("Reading data completed.") <NEW_LINE> if module_size > 0: <NEW_LINE> <INDENT> sort_tag = 0 if option == 'class' else (1 if option == 'nothing' else 2) <NEW_LINE> posXs, negXs = divide(tag, X, module_size, sort_tag=sort_tag) <NEW_LINE> print("Dividing completed.") <NEW_LINE> Xs, Ys = getData(posXs, negXs) <NEW_LINE> minmax_shape = (len(posXs), len(negXs)) <NEW_LINE> print("minmax shape: {}".format(minmax_shape)) <NEW_LINE> plabels, pvals = list(zip(*parallel_train(train, Xs, Ys, testX=testX))) <NEW_LINE> plabel, pval = minmax(pvals, plabels, minmax_shape) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> plabel, pval = train(X, y, testX=testX) <NEW_LINE> <DEDENT> assert len(plabel) == len(testY) <NEW_LINE> total = len(testY) <NEW_LINE> hit = 0 <NEW_LINE> for idx, label in enumerate(plabel): <NEW_LINE> <INDENT> if testY[idx] == label: <NEW_LINE> <INDENT> hit += 1 <NEW_LINE> <DEDENT> <DEDENT> print("Accuracy = {:.2f} ({}/{})".format(hit*100/total, hit, total)) <NEW_LINE> saveResult("{}-{}-{}".format(Model.__name__, option, module_size), plabel, pval)
run liblinear training :param module_size: the data module for size for each model, 0 for no decomposition :param option: class, nothing, random :return:
625941bff9cc0f698b140532
def serialize_model_semantics(args, architecture_hd5): <NEW_LINE> <INDENT> semantics = { 'id': args.id, 'output_labels': args.labels, 'architecture': os.path.basename(architecture_hd5), 'input_symbols': args.input_symbols, 'model_version': args.model_version, 'gatk_version': args.gatk_version, } <NEW_LINE> if args.tensor_name: <NEW_LINE> <INDENT> semantics['input_tensor_map_name'] = args.tensor_name <NEW_LINE> semantics['input_tensor_map'] = tensor_maps.get_tensor_channel_map_from_args(args) <NEW_LINE> semantics['window_size'] = args.window_size <NEW_LINE> semantics['read_limit'] = args.read_limit <NEW_LINE> <DEDENT> if args.annotation_set and args.annotation_set != '_': <NEW_LINE> <INDENT> semantics['input_annotations'] = args.annotations <NEW_LINE> semantics['input_annotation_set'] = args.annotation_set <NEW_LINE> <DEDENT> if args.data_dir: <NEW_LINE> <INDENT> semantics['data_dir'] = args.data_dir <NEW_LINE> <DEDENT> semantics['channels_last'] = args.channels_last <NEW_LINE> json_file_name = args.output_dir + args.id + '.json' <NEW_LINE> with open(json_file_name, 'w') as outfile: <NEW_LINE> <INDENT> json.dump(semantics, outfile) <NEW_LINE> <DEDENT> print('Saved model semantics at:', json_file_name)
Save a json file specifying model semantics, I/O contract. Arguments args.tensor_name: String which indicates tensor map to use (from defines.py) or None args.window_size: sites included in the tensor map args.read_limit: Maximum reads included in the tensor map args.annotations: List of annotations or None args.id: the id of the run will be the name of the semantics file architecture_hd5: Keras model and weights hd5 file (created with save_model())
625941bf283ffb24f3c55838
def gnomesort(list_): <NEW_LINE> <INDENT> position = 0 <NEW_LINE> while position < len(list_) - 1: <NEW_LINE> <INDENT> i = position <NEW_LINE> if list_[i] <= list_[i + 1]: <NEW_LINE> <INDENT> position += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> list_[i], list_[i + 1] = list_[i + 1], list_[i] <NEW_LINE> if position != 0: <NEW_LINE> <INDENT> position -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> position += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return list_
Gnomesort - TODO: Explain idea Parameters ---------- list_ : list Sort this one Returns ------- list Sorted list
625941bfe1aae11d1e749be9
def call_on_exit(self): <NEW_LINE> <INDENT> self.on_exit(self.out_data, self.err_data, self.reason)
Call on_exit after processing all twisted events. When e.g. the iperf port is already in use, processExited happens before errReceived gets "bind failed: Address already in use". Using callLater, on_exit is able to properly get and display stderr.
625941bfa8370b77170527d5
def read_spec(self, test_name): <NEW_LINE> <INDENT> spec = self.nettests.read_one(test_name) <NEW_LINE> if not spec: <NEW_LINE> <INDENT> logging.warning("Cannot read nettest %s spec", test_name) <NEW_LINE> <DEDENT> return spec
Reads the specification of a test
625941bfa8ecb033257d3002
def inorder(node): <NEW_LINE> <INDENT> if node.isLeaf(): <NEW_LINE> <INDENT> itemlist.extend(node.items) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(node.size()): <NEW_LINE> <INDENT> inorder(node.children[i]) <NEW_LINE> itemlist.append(node.items[i]) <NEW_LINE> <DEDENT> inorder(node.children[-1])
recursively fills itemlist with items inorder
625941bfa17c0f6771cbdf87
def execute(self, request): <NEW_LINE> <INDENT> url = request.uri <NEW_LINE> if request.parameters: <NEW_LINE> <INDENT> url += '?' + urlencode(request.parameters) <NEW_LINE> <DEDENT> if request.headers: <NEW_LINE> <INDENT> headers = dict(self._headers, **request.headers) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> headers = self._headers <NEW_LINE> <DEDENT> kwargs = dict( method=Method._VALUES_TO_NAMES[request.method], url=url, body=request.body, headers=headers, timeout=self._timeout, ) <NEW_LINE> retry = 0 <NEW_LINE> server = getattr(self._local, "server", None) <NEW_LINE> while True: <NEW_LINE> <INDENT> if not server: <NEW_LINE> <INDENT> self._local.server = server = self._get_server() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> parse_result = urlparse(server) <NEW_LINE> conn = POOL_MANAGER.connection_from_host(parse_result.hostname, parse_result.port, parse_result.scheme) <NEW_LINE> response = conn.urlopen(**kwargs) <NEW_LINE> return RestResponse(status=response.status, body=response.data, headers=response.headers) <NEW_LINE> <DEDENT> except urllib3.exceptions.HTTPError: <NEW_LINE> <INDENT> self._drop_server(server) <NEW_LINE> self._local.server = server = None <NEW_LINE> if retry >= self._max_retries: <NEW_LINE> <INDENT> logger.error("Client error: bailing out after %d failed retries", self._max_retries, exc_info=1) <NEW_LINE> raise NoServerAvailable <NEW_LINE> <DEDENT> logger.exception("Client error: %d retries left", self._max_retries - retry) <NEW_LINE> retry += 1
Execute a request and return a response
625941bfdd821e528d63b0df
def SailForces(self, thetaw, vw, deltas): <NEW_LINE> <INDENT> alphas = -Norm(thetaw + deltas + np.pi) <NEW_LINE> atanC, deltaatan = self.sail.atanClCd(alphas) <NEW_LINE> Fs, deltaFs = self.sail.F(alphas, vw) <NEW_LINE> gammas = Norm(atanC - thetaw) <NEW_LINE> deltaFs = deltaFs * -1.0 <NEW_LINE> deltagamma = deltaatan * -1.0 <NEW_LINE> return Fs, gammas, deltaFs, deltagamma
Calculates and returns forces from the sail. Arguments: thetaw: Wind, 0 = running downwind, +pi / 2 = wind from port vw: Wind speed, m / s deltas: Sail angle, 0 = all in, +pi / 2 = sail on starboard heel: Boat heel, 0 = upright Returns: Fs: Magnitude of force from sail (N) gammas: Angle of force from sail (rad, 0 = forwards, +pi / 2 = pushing to port) deltaFs: Derivative of Fs w.r.t. deltas deltagamma: Derivative of gamma w.r.t. deltas
625941bf4a966d76dd550f41
def getMeterInfo(self, filterJson=None, useFilter: bool = False):
    """Refresh the meter collection, optionally keeping only the meters
    that match *filterJson*.

    Same data as ``getAllMeterInfo()``, but when *useFilter* is set and
    *filterJson* is non-empty the ``meters`` list is reduced to the
    matching entries.

    Fluent API: returns ``self`` so calls can be chained.
    """
    self.getAllMeterInfo()
    collection = self.METER_JSONCOLLECTION
    if useFilter and filterJson is not None and len(filterJson) != 0:
        # Keep only the meters that satisfy the filter.
        collection['meters'] = [
            entry for entry in collection['meters']
            if self.__compareMeterWithFilter(entry, filterJson)
        ]
    self.METER_JSONCOLLECTION = collection
    return self
625941bf21a7993f00bc7c20
def reverse_transform(self, lvar_lname, rvar_lname, indices,
                      adjust_hdim=None, flip_vdim=None):
    """Build and return the reverse-transform assignment from "var2"
    back to "var1" as a Fortran statement string.

    <lvar_lname> is the local name of "var1"; <rvar_lname> is the local
    name of "var2".  <indices> is a tuple of loop indices for "var2".
    <adjust_hdim>, when given, is the local name of the
    "horizontal_loop_begin" variable used to offset the horizontal
    index.  <flip_vdim>, when given, is the local name of the vertical
    extent used to invert the vertical index.
    """
    index_str = ','.join(indices)
    # LHS: delegate index remapping to the dimension transform when one
    # is present; otherwise index var1 directly.
    if self.has_dim_transforms:
        lhs = self.__dim_transforms.reverse_transform(
            lvar_lname, indices,
            adjust_hdim=adjust_hdim, flip_vdim=flip_vdim)
    else:
        lhs = f"{lvar_lname}({index_str})"
    rhs = f"{rvar_lname}({index_str})"
    # Kind conversion wraps the RHS in a real() cast.
    if self.has_kind_transforms:
        kind = self.__kind_transforms[0]
        rhs = f"real({rhs}, {kind})"
    else:
        kind = ''
    if self.has_unit_transforms:
        # Unit conversion constants carry a kind suffix: prefer the
        # explicit kind cast, else fall back to var1's declared kind.
        if kind:
            kind = "_" + kind
        elif self.__v1_kind:
            kind = "_" + self.__v1_kind
        rhs = self.__unit_transforms[1].format(var=rhs, kind=kind)
    return f"{lhs} = {rhs}"
625941bf8e71fb1e9831d6de
def publish_to_consortium(self, id, dataset, **kwargs):
    """Initiate a batch job to extract and publish a dataset and
    knowledge graph.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back the
    request thread instead (``thread.get()`` yields the result).

    :param async_req bool
    :param str id: consortium id (required)
    :param str dataset: dataset id (required)
    :return: SubmitJobResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always returns just the payload, never
    # the (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Whether sync or async, the delegate's return value is what the
    # caller wants (a result object or a thread, respectively).
    return self.publish_to_consortium_with_http_info(id, dataset, **kwargs)
625941bf925a0f43d2549da9
def project_to(self, var_list=None):
    """Project onto a subset of variables (marginalize out the rest).

    Variables not in *var_list* are summed out of the potential.

    :param var_list: a variable name or list of variable names to keep;
        ``None`` (the default) is treated as the empty list, i.e. every
        variable is summed out and a scalar potential is returned.
    :return: a new ``Pot`` over the requested variables.
    """
    # Use a None sentinel instead of a mutable default argument ([]),
    # which would be shared across all calls.
    if var_list is None:
        var_list = []
    # Only keep variables that this potential actually contains.
    var_list = colloc.intersect(ascertain_list(var_list), self.vars())
    if var_list:
        marginal = (self.tb[var_list + ['pval']]
                    .groupby(var_list).sum().reset_index())
        return Pot(marginal)
    # Nothing left to group by: return the total mass as a scalar pot.
    return Pot(pd.DataFrame({'pval': self.tb['pval'].sum()}, index=['']))
625941bfd486a94d0b98e079
def __init__(self, name, description):
    """Initialize the object with its identifying metadata.

    :param name: display name of this object.
    :param description: human-readable description.
    """
    self.name = name
    self.description = description
    # Placeholder ownership/version values until set explicitly.
    self.owner = 'owner'
    self.version = '1'
    # Suites are registered later; start with none.
    self.suites = []
625941bfa05bb46b383ec758
def set_queued(self, queued=True):
    """Switch the signal between queued and direct emission.

    In *queued mode* the signal is emitted in another mainloop
    iteration.  Note that queued mode requires at least a
    ``QCoreApplication``.

    :param queued: True to enable queued mode, False for direct mode.
    """
    currently = self.is_queued()
    if queued and not currently:
        # Local import: Qt machinery is only needed once queued mode
        # is actually requested.
        from madgui.util.qt import Queued
        self._trigger = Queued(self._invoke)
    elif currently and not queued:
        self._trigger = self._invoke
625941bf711fe17d825422a4
def _get_subdir_paths(root_path):
    """Return a generator of formatted subdirectory paths in *root_path*.

    The transfer root itself and its ``objects`` directory are excluded
    from the result.

    :param root_path: path to the transfer directory.
    :raises DirsUUIDsException: if *root_path* is not a directory.
    """
    if not os.path.isdir(root_path):
        logger.warning("Transfer path %s is not a path to a directory", root_path)
        raise DirsUUIDsException
    skipped = (root_path, os.path.join(root_path, "objects"))
    return (
        format_subdir_path(path, root_path)
        for path, _dirs, _files in scandir.walk(root_path)
        if path not in skipped
    )
625941bf460517430c3940c0
@app_views.route('states/<state_id>/cities', methods=['POST'], strict_slashes=False)
def create_city(state_id):
    """Create a City object under the given State.

    Responds 404 when the state does not exist, 400 on a non-JSON body
    or a missing ``name`` field, and 201 with the new city's JSON
    representation on success.
    """
    if storage.get('State', state_id) is None:
        abort(404)
    payload = request.json
    if not payload:
        abort(400, 'Not a JSON')
    if 'name' not in payload:
        abort(400, 'Missing name')
    new_city = city.City(name=payload.get('name', ""), state_id=state_id)
    storage.new(new_city)
    new_city.save()
    return make_response(jsonify(new_city.to_dict()), 201)
625941bf627d3e7fe0d68d83