code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def complete(self):
    """An auto completer for python files.

    :return: list of Completion objects, sorted by name with dunder
        names (``__``-prefixed) last.
    :rtype: list
    """
    # NOTE(review): defined but never referenced in this body; presumably
    # leftover from an earlier revision -- confirm before removing.
    def follow_imports_if_possible(name):
        # Follow import statements to their targets, unless the user is
        # currently typing inside an import statement themselves.
        par = name.parent
        if isinstance(par, parsing.Import) and not isinstance(self.parser.user_stmt, parsing.Import):
            new = imports.ImportPath(par).follow(is_goto=True)
            if new:
                # NOTE(review): a bare `return new` cannot raise
                # AttributeError; this try/except looks like a leftover
                # from a previous attribute access on `new`.
                try:
                    return new
                except AttributeError:
                    pass
        return [name]
    # Split the text before the cursor into the dotted path, the dot flag
    # and the partial word being completed.
    path = self.module.get_path_until_cursor()
    path, dot, like = self._get_completion_parts(path)
    try:
        scopes = list(self._prepare_goto(path, True))
    except NotFoundError:
        # Path could not be resolved: fall back to every name visible in
        # the user's current scope.
        scopes = []
        scope_generator = evaluate.get_names_for_scope(
            self.parser.user_scope, self.pos)
        completions = []
        for scope, name_list in scope_generator:
            for c in name_list:
                completions.append((c, scope))
    else:
        completions = []
        debug.dbg('possible scopes', scopes)
        for s in scopes:
            if s.isinstance(evaluate.Function):
                names = s.get_magic_method_names()
            else:
                if isinstance(s, imports.ImportPath):
                    if like == 'import':
                        # Only complete after "import import" -- i.e. the
                        # user typed "import" as the partial word.
                        l = self.module.get_line(self.pos[0])[:self.pos[1]]
                        if not l.endswith('import import'):
                            continue
                    names = s.get_defined_names(on_import_stmt=True)
                else:
                    names = s.get_defined_names()
            for c in names:
                completions.append((c, s))
    if not dot:
        # No trailing dot: offer parameter names of the surrounding call
        # and, at top level, the language keywords.
        call_def = self.get_in_function_call()
        if call_def:
            if not call_def.module.is_builtin():
                for p in call_def.params:
                    completions.append((p.get_name(), p))
        if (not scopes or not isinstance(scopes[0], imports.ImportPath)) and not path:
            bs = builtin.Builtin.scope
            completions += ((k, bs) for k in keywords.get_keywords(
                all=True))
    needs_dot = not dot and path
    comps = []
    # Filter by the typed prefix (optionally case-insensitive) and hide
    # private variables from other scopes.
    for c, s in set(completions):
        n = c.names[-1]
        if settings.case_insensitive_completion and n.lower().startswith(like.lower()) or n.startswith(like):
            if not evaluate.filter_private_variable(s, self.parser.user_stmt, n):
                new = api_classes.Completion(c, needs_dot, len(like), s)
                comps.append(new)
    return sorted(comps, key=lambda x: (x.word.startswith('__'), x.word.lower()))
An auto completer for python files. :return: list of Completion objects, sorted by name and __ comes last. :rtype: list
625941bf5166f23b2e1a508d
def fontDidChangeVisibility(self, font):
    """Notification relay: toggle *font* in the no-interface set.

    Don't use this externally.
    """
    # Symmetric difference toggles membership: removes the font when
    # present, adds it otherwise -- same effect as the remove/add branch.
    self._noInterface ^= {font}
    self._fontChangingVisibility = None
Notification relay. Don't use this externally.
625941bfbde94217f3682d28
def AgentMethod(func):
    """Decorator exposing *func* as a D-Bus ``org.bluez.Agent`` method.

    The function name must appear in the module-level ``__SIGNATURES__``
    table mapping agent method names to (in_signature, out_signature)
    pairs.  To use async callbacks, declare two extra trailing parameters
    (success callback, error callback) on the agent method.

    :param func: the agent method to wrap.
    :raises errors.BluezUnavailableAgentMethodError: when the name is not
        a known agent method.
    :return: the dbus-wrapped method.
    """
    global __SIGNATURES__
    try:
        signatures = __SIGNATURES__[func.__name__]
    except KeyError:
        raise errors.BluezUnavailableAgentMethodError('method name ' + func.__name__ + ' unavailable for agent')
    # BUG FIX: inspect.getargspec() was removed in Python 3.11; use
    # getfullargspec(), whose slot 0 is the same positional-arg list, and
    # keep a fallback for very old interpreters.
    try:
        args = inspect.getfullargspec(func)[0]
    except AttributeError:
        args = inspect.getargspec(func)[0]
    # self + declared inputs + 2 extra args => async callbacks present.
    # NOTE(review): this assumes one character per declared input in the
    # signature string (no container types like "a{sv}") -- confirm.
    if len(args) - len(signatures[0]) == 3:
        async_callbacks = (args[-2], args[-1])
    else:
        async_callbacks = None
    warp = dbus.service.method('org.bluez.Agent', in_signature=signatures[0], out_signature=signatures[1], async_callbacks=async_callbacks)
    return warp(func)
The decorator for customizing the agent methods. To use async callbacks, add two extra parameters for success callback and error callback in the def of the agent method.
625941bf1f5feb6acb0c4a88
def entity_allocation_status_get(self, **kwargs):
    """EntityAllocationStatus_GET.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the deserialized data.

    :param async_req bool
    :param str fields:
    :param str ordering:
    :param str page_mode:
    :return: object, or the request thread when called asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # helper; for async_req=True it returns the request thread directly,
    # so the original's two branches performed the identical call.
    return self.entity_allocation_status_get_with_http_info(**kwargs)
EntityAllocationStatus_GET # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.entity_allocation_status_get(async_req=True) >>> result = thread.get() :param async_req bool :param str fields: :param str ordering: :param str page_mode: :return: object If the method is called asynchronously, returns the request thread.
625941bf60cbc95b062c6477
def _decide_if_will_buy(self, item):
    """Decide whether the shopkeeper will buy *item*.

    With no accepted classes configured everything is bought; otherwise
    the item must be an instance of at least one accepted class.  Can be
    overloaded for more complex behaviour.
    """
    accepted = self.obj_classes_accepted
    if not accepted:
        return True
    return any(isinstance(item, klass) for klass in accepted)
Decide if the shopkeeper will buy the item. Can be overloaded for more complex functionality.
625941bfb57a9660fec337b6
def _is_legal(self, peg, move):
    """Return True when jumping *peg* by *move* is a legal board move."""
    target = self._endpoint(peg, move)
    jumped = self._midpoint(peg, move)
    try:
        # Legal iff the jumped hole is occupied and the landing hole is
        # empty; positions off the board raise KeyError -> illegal.
        return bool(self.board[jumped]) and not self.board[target]
    except KeyError:
        return False
Determines if a move is legal or not.
625941bfbe383301e01b53c0
def __init__(self, links):
    """Index *links* by ``rel`` and expose each one as a dot-attribute.

    :param links: iterable of link objects carrying a ``rel`` attribute.
    """
    self.links = {entry.rel: entry for entry in links}
    for rel, entry in self.links.items():
        setattr(self, rel, entry)
Enable Links to access attributes with 'dot'
625941bf2c8b7c6e89b356f7
def get_episode_thumb_path(self, ep_obj):
    """Return where *ep_obj*'s thumbnail should be stored, or None.

    The thumbnail lives next to the episode file with a ``.cover.jpg``
    suffix appended; when the episode file does not exist there is no
    meaningful location and None is returned.

    ep_obj: a TVEpisode instance for which to create the thumbnail.
    """
    if not ek.ek(os.path.isfile, ep_obj.location):
        return None
    return ep_obj.location + '.cover.jpg'
Returns the path where the episode thumbnail should be stored. Defaults to the same path as the episode file but with a .cover.jpg extension. ep_obj: a TVEpisode instance for which to create the thumbnail
625941bf7047854f462a1341
def has_face(self):
    """Return whether any face has been found for the current image.

    :returns: boolean -- True if at least one face has been found.
    """
    return bool(self.faces)
Returns True of False whether images have been found for the current image or not. :returns: boolean -- True if at least one face has been found
625941bf7c178a314d6ef390
def list(
        self, custom_headers=None, raw=False, **operation_config):
    """Gets all the available express route service providers.

    :param dict custom_headers: headers added to the request.
    :param bool raw: also return the direct response alongside the
        deserialized response.
    :param operation_config: operation configuration overrides.
    :return: an iterator-like instance of ExpressRouteServiceProvider.
    :raises: CloudError on any non-200 response.
    """
    def internal_paging(next_link=None, raw=False):
        # First page: build the subscription-scoped URL plus api-version;
        # later pages just follow the service-provided next_link verbatim.
        if not next_link:
            url = self.list.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        else:
            url = next_link
            query_parameters = {}
        # Standard JSON headers, optional client-request-id tracing,
        # caller-supplied headers and the configured Accept-Language.
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        return response
    # The paged wrapper pulls pages lazily through internal_paging.
    deserialized = models.ExpressRouteServiceProviderPaged(internal_paging, self._deserialize.dependencies)
    if raw:
        header_dict = {}
        client_raw_response = models.ExpressRouteServiceProviderPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response
    return deserialized
Gets all the available express route service providers. :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of ExpressRouteServiceProvider :rtype: ~azure.mgmt.network.v2018_02_01.models.ExpressRouteServiceProviderPaged[~azure.mgmt.network.v2018_02_01.models.ExpressRouteServiceProvider] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
625941bf9f2886367277a7c4
def create_user(self, username=None, email=None, password=None):
    """Create, persist and return a user with the given credentials.

    Arguments:
        username: login name; required.
        email: e-mail address; required, normalized before storage.
        password: raw password; required, stored via set_password.

    Returns:
        (User): the newly created and saved user instance.

    Raises:
        TypeError: when any of the three fields is missing.
    """
    for value, message in ((username, "Users must have a username."),
                           (password, "Users must have a password."),
                           (email, "Users must have an email address.")):
        if value is None:
            raise TypeError(message)
    user = self.model(username=username, email=self.normalize_email(email))
    user.set_password(password)
    user.save()
    return user
Arguments: username email password Returns: (User): the newly created and saved user instance.
625941bf287bf620b61d399a
def drop_breadcrumb(step_name, crumb, value=True):
    """Record (crumb: value) under *step_name* and flush breadcrumbs to file.

    Breadcrumbs record which steps/phases (apportion, simulate, coalesce)
    completed so a run can be resumed with resume_after; for
    multi-processed simulate steps they also record which sub-processes
    succeeded.

    Parameters
    ----------
    step_name : str
    crumb : str
    value : yaml-writable value
    """
    trail = inject.get_injectable('breadcrumbs', OrderedDict())
    step = trail.setdefault(step_name, {'name': step_name})
    step[crumb] = value
    inject.add_injectable('breadcrumbs', trail)
    write_breadcrumbs(trail)
Add (crumb: value) to specified step in breadcrumbs and flush breadcrumbs to file run can be resumed with resume_after Breadcrumbs provides a record of steps that have been run for use when resuming Basically, we want to know which steps have been run, which phases completed (i.e. apportion, simulate, coalesce). For multi-processed simulate steps, we also want to know which sub-processes completed successfully, because if resume_after is LAST_CHECKPOINT we don't have to rerun the successful ones. Parameters ---------- step_name : str crumb : str value : yaml-writable value Returns -------
625941bf091ae35668666e98
def testNewVMXenPV(app):
    """Test the create wizard with a fake xen PV install."""
    app.uri = tests.utils.URIs.xen
    newvm = _open_newvm(app)
    # Switch the architecture options to Xen paravirt and pick "Import".
    newvm.find_fuzzy("Architecture options", "toggle").click()
    newvm.combo_select("Xen Type", ".*paravirt.*")
    newvm.find_fuzzy("Import", "radio").click()
    _forward(newvm)
    # Point the import at an existing test volume and a generic OS entry.
    newvm.find("import-entry").set_text("/pool-dir/testvol1.img")
    newvm.find("oslist-entry").set_text("generic")
    newvm.find("oslist-popover").find_fuzzy("generic").click()
    _forward(newvm)
    _forward(newvm)
    newvm.find_fuzzy("Finish", "button").click()
    # Finishing should open the VM details window and close the wizard.
    app.find_details_window("vm1")
    lib.utils.check(lambda: not newvm.showing)
Test the create wizard with a fake xen PV install
625941bf24f1403a92600a9d
def start_node(i, dirname, extra_args=None, rpchost=None):
    """Start a Indiumd and return an RPC connection to it.

    :param i: node index; selects the per-node datadir and RPC port.
    :param dirname: parent directory holding "node<i>" datadirs.
    :param extra_args: optional extra daemon command-line arguments.
    :param rpchost: RPC host to connect to (defaults to 127.0.0.1).
    :return: an AuthServiceProxy connected to the started node.
    """
    datadir = os.path.join(dirname, "node" + str(i))
    cmd = [os.getenv("BITCOIND", "Indiumd"), "-datadir=" + datadir,
           "-keypool=1", "-discover=0", "-rest"]
    if extra_args is not None:
        cmd.extend(extra_args)
    bitcoind_processes[i] = subprocess.Popen(cmd)
    # Block until the daemon answers RPC ("-rpcwait getblockcount").
    devnull = open("/dev/null", "w+")
    cli = [os.getenv("BITCOINCLI", "indium-cli"), "-datadir=" + datadir]
    subprocess.check_call(cli + _rpchost_to_args(rpchost) + ["-rpcwait", "getblockcount"], stdout=devnull)
    devnull.close()
    url = "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i))
    proxy = AuthServiceProxy(url)
    proxy.url = url
    return proxy
Start a Indiumd and return RPC connection to it
625941bf0fa83653e4656ef1
def fight_to_death(attacker, defender):
    """Fight two units until one dies or 99 rounds elapse.

    :param attacker: Unit that strikes first each round.
    :param defender: Unit that retaliates while still alive.
    :return: 1 when only the attacker survives, -1 when the attacker
        dies, 0 on a 99-round draw with both still alive.
    """
    attacker.init_total_stats()
    defender.init_total_stats()
    round_no = 0
    while attacker.is_alive() and defender.is_alive() and round_no < 99:
        if verbose:
            print("===============")
        attacker.threaten(defender, ignore_range=True)
        attacker.attack_unit(defender)
        attacker.end_turn()
        defender.end_turn()
        # The defender only retaliates if it survived the attack.
        if defender.is_alive():
            if verbose:
                print("===============")
            defender.threaten(attacker, ignore_range=True)
            defender.attack_unit(attacker)
            attacker.end_turn()
            defender.end_turn()
        round_no += 1
    attacker_up = attacker.is_alive()
    defender_up = defender.is_alive()
    if attacker_up and defender_up:
        return 0
    return 1 if attacker_up else -1
:param attacker: Unit :param defender: Unit :return: 1 if only the attacker survives, -1 if the attacker dies, 0 on a 99-round draw with both units still alive
625941bf7d847024c06be1ee
def mode(data_set):
    """Return the mode (0 or 1) of element 0 across all records.

    A strict majority of zeros yields 0; ties and empty input yield 1,
    matching the original greater-than comparison.
    """
    zeros = sum(1 for record in data_set if record[0] == 0)
    ones = len(data_set) - zeros
    return 0 if zeros > ones else 1
======================================================================================================== Input: A data_set ======================================================================================================== Job: Takes a data_set and finds mode of index 0. ======================================================================================================== Output: mode of index 0. ========================================================================================================
625941bfd486a94d0b98e07a
def walk(paths: "Iterable[str]", ignore: str) -> Iterator[str]:
    """Yield all the files under *paths*.

    BUG FIX: the ``paths`` annotation claimed ``str`` although the value
    is iterated as a list of path strings (a string annotation is used so
    no new file-level import is required).

    Args:
        paths: A list of filenames and/or directory names.
        ignore: A regular expression for names to skip.  Inside a
            directory it is matched against the bare filename; for a path
            given directly it is matched against the whole normalized path.

    Yields:
        Filenames (joined, not absolute) not matching *ignore*.
    """
    for file_or_dir in paths:
        file_or_dir = path.normpath(file_or_dir)
        if path.isdir(file_or_dir):
            for root, dirs, files in os.walk(file_or_dir):
                for filename in sorted(files):
                    if not re.match(ignore, filename):
                        yield path.join(root, filename)
            continue
        # A plain existing file given directly.
        if path.exists(file_or_dir) and not re.match(ignore, file_or_dir):
            yield file_or_dir
Yield all the files under 'paths'. Args: paths: A list of filenames and/or directory names. ignore: A regular expression for filenames to ignore. Yields: Absolute filenames not matching 'ignore'.
625941bf4f6381625f114972
def will_process_path(self, path, device=None):
    """Record that metadata processing has started for *path*.

    Thin public wrapper around the private implementation.  Only safe to
    be called from the eventloop.

    :param path: file path about to be processed.
    :param device: optional device the path belongs to.
    """
    self._will_process_path(path, device)
Call we've started processing metadata for a file Only safe to be called from eventloop.
625941bf56b00c62f0f1458d
def regrid_pr_historical(X, obs_path="/work/marvel1/SEASONAL/OBS/GPCP.precip.mon.mean.nc", start='1900-1-1', stop='2005-12-31'):
    """Regrid CMIP5 precipitation to the observational (GPCP) grid.

    Generalized (backward compatible): the previously hard-coded obs file
    and the 1900-2005 historical window are now overridable defaults.

    :param X: cdms variable with a time axis to subset and regrid.
    :param obs_path: NetCDF file whose "precip" variable supplies the
        target grid.
    :param start: inclusive start of the time window.
    :param stop: inclusive end of the time window.
    :return: X restricted to [start, stop] regridded onto the obs grid.
    """
    fobs = cdms.open(obs_path)
    try:
        the_grid = fobs("precip").getGrid()
    finally:
        # Close the obs file even if reading "precip" fails.
        fobs.close()
    return X(time=(start, stop)).regrid(the_grid, regridTool='regrid2')
regrid CMIP5 pr to obs grid
625941bf50812a4eaa59c259
def is_zero(self, a: List[StrictInt]) -> bool:
    """Return True when every coefficient of the polynomial is zero.

    Args:
        a: List of integers representing the polynomial.

    Returns:
        True for the zero polynomial (including an empty list),
        False otherwise.
    """
    return not any(coeff != 0 for coeff in a)
Checks whether all the coefficients of a polynomial are zero or not. Args: a: List of integers representing the polynomial. Returns: True if all coefficients are zero, False otherwise.
625941bfb7558d58953c4e4e
def all(self, asjson=False):
    """Return every visitor in the collection.

    :param asjson: when True, return each visitor's ``__json__()``
        representation instead of the visitor objects themselves.
    """
    visitors = self._visitors.values()
    if asjson:
        return [visitor.__json__() for visitor in visitors]
    return visitors
Return a list of all visitors in the collection
625941bf8e7ae83300e4af01
def cleanup(self):
    """Undo anything performed in create().

    Call this once you are finished with the creator instance so no
    stale files are left on the host, e.g.::

        creator = ImageCreator(ks, name)
        try:
            creator.create()
        finally:
            creator.cleanup()
    """
    # No build dir means create() never ran (or cleanup already did).
    if self.__builddir:
        self._cleanup()
        # Best-effort removal of the scratch build directory.
        shutil.rmtree(self.__builddir, ignore_errors=True)
        self.__builddir = None
        self.__clean_tmpdir()
Undo anything performed in create(). Note, make sure to call this method once finished with the creator instance in order to ensure no stale files are left on the host e.g.: creator = ImageCreator(ks, name) try: creator.create() finally: creator.cleanup()
625941bf1f037a2d8b946134
def __init__(self, recipients=None, parent=None, field=None):
    """Build a recipient collection.

    :param recipients: list of either address strings or tuples
        (name, address) or dictionary elements.
    :type recipients: list[str] or list[tuple] or list[dict] or
        list[Recipient]
    :param HandleRecipientsMixin parent: parent recipients handler.
    :param str field: name of the field to update back.
    """
    self._parent = parent
    self._field = field
    self._recipients = []
    # Suppress change-tracking while seeding the initial recipients.
    self.untrack = True
    if recipients:
        self.add(recipients)
    self.untrack = False
Recipients must be a list of either address strings or tuples (name, address) or dictionary elements :param recipients: list of either address strings or tuples (name, address) or dictionary elements :type recipients: list[str] or list[tuple] or list[dict] or list[Recipient] :param HandleRecipientsMixin parent: parent recipients handler :param str field: name of the field to update back
625941bf7b180e01f3dc4737
def __add__(self, tuple):
    """Allow updating Vega components with the ``+`` operator (deprecated)."""
    deprecation_msg = ('This API/syntax will change in the next major release. Please'
                       ' use "+=" for modifying components')
    print(deprecation_msg)
    self.update_component('add', *tuple)
Allow for updating of Vega with add operator
625941bfab23a570cc2500b5
def go_to_terms(self):
    """Go to terms: click the legal link, then the terms link."""
    for locator in (self._legal_locator, self._term_locator):
        self.find_element(*locator).click()
Go to terms.
625941bf07d97122c41787bb
def parse_http_dict(header_val):
    """Parse an HTTP comma separated header of ``a=1, b="xxx"`` items into a dict.

    Items without ``=`` map to an empty value; double-quoted values are
    unquoted.  Accepts str or bytes input (keys/values keep the input type).

    :param header_val: raw header value, possibly empty or None.
    :return: dict of key -> value.
    """
    if not header_val:
        return {}
    ans = {}
    # BUG FIX: unpacking b'="' yields the ints 61 and 34 on Python 3,
    # which breaks partition()/startswith() for bytes input; build the
    # separator and quote as proper one-byte values instead.
    if isinstance(header_val, bytes):
        sep, dquote = b'=', b'"'
    else:
        sep, dquote = '=', '"'
    # NOTE(review): the bytes path also requires parse_http_list to accept
    # bytes -- confirm against the project's helper.
    for item in parse_http_list(header_val):
        k, v = item.partition(sep)[::2]
        if k:
            if v.startswith(dquote) and v.endswith(dquote):
                v = v[1:-1]
            ans[k] = v
    return ans
Parse an HTTP comma separated header with items of the form a=1, b="xxx" into a dictionary
625941bf66656f66f7cbc0df
def bool_to_string(b):
    """Render a Python truth value as an XML boolean string ("true"/"false")."""
    return "true" if b else "false"
Takes a python boolean and returns a string for xml
625941bf187af65679ca5053
def plot_graph(self, file_path: str = None):
    """Plot thresholding (top) and frequency (bottom) panels for the algorithm.

    Args:
        file_path: optional output path; when given the figure is also
            saved there before being shown.
    """
    logging.info("THRESHOLD plot_graph called")
    fig = plt.figure(figsize=(10, 6))
    plt.title("{}".format(self.name))
    plt.rcParams['text.antialiased'] = True
    plt.style.use('ggplot')
    # Top panel: raw/filtered signals, the threshold line, and the binary
    # detections scaled to the filtered signal's max voltage.
    ax1 = fig.add_subplot(211)
    ax1.grid(True)
    ax1.plot(self.time, self.raw_signal, label='Raw Signal', linewidth=1, antialiased=True)
    ax1.plot(self.time, self.filtered_signal, label='Filtered Signal', linewidth=1, antialiased=True)
    ax1.plot(self.time, np.ones(len(self.time)) * self.threshold, label='Threshold', linewidth=1, antialiased=True)
    _, max_val = self._find_voltage_extremes(self.filtered_signal)
    ax1.plot(self.time, self.binary_signal * max_val, label='Binary Signal', linewidth=5, antialiased=True)
    ax1.plot(self.time, self.binary_centers * max_val, label='Binary Centers', linewidth=5, antialiased=True)
    ax1.legend(loc='best')
    # Bottom panel: FFT magnitude of the raw vs the filtered signal.
    ax2 = fig.add_subplot(212)
    freq_raw, fft_out_raw = self.filtered_signal_obj.get_fft(is_filtered=False)
    ax2.plot(freq_raw, abs(fft_out_raw), label='Raw Signal', linewidth=1)
    freq_filtered, fft_out_filtered = self.filtered_signal_obj.get_fft(is_filtered=True)
    ax2.plot(freq_filtered, abs(fft_out_filtered), label='Filtered Signal', linewidth=1)
    ax2.set_xlabel('Freq (Hz)')
    ax2.set_ylabel('|Y(freq)|')
    ax2.legend(loc='best')
    fig.tight_layout()
    if file_path:
        fig.savefig(file_path)
    plt.show()
    plt.close()
Plots a graph of thresholding and frequency information for the threshold algorithm. Args: file_path: The path of the file to output.
625941bff8510a7c17cf9630
def __init__(self, channel):
    """Constructor.

    Creates unary-unary stubs for the SchemaService ReadSchema and
    WriteSchema RPCs; each stub serializes the request protobuf and
    deserializes the response.

    Args:
        channel: A grpc.Channel.
    """
    self.ReadSchema = channel.unary_unary(
        '/authzed.api.v1.SchemaService/ReadSchema',
        request_serializer=authzed_dot_api_dot_v1_dot_schema__service__pb2.ReadSchemaRequest.SerializeToString,
        response_deserializer=authzed_dot_api_dot_v1_dot_schema__service__pb2.ReadSchemaResponse.FromString,
    )
    self.WriteSchema = channel.unary_unary(
        '/authzed.api.v1.SchemaService/WriteSchema',
        request_serializer=authzed_dot_api_dot_v1_dot_schema__service__pb2.WriteSchemaRequest.SerializeToString,
        response_deserializer=authzed_dot_api_dot_v1_dot_schema__service__pb2.WriteSchemaResponse.FromString,
    )
Constructor. Args: channel: A grpc.Channel.
625941bf3c8af77a43ae36d3
def findRelativeRanks(nums):
    """Return each athlete's relative rank (LeetCode 506).

    The three highest scores get medal strings; everyone else gets their
    1-based placement as a string.

    :type nums: List[int]
    :rtype: List[str]
    """
    desc = sorted(nums, reverse=True)
    # Map each score to its first (best) placement: O(n) lookups instead
    # of the original's repeated O(n) list.index() scans.
    placement = {}
    for idx, score in enumerate(desc):
        placement.setdefault(score, idx)
    medals = ("Gold Medal", "Silver Medal", "Bronze Medal")
    output = []
    for score in nums:
        rank = placement[score]
        # BUG FIX: non-medal ranks were emitted as str(rank - 1); the
        # 1-based placement is rank + 1 (4th place -> "4").
        output.append(medals[rank] if rank < 3 else str(rank + 1))
    return output
:type nums: List[int] :rtype: List[str]
625941bf4a966d76dd550f42
def getmtime(self):
    """Return this path's last-modification time via its backing module.

    .. seealso:: :attr:`mtime`, :func:`os.path.getmtime`
    """
    return self.module.getmtime(self)
.. seealso:: :attr:`mtime`, :func:`os.path.getmtime`
625941bf4f88993c3716bfa0
def insert(self, val):
    """Insert *val* into the collection (duplicates allowed).

    Returns true if the collection did not already contain the specified
    element.

    :type val: int
    :rtype: bool
    """
    was_absent = val not in self.s
    self.s.append(val)
    return was_absent
Inserts a value to the collection. Returns true if the collection did not already contain the specified element. :type val: int :rtype: bool
625941bf63b5f9789fde701a
def _sanitize_subdomain_name(input, length=None):
    """Sanitize *input* for use as a subdomain label.

    Whitespace is trimmed and spaces become hyphens; a single leading and
    trailing hyphen is dropped (before dots/underscores are converted, so
    a leading dot can still yield a leading hyphen, as per RFC 1034 /
    RFC 1123 caveats noted by the original author); dots and underscores
    become hyphens; every remaining char that is not ``-`` or a word char
    is removed; the result is truncated to *length* when given.

    :param input: string to be sanitized.
    :param length: optional maximum length (default: no truncation).
    :return: the sanitized string.
    """
    name = input.strip().replace(' ', '-')
    if name.startswith('-'):
        name = name[1:]
    if name.endswith('-'):
        name = name[:-1]
    name = name.replace('.', '-').replace('_', '-')
    name = re.sub(r'(?u)[^-\w]', '', name)
    return name[:length]
Sanitize input string to be used as name replacing ' ', '.', '_' by '-' and removing every char but '-', and word chars '\w' # noqa: W605 According to the pertinent internet recommendations (RFC3986 section 2.2, which in turn refers to: RFC1034 section 3.5 and RFC1123 section 2.1), a subdomain must meet several requirements: - Each subdomain part must have a length no greater than 63. - Each subdomain part must begin and end with an alpha-numeric (i.e. letters [A-Za-z] or digits [0-9]). - Each subdomain part may contain hyphens (dashes), but may not begin or end with a hyphen. :param input: string to be sanitized. :param length: truncate the resulting string to the provided length. Defaults to not truncate. :return: string
625941bfbd1bec0571d90563
def test06_parse_rotation_bad(self):
    """Parse rotation - bad requests all raise IIIFError."""
    r = IIIFRequest(api_version='3.0')
    bad_rotations = ('-1', '-0.0000001', '360.0000001', 'abc', '1!', '!!4')
    for rotation in bad_rotations:
        r.rotation = rotation
        self.assertRaises(IIIFError, r.parse_rotation)
Parse rotation - bad requests.
625941bf3346ee7daa2b2c9f
def visit_Name(self, name):
    """Visit a (variable) Name node in the AST.

    Records the identifier as an input and lazily creates one
    tf.placeholder per distinct name, shared across later visits.

    TODO(buckbaskin): known issue: this doesn't do var assignment yet.

    :param name: the AST Name node being visited.
    :return: the variable's identifier string.
    """
    self.__input_set.add(name.id)
    if name.id not in self.__graph_elements:
        self.__graph_elements[name.id] = tf.placeholder(
            self.__dtype, name=name.id)
    return name.id
Visit a (variable) Name in the ast This may create a variable or read from a variable TODO(buckbaskin): known issue: this doesn't do var assignment yet
625941bf96565a6dacc8f601
def load_style_sheet(stylesheet, obj):
    """Load the given Qt style file and apply it to the targeted app/widget."""
    qss_file = QFile(stylesheet)
    qss_file.open(QFile.ReadOnly)
    contents = QTextStream(qss_file).readAll()
    obj.setStyleSheet(contents)
    qss_file.close()
Loads the given style file to the targeted qt app
625941bf96565a6dacc8f602
def set_ClientID(self, value):
    """Set the value of the ClientID input for this Choreo.

    ((conditional, string) The Client ID obtained when signing up for
    Microsoft Translator on Azure Marketplace. Required unless an
    AccessToken is provided.)
    """
    super(TranslateArrayInputSet, self)._set_input('ClientID', value)
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID obtained when signing up for Microsoft Translator on Azure Marketplace. This is required unless providing an AccessToken.)
625941bf31939e2706e4cda2
def _add_storages(self, stors: List[LPStorage]):
    """Add storage cost to the objective.

    Each storage contributes its capacity variable weighted by its cost.

    :param stors: list of storages.
    :return: None
    """
    for storage in stors:
        self.objective.SetCoefficient(storage.var_capacity, storage.cost)
        self.logger.debug("Add storage %s into objective", storage.name)
Add storage cost. Cost of store for each time-step :param stors: list of storages :return:
625941bf92d797404e3040be
def load_format(path='fmt.json'):
    """Load command formatting rules from a JSON file.

    Generalized (backward compatible): the file path, previously
    hard-coded to 'fmt.json', can now be overridden.

    :param path: JSON file containing a list of objects, each with a
        'cmd' key naming the command (cat, sort, etc.).
    :return: dict mapping each command name to its format object.
    """
    with open(path) as f:
        raw_json = json.load(f)
    return {entry['cmd']: entry for entry in raw_json}
This function loads in the data from fmt.json to determine the appropriate format for various commands (cat, sort, etc.) and returns this loaded data as a dictionary object
625941bfa79ad161976cc07a
@app.route('/<make>')
def selectedpage(make):
    """Show the catalog page for a specific make.

    ARG: make -- name of the selected make (from the URL).
    Returns: rendered catalog template with the make selected; logged-in
    users (email present in the login session) additionally get their
    user record for personalized rendering.
    """
    makes = session.query(Make).all()
    selected = session.query(Make).filter_by(name=make).first()
    if 'email' in login_session:
        user = session.query(User).filter_by(
            email=login_session['email']).first()
        return render_template("selected.html", user=user, makes=makes, selected=selected)
    else:
        return render_template("selected.html", makes=makes, selected=selected)
Show catalog for a specific make ARG: name of selected make Returns: Template for catalog page with make selected
625941bf24f1403a92600a9e
def do(self, dispatcher, monitor):
    """Pick up the rider: move the driver to the rider's origin and notify.

    When the rider is still waiting, the ride starts and a Dropoff event
    is scheduled after the travel time; when the rider has cancelled, the
    driver immediately requests a new rider.

    @type self: Event
    @type dispatcher: Dispatcher
    @type monitor: Monitor
    @rtype: list[Event]
    """
    self.driver.location = self.rider.origin
    monitor.notify(self.timestamp, RIDER, PICKUP, self.rider.id, self.rider.origin)
    monitor.notify(self.timestamp, DRIVER, PICKUP, self.driver.id, self.driver.location)
    follow_ups = []
    if self.rider.status == WAITING:
        self.driver.destination = self.rider.destination
        travel_time = self.driver.start_ride(self.rider)
        follow_ups.append(Dropoff(self.timestamp + travel_time, self.rider, self.driver))
    elif self.rider.status == CANCELLED:
        follow_ups.append(DriverRequest(self.timestamp, self.driver))
    return follow_ups
Sets the driver's location to the rider's destination, Leaves the rider satisfied @type self: Event @type dispatcher: Dispatcher @type monitor: Monitor @rtype: list[Event]
625941bf21bff66bcd68488a
def __init__(self, command_type=None, win_id=0):
    """Default class constructor.

    :param `command_type`: the event kind or an instance of
        :class:`PyCommandEvent`.
    :param integer `win_id`: the window identification number.
    """
    CommandNotebookEvent.__init__(self, command_type, win_id)
    # A bare int is an event-type id; anything else is treated as an
    # event object exposing GetEventType()/GetId().
    if type(command_type) is int:
        self.notify = wx.NotifyEvent(command_type, win_id)
    else:
        self.notify = wx.NotifyEvent(command_type.GetEventType(),
                                     command_type.GetId())
Default class constructor. :param `command_type`: the event kind or an instance of :class:`PyCommandEvent`. :param integer `win_id`: the window identification number.
625941bfa17c0f6771cbdf88
def _reject(self, key):
    """Reject a specified host's public key.

    Moves the key from the pre-accepted directory to the rejected one;
    exits with status 43 when the key is not pending.

    :param key: minion host key filename to reject.
    """
    accepted_dir, pre_dir, rejected_dir = self._check_minions_directories()
    if key not in os.listdir(pre_dir):
        err = ('The host named {0} is unavailable, please accept an '
               'available key').format(key)
        self._log(err, level='error')
        sys.exit(43)
    shutil.move(os.path.join(pre_dir, key), os.path.join(rejected_dir, key))
    self._log('{0} key rejected.'.format(key), level='info')
Reject a specified host's public key
625941bf5fdd1c0f98dc0167
def rm_pid(mypath):
    """Delete the pid file on clean exit, logging failures.

    :arg mypath: The pid file.
    """
    try:
        os.remove(mypath)
    except OSError:
        # BUG FIX: os.remove raises OSError, not IOError; on Python 2 the
        # old "except IOError" never caught the failure (on Python 3 the
        # two names alias, so behavior there is unchanged).
        logger.error('Unable to delete pidfile "{0}"'.format(mypath))
Delete pid file on clean exit :arg mypath: The pid file.
625941bfb830903b967e9842
def get_userinfo(self, user, pw, command):
    """Authenticate *user*/*pw* against the MySQL Users table.

    Read-only commands (OPTIONS/PROPFIND/GET) are allowed for any
    authenticated user; all other commands additionally require the
    account's writable flag.

    :return: 1 on success, 0 on failure.
    """
    nowrite = ['OPTIONS', 'PROPFIND', 'GET']
    Mysql = self._config.MySQL
    DB = Mconn(Mysql.user, Mysql.passwd, Mysql.host, Mysql.port, Mysql.dbtable)
    if self.verbose:
        print(user, command, file=sys.stderr)
    # SECURITY: user/pw are interpolated straight into SQL (injection
    # risk) and passwords are compared in plaintext.  Left as-is because
    # Mconn's parameter-binding support is unknown -- switch to
    # parameterized queries and hashed passwords as soon as possible.
    qry = "select * from %s.Users where User='%s' and Pass='%s'" % (Mysql.dbtable, user, pw)
    Auth = DB.execute(qry)
    if len(Auth) == 1:
        can_write = Auth[0][3]
        if not can_write and command not in nowrite:
            self._log('Authentication failed for user %s using command %s' % (user, command))
            return 0
        self._log('Successfully authenticated user %s writable=%s' % (user, can_write))
        return 1
    # BUG FIX: the original repeated this failure log/return after code
    # paths that had all already returned (unreachable duplicate removed).
    self._log('Authentication failed for user %s' % user)
    return 0
authenticate user
625941bf82261d6c526ab3d1
def create_star(start, finish, by, total):
    """Not yet implemented.

    NOTE(review): the intended behavior cannot be determined from this
    stub -- presumably builds a "star" from start/finish/by/total;
    confirm with the author before implementing.
    """
    pass
Placeholder: behavior not yet implemented. Document the start, finish, by and total parameters once the function body is written.
625941bf99cbb53fe6792b1c
def K2F(K):
    """Convert Kelvin temperature(s) to Fahrenheit.

    Composes the module's K2C and C2F helpers, i.e. computes
    ``F = 1.8 * (K - 273.15) + 32``; scalars or array_likes are accepted
    via ``np.asanyarray``.

    Parameters
    ----------
    K : array_like
        Kelvin temperature(s) to be converted.

    Returns
    -------
    F : float or array of floats
        Equivalent Fahrenheit temperature(s).
    """
    return C2F(K2C(_np.asanyarray(K)))
Convert Kelvin to Fahrenheit Parameters ---------- K : array_like Kelvin temperature(s) to be converted. Returns ------- F : float or array of floats Equivalent Fahrenheit temperature(s). Notes ----- Computes ``F = 1.8 * (K - zero_Celsius) + 32`` where `zero_Celsius` = 273.15, i.e., (the absolute value of) temperature "absolute zero" as measured in Celsius. Examples -------- >>> from scipy.constants import K2F >>> K2F(np.array([233.15, 313.15])) array([ -40., 104.])
625941bfcb5e8a47e48b79e3
def get_mask_scores(self, mask_iou_pred, det_bboxes, det_labels): <NEW_LINE> <INDENT> inds = range(det_labels.size(0)) <NEW_LINE> mask_scores = mask_iou_pred[inds, det_labels + 1] * det_bboxes[inds, -1] <NEW_LINE> mask_scores = mask_scores.cpu().numpy() <NEW_LINE> det_labels = det_labels.cpu().numpy() <NEW_LINE> return [ mask_scores[det_labels == i] for i in range(self.num_classes - 1) ]
Get the mask scores. mask_score = bbox_score * mask_iou
625941bf8e05c05ec3eea2a7
@manager.command <NEW_LINE> def create_api_user(username): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = APIUser.create(name=username.lower()) <NEW_LINE> print(user.name) <NEW_LINE> print(user.api_key) <NEW_LINE> <DEDENT> except IntegrityError: <NEW_LINE> <INDENT> print("Error: {0} already exists".format(username), file=stderr) <NEW_LINE> exit(-1)
Create an API user
625941bf6fb2d068a760efd0
def test_maybe_download_and_store_single_file(self): <NEW_LINE> <INDENT> return
Given a path, test_maybe_download_and_store_single_file will return a set of keys
625941bf15baa723493c3ea9
def create_or_update( self, farmer_id, harvest_data_id, harvest_data=None, **kwargs ): <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> api_version = "2021-03-31-preview" <NEW_LINE> content_type = kwargs.pop("content_type", "application/merge-patch+json") <NEW_LINE> accept = "application/json" <NEW_LINE> url = self.create_or_update.metadata['url'] <NEW_LINE> path_format_arguments = { 'Endpoint': self._serialize.url("self._config.endpoint", self._config.endpoint, 'str', skip_quote=True), 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'), 'harvestDataId': self._serialize.url("harvest_data_id", harvest_data_id, 'str'), } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') <NEW_LINE> header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') <NEW_LINE> body_content_kwargs = {} <NEW_LINE> if harvest_data is not None: <NEW_LINE> <INDENT> body_content = self._serialize.body(harvest_data, 'HarvestData') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> body_content = None <NEW_LINE> <DEDENT> body_content_kwargs['content'] = body_content <NEW_LINE> request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) <NEW_LINE> pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200, 201]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) <NEW_LINE> raise HttpResponseError(response=response, model=error) <NEW_LINE> <DEDENT> if response.status_code == 200: <NEW_LINE> <INDENT> deserialized = self._deserialize('HarvestData', pipeline_response) <NEW_LINE> <DEDENT> if response.status_code == 201: <NEW_LINE> <INDENT> deserialized = self._deserialize('HarvestData', pipeline_response) <NEW_LINE> <DEDENT> if cls: <NEW_LINE> <INDENT> return cls(pipeline_response, deserialized, {}) <NEW_LINE> <DEDENT> return deserialized
Creates or updates harvest data resource under a particular farmer. :param farmer_id: ID of the farmer. :type farmer_id: str :param harvest_data_id: ID of the harvest data resource. :type harvest_data_id: str :param harvest_data: Harvest data resource payload to create or update. :type harvest_data: ~azure.agrifood.farming.models.HarvestData :keyword callable cls: A custom type or function that will be passed the direct response :return: HarvestData, or the result of cls(response) :rtype: ~azure.agrifood.farming.models.HarvestData :raises: ~azure.core.exceptions.HttpResponseError
625941bfab23a570cc2500b6
def test_update_expense_OK(self): <NEW_LINE> <INDENT> self.dataservice.add_expense('john@doe.com', 50, 'matatu') <NEW_LINE> expense = self.dataservice.USERS['john@doe.com'].expenses[0] <NEW_LINE> actual = self.dataservice.update_expense( 'john@doe.com', expense.id, 50, 'matatu') <NEW_LINE> expected = i18n.t('wallet.expense_updated') <NEW_LINE> self.assertEqual(actual, expected)
Tests updating expenses with valid details
625941bfbd1bec0571d90564
def compile_subroutineBody(self): <NEW_LINE> <INDENT> self.output.write(self.tag("subroutineBody") + NEW_LINE) <NEW_LINE> self.checkSymbol("{") <NEW_LINE> self.tokenizer.advance() <NEW_LINE> more_vars = True <NEW_LINE> while(more_vars): <NEW_LINE> <INDENT> if self.compile_var_dec(False) is False: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self.tokenizer.advance() <NEW_LINE> if self.tokenizer.current_value != "var": <NEW_LINE> <INDENT> more_vars = False <NEW_LINE> <DEDENT> <DEDENT> self.compile_statements() <NEW_LINE> self.checkSymbol("}") <NEW_LINE> self.output.write(self.ctag("subroutineBody") + NEW_LINE)
Compiles subroutine's body
625941bf23849d37ff7b2fc6
def encode_tta(file, pcmreader): <NEW_LINE> <INDENT> writer = BitstreamWriter(file, True) <NEW_LINE> block_size = (pcmreader.sample_rate * 256) / 245 <NEW_LINE> frame_sizes = [] <NEW_LINE> framelist = pcmreader.read(block_size) <NEW_LINE> while (len(framelist) > 0): <NEW_LINE> <INDENT> frame_sizes.append(encode_tta_frame(writer, pcmreader.bits_per_sample, framelist)) <NEW_LINE> framelist = pcmreader.read(block_size) <NEW_LINE> <DEDENT> writer.flush() <NEW_LINE> return frame_sizes
given a file object and buffered PCMReader, writes TTA frames to the writer and returns a list of TTA frame lengths, in bytes
625941bf97e22403b379cece
def subject_propagator(self) -> "IPSyntaxChecker": <NEW_LINE> <INDENT> self.ipv4_checker.subject = self.idna_subject <NEW_LINE> self.ipv6_checker.subject = self.idna_subject <NEW_LINE> self.status = SyntaxCheckerStatus() <NEW_LINE> self.status.subject = self.subject <NEW_LINE> self.status.idna_subject = self.idna_subject <NEW_LINE> return self
Propagate the currently set subject. .. warning:: You are not invited to run this method directly.
625941bf851cf427c661a447
def findmax(self, col): <NEW_LINE> <INDENT> ind = np.argmax(self.data[col]) <NEW_LINE> mx = np.average(self.data[col][ind-5:ind+5]) <NEW_LINE> return mx
Finds index of the absolute maximum. Performs an average of +- 5 datapoints from this index and returns that as the max value. Parameters ---------- col : TYPE int Datacolumn for which the maximum should be found. Typically the Kerr voltage. Returns ------- mx : TYPE float The maximum value, averaged over the neighbouring datapoints from the absolute maximum value.
625941bf3539df3088e2e281
@cli.command() <NEW_LINE> @endpoint_arg("endpoint") <NEW_LINE> @click.option( "--path", type=str, default="~/", help="The path to list the contents of. Defaults to '~/'.", ) <NEW_LINE> @click.option( "--verbose/--compact", default=True, help="Whether the JSON representation should be verbose or compact. The default is verbose.", ) <NEW_LINE> @click.pass_obj <NEW_LINE> def manifest(settings, endpoint, path, verbose): <NEW_LINE> <INDENT> if verbose: <NEW_LINE> <INDENT> json_dumps_kwargs = dict(indent=2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> json_dumps_kwargs = dict(indent=None, separators=(",", ":")) <NEW_LINE> <DEDENT> tc = get_transfer_client_or_exit(settings[constants.AUTH].get(constants.REFRESH_TOKEN)) <NEW_LINE> activate_endpoints_or_exit(tc, [endpoint]) <NEW_LINE> entries = list(tc.operation_ls(endpoint, path=path)) <NEW_LINE> click.secho(json.dumps(entries, **json_dumps_kwargs))
Print a JSON manifest of directory contents on an endpoint. The manifest can be printed in verbose, human-readable JSON or in compact, hard-for-humans JSON. Use --compact if you are worried about the size of the manifest. Otherwise, use --verbose (which is the default).
625941bf287bf620b61d399b
def _update_weights(self, xi, target): <NEW_LINE> <INDENT> output = self._net_input(xi) <NEW_LINE> error = (target - output) <NEW_LINE> self.w_[1:] += self.eta * xi.dot(error) <NEW_LINE> self.w_[0] += self.eta * error <NEW_LINE> cost = 0.5 * error**2 <NEW_LINE> return cost
Apply Adaline learning rule to update the weights.
625941bf57b8e32f524833cf
def negative(img): <NEW_LINE> <INDENT> for pixel in img: <NEW_LINE> <INDENT> x, y, col = pixel <NEW_LINE> r, g, b = col <NEW_LINE> r = 255-r <NEW_LINE> g = 255-g <NEW_LINE> b = 255-b <NEW_LINE> col = create_color(r,g,b) <NEW_LINE> set_color(img, x, y, col) <NEW_LINE> <DEDENT> return img
make image negative
625941bf26068e7796caec10
def read(self): <NEW_LINE> <INDENT> return QString()
static QString QDeclarativeProperty.read()
625941bf167d2b6e31218acc
def draw_all(file_path): <NEW_LINE> <INDENT> flag = False <NEW_LINE> counts = get_date(COUNT_FILENAME) <NEW_LINE> if get_day() == MONTH_DAY: <NEW_LINE> <INDENT> counts = get_month_data(counts) <NEW_LINE> flag = True <NEW_LINE> <DEDENT> detail = get_detail_count(counts) <NEW_LINE> font = get_font(20) <NEW_LINE> try: <NEW_LINE> <INDENT> fig = plt.figure(figsize=(IMG_WIDTH, IMG_HEIGHT), dpi=DPI) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> ax = fig.add_subplot(231) <NEW_LINE> draw_rect(detail, PV_LABEL, ax, font, 'pv', 'r') <NEW_LINE> ax = fig.add_subplot(232) <NEW_LINE> draw_rect(detail, RANK_LABEL, ax, font, 'rank', 'k') <NEW_LINE> ax = fig.add_subplot(233) <NEW_LINE> draw_rect(detail, POINTS_LABEL, ax, font, 'points', 'm') <NEW_LINE> ax = fig.add_subplot(234) <NEW_LINE> draw_line(detail, PV_LABEL, ax, font, 'pv', 'r') <NEW_LINE> ax = fig.add_subplot(235) <NEW_LINE> draw_line(detail, RANK_LABEL, ax, font, 'rank', 'k') <NEW_LINE> ax = fig.add_subplot(236) <NEW_LINE> draw_line(detail, POINTS_LABEL, ax, font, 'points', 'm') <NEW_LINE> plt.savefig(file_path) <NEW_LINE> plt.close() <NEW_LINE> return flag
画图 color: b: blue g: green r: red c: cyan m: magenta y: yellow k: black w: white
625941bf76e4537e8c3515a6
def test_failed_company_creation(self): <NEW_LINE> <INDENT> res = self.client().post( '/api/v1/companies', data=None, content_type='application/json', headers={AUTH_HEADER: self.token} ) <NEW_LINE> assert res.status_code, 400
Test failed company creation
625941bf925a0f43d2549daa
@blueprint.route('/logout/') <NEW_LINE> @login_required <NEW_LINE> def logout(): <NEW_LINE> <INDENT> logout_user() <NEW_LINE> flash('You are logged out.', 'info') <NEW_LINE> return redirect(url_for('public.catch_all'))
Logout.
625941bfc432627299f04b7a
def testIsExtraRequire_methodNotOnClass(self): <NEW_LINE> <INDENT> input_lines = [ 'goog.require(\'package.subpackage.method\');', 'var x = package.subpackage.method()', ] <NEW_LINE> token = testutil.TokenizeSource(input_lines) <NEW_LINE> namespaces_info = self._GetInitializedNamespacesInfo(token, ['package'], []) <NEW_LINE> self.assertFalse(namespaces_info.IsExtraRequire(token), 'Methods can be required except on classes.');
Tests that requiring a method not on a class is OK.
625941bf0a366e3fb873e74e
def image_spike_histogram(sself, bin=10, display=True, id_list=[], n_rep=1, normalized=True, kwargs = {} ): <NEW_LINE> <INDENT> ax = get_display(display) <NEW_LINE> timeAxis, histograms = self.spike_histogram_n_rep( bin, id_list, n_rep, normalized ) <NEW_LINE> kwargs.update( { 'origin' : 'lower', } ) <NEW_LINE> image = ax.imshow( histograms, **kwargs ) <NEW_LINE> ax.set_xlabel( 'Time (ms)' ) <NEW_LINE> ax.set_ylabel( 'Neuron #' ) <NEW_LINE> ax.set_aspect( aspect = 'auto' ) <NEW_LINE> n_points=len(timeAxis) <NEW_LINE> xticks = numpy.arange( 0, n_points, n_points*0.2) <NEW_LINE> xticklabels = numpy.arange( 0, timeAxis[-1], timeAxis[-1]*0.2) <NEW_LINE> ax.set_xticks( xticks ) <NEW_LINE> ax.set_xticklabels( [ str( l ) for l in xticklabels ] ) <NEW_LINE> n_ids=len(id_list) <NEW_LINE> yticks = numpy.arange( 0, n_ids, n_ids*0.2) <NEW_LINE> ax.set_yticks( yticks) <NEW_LINE> ax.set_yticklabels( id_list[ yticks ] ) <NEW_LINE> return image
Plot an image of all the spike_histograms generated by spike_histogram_n_rep Arguments: bin - the time bin used to gather the data display - If True, a new figure is created. Could also be a subplot id_list - list with ids to use for creation of histogram n_rep - Number of experimental repetitions with same stimulation. E.g. if n_runs=3 then it is assumed that three experimental runs is recorded and the data will be sliced up into three time intervals. normalized - if True, the histogram are in Hz (spikes/second), otherwise they are in spikes/bin kwargs . Additional plot arguments
625941bf60cbc95b062c6478
def _db_to_form_schema(self, group_type=None): <NEW_LINE> <INDENT> return lookup_group_plugin(group_type).db_to_form_schema()
This is an interface to manipulate data from the database into a format suitable for the form (optional)
625941bfd4950a0f3b08c287
def matches(self, buf): <NEW_LINE> <INDENT> buf_length = len(buf) <NEW_LINE> for i in range(buf_length): <NEW_LINE> <INDENT> if buf[i] == self.pattern: <NEW_LINE> <INDENT> return buf[i], buf[i+1:] <NEW_LINE> <DEDENT> <DEDENT> return None, []
Match a single word from the buffer consuming the prior portion greedily. >>> buf = "the dog ran away".split() >>> buf.reverse() >>> Pattern("dog").matches(buf) ('dog', ['the'])
625941bf6e29344779a6254a
def part1(): <NEW_LINE> <INDENT> input_list = read_input('input.txt') <NEW_LINE> return sum(map(fuel_required, input_list))
Solve the puzzle (part 1), given the input in input.txt
625941bff9cc0f698b140533
def write_temporary_csv_files(self): <NEW_LINE> <INDENT> temp_files = os.listdir(self.temp_dir) <NEW_LINE> counter = 0 <NEW_LINE> for file in [f for f in temp_files if f[-3:] == 'txt']: <NEW_LINE> <INDENT> with open(self.temp_dir + file) as fin, open(self.temp_dir + str(counter) + '_final.csv', 'w') as fout: <NEW_LINE> <INDENT> for line in fin: <NEW_LINE> <INDENT> fout.write(line.replace('BERTH_9_FEEDBACK,BERTH_10_FEEDBACK,', 'BERTH_9_FEEDBACK,BERTH_10_FEEDBACK,NothingToSeeHere,')) <NEW_LINE> <DEDENT> counter += 1
Helper method for makeAllDataDF() Creates a bunch of csv files from the txt files created in makeTemporaryTXTFilesForCSV() These are processed specifically for the format that is produced by the train. :return: None
625941bf3cc13d1c6d3c72b1
def __getitem__(self, index)->Position: <NEW_LINE> <INDENT> if isinstance(index, slice): <NEW_LINE> <INDENT> return self._path[index] <NEW_LINE> <DEDENT> elif isinstance(index, int): <NEW_LINE> <INDENT> if index >= 0 and index < len(self): <NEW_LINE> <INDENT> return self._path[index] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise IndexError(f"{__class__}.__getitem__ : Index ({index}) is out of range.")
Retourne la position correspondant à l'index :param index: index de la position à retourner :type index: type `int` ou `slice` :return: un objet ou une liste d'objets `Position` correspondant à l'index :rtype: * `Position` si un seul objet correspont à l'index, * `list` (d'objets ) si plusieurs objets ocrrespond à `index` :except IndexError: si `ìndex` est en dehors des bornes de la liste `self._path` *Exemple :* * si chemin = Path(...) * `chemin[i]` retourne l'objet `Position` qui se trouve à la position `i+1` (index `i`) dans `chemin`
625941bfa79ad161976cc07b
def update_potential_energy(self): <NEW_LINE> <INDENT> E_b = 0 <NEW_LINE> E_nb = 0 <NEW_LINE> for atom in self.num_atoms: <NEW_LINE> <INDENT> dist, mask = self.get_dist(atom) <NEW_LINE> ks_vec = self.set_ks(atom) <NEW_LINE> kb_mask = ks_vec==self.sim_params['kB'] <NEW_LINE> knb_mask = ks_vec==self.sim_params['kN'] <NEW_LINE> E_b += np.sum(0.5*ks_vec*(dist-self.b0[atom])*(dist-self.b0[atom])*mask*kb_mask) <NEW_LINE> E_nb += np.sum(0.5*ks_vec*(dist-self.b0[atom])*(dist-self.b0[atom])*mask*knb_mask) <NEW_LINE> <DEDENT> return E_b*0.5 ,E_nb*0.5
A method to output potential and bonded and non bonded energy It basically iterates of the atoms and sums up the potential energy between bonded and non bonded pairs. input: none returns: vectors for bonded and non bonded energies
625941bf5f7d997b871749cb
def delete_nodes(self, lb_id, node_ids, current_timestamp): <NEW_LINE> <INDENT> if not node_ids: <NEW_LINE> <INDENT> resp = { "message": "Must supply one or more id's to process this request.", "code": 400} <NEW_LINE> return resp, 400 <NEW_LINE> <DEDENT> if lb_id not in self.lbs: <NEW_LINE> <INDENT> return not_found_response("loadbalancer"), 404 <NEW_LINE> <DEDENT> _verify_and_update_lb_state(self, lb_id, False, current_timestamp) <NEW_LINE> if self.lbs[lb_id]["status"] != "ACTIVE": <NEW_LINE> <INDENT> resp = {"message": "LoadBalancer is not ACTIVE", "code": 422} <NEW_LINE> return resp, 422 <NEW_LINE> <DEDENT> all_ids = [node["id"] for node in self.lbs[lb_id].get("nodes", [])] <NEW_LINE> non_nodes = set(node_ids).difference(all_ids) <NEW_LINE> if non_nodes: <NEW_LINE> <INDENT> nodes = ','.join(map(str, non_nodes)) <NEW_LINE> resp = { "validationErrors": { "messages": [ "Node ids {0} are not a part of your loadbalancer".format(nodes) ] }, "message": "Validation Failure", "code": 400, "details": "The object is not valid"} <NEW_LINE> return resp, 400 <NEW_LINE> <DEDENT> for node_id in node_ids: <NEW_LINE> <INDENT> assert _delete_node(self, lb_id, node_id) is True <NEW_LINE> <DEDENT> _verify_and_update_lb_state(self, lb_id, current_timestamp=current_timestamp) <NEW_LINE> return EMPTY_RESPONSE, 202
Bulk-delete multiple LB nodes.
625941bf460517430c3940c1
@contextlib.contextmanager <NEW_LINE> def transaction(storage, **kwargs): <NEW_LINE> <INDENT> with storage.transaction(**kwargs) as store: <NEW_LINE> <INDENT> session = Session(store) <NEW_LINE> try: <NEW_LINE> <INDENT> yield session <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> session.flush()
Run a session for a given `StorageEngine`.
625941bff9cc0f698b140534
def test_view_success(self): <NEW_LINE> <INDENT> resp = self.api_get(f"/user/by_name/{self.current_user.username}") <NEW_LINE> body = get_body_json(resp) <NEW_LINE> self.assertEqual(resp.code, 200) <NEW_LINE> self.validate_default_success(body) <NEW_LINE> spec = self.rs.get_user_by_name_username.op_spec["responses"]["200"]["schema"] <NEW_LINE> api.validate_object(spec, body) <NEW_LINE> data = body["data"] <NEW_LINE> self.assertEqual(data["username"], self.current_username)
查看成功
625941bf283ffb24f3c5583a
def get_dependency_element(self, symbol): <NEW_LINE> <INDENT> for depend in self.dependencies: <NEW_LINE> <INDENT> if "." in depend: <NEW_LINE> <INDENT> if depend.split(".")[1] == symbol.lower(): <NEW_LINE> <INDENT> found = self.parent.get_executable(depend) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> fullname = "{}.{}".format(depend, symbol) <NEW_LINE> if self.parent.get_executable(fullname) is not None: <NEW_LINE> <INDENT> found = self.parent.get_executable(fullname) <NEW_LINE> break <NEW_LINE> <DEDENT> if self.parent.get_interface(fullname) is not None: <NEW_LINE> <INDENT> found = self.parent.get_interface(fullname) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return found
Checks if the specified symbol is the name of one of the methods that this module depends on. If it is, search for the actual code element and return it.
625941bf91f36d47f21ac426
def require_min_python_version(minimal_version): <NEW_LINE> <INDENT> import six <NEW_LINE> import sys <NEW_LINE> python_version = sys.version_info <NEW_LINE> if isinstance(minimal_version, six.string_types): <NEW_LINE> <INDENT> python_version = "%s.%s" % sys.version_info[:2] <NEW_LINE> <DEDENT> elif not isinstance(minimal_version, tuple): <NEW_LINE> <INDENT> raise TypeError("string or tuple (was: %s)" % type(minimal_version)) <NEW_LINE> <DEDENT> if python_version < minimal_version: <NEW_LINE> <INDENT> raise ConstraintError("python >= %s expected (was: %s)" % (minimal_version, python_version))
Simplifies to specify the minimal python version that is required. :param minimal_version: Minimum version (as string, tuple) :raises: behave.exception.ConstraintError
625941bf50485f2cf553cccf
def test_three_dirs(self): <NEW_LINE> <INDENT> self.assertEqual( eap.ProcessEapHomeCat.process( ansible_results([ {'item': 'dir1', 'stdout': self.cat_result}, {'item': 'dir2', 'rc': 1, 'stdout': self.cat_result}, {'item': 'dir3', 'stdout': 'foo'}])), {'dir1': True, 'dir2': False, 'dir3': False})
A directory can have three outcomes.
625941bf4428ac0f6e5ba727
def updateReductionFactors(self): <NEW_LINE> <INDENT> chiN= self.getCompressiveStrengthReductionFactor() <NEW_LINE> chiLT= self.getFlexuralStrengthReductionFactor() <NEW_LINE> for e in self.elemSet: <NEW_LINE> <INDENT> e.setProp('chiLT',chiLT) <NEW_LINE> e.setProp('chiN',chiN)
Update the value of the appropriate reduction factors.
625941bf10dbd63aa1bd2adc
def test_user_settings(self): <NEW_LINE> <INDENT> user = User.objects.get(username='user') <NEW_LINE> form = forms.NotificationsForm(instance=user) <NEW_LINE> self.assertEqual(len(form.fields['notifications'].choices), len(form.CHOICES)) <NEW_LINE> self.assertEqual(form.initial, { 'notifications': [ 'video_approved', 'video_comment', 'comment_post_comment', 'newsletter' ] })
A regular user should only see the 'video_approved', 'video_comment', and 'newsletter' notifications. The initial data for the form should have those settings enabled, since they're on by default.
625941bfd10714528d5ffc17
def get_geocodeur(): <NEW_LINE> <INDENT> global GEOCODEUR <NEW_LINE> if not GEOCODEUR: <NEW_LINE> <INDENT> GEOCODEUR = connectionGeocodeur("Nominatim") <NEW_LINE> <DEDENT> return GEOCODEUR
Connection au geocodeur unique
625941bf21a7993f00bc7c22
def getMass(self, pName): <NEW_LINE> <INDENT> if not self.proxy: <NEW_LINE> <INDENT> self.proxy = self.session.service("ALMotion") <NEW_LINE> <DEDENT> return self.proxy.getMass(pName)
Gets the mass of a joint, chain, "Body" or "Joints". :param str pName: Name of the body which we want the mass. "Body", "Joints" and "Com" give the total mass of nao. For the chain, it gives the total mass of the chain. :returns float: The mass in kg.
625941bf4e4d5625662d4311
def process(sender, reader): <NEW_LINE> <INDENT> info = yaml.load(reader) <NEW_LINE> for record in info: <NEW_LINE> <INDENT> address, subject, body = make_message(record) <NEW_LINE> sender(address, subject, body)
Process the YAML data loaded through reader.
625941bf66673b3332b91fc7
def get_stock_quote_by_id(stock_id, info=None): <NEW_LINE> <INDENT> url = urls.marketdata_quotes(stock_id) <NEW_LINE> data = helper.request_get(url) <NEW_LINE> return (helper.filter_data(data, info))
Represents basic stock quote information :param stock_id: robinhood stock id :type stock_id: str :param info: Will filter the results to get a specific value. Possible options are url, instrument, execution_date, divsor, and multiplier. :type info: Optional[str] :return: [dict] If the info parameter is provided, then the function will extract the value of the key that matches the info parameter. Otherwise, the whole dictionary is returned. :Dictionary Keys: * ask_price * ask_size * bid_price * bid_size * last_trade_price * last_extended_hours_trade_price * previous_close * adjusted_previous_close * previous_close_date * symbol * trading_halted * has_traded * last_trade_price_source * updated_at * instrument
625941bf15baa723493c3eaa
def __contains__(self, key): <NEW_LINE> <INDENT> if isinstance(key, str): <NEW_LINE> <INDENT> key = (key,) <NEW_LINE> <DEDENT> targ = self.node <NEW_LINE> for part in key: <NEW_LINE> <INDENT> targ.isa() <NEW_LINE> if part not in targ: return False <NEW_LINE> targ = targ[part] <NEW_LINE> <DEDENT> return True
Returns True if the given exists in the directory structure, False if it does not.
625941bf5fcc89381b1e15f3
def test_is_unicode_enabled(self): <NEW_LINE> <INDENT> self.is_unicode_enabled_p.stop() <NEW_LINE> def capability_unicode_supported(): <NEW_LINE> <INDENT> return ['dummy', 'display-name'] <NEW_LINE> <DEDENT> def capability_unicode_unsupported(): <NEW_LINE> <INDENT> return ['dummy'] <NEW_LINE> <DEDENT> patch_supported = mock.patch( SERVER_GET_CAPABILITIES, side_effect=capability_unicode_supported) <NEW_LINE> patch_unsupported = mock.patch( SERVER_GET_CAPABILITIES, side_effect=capability_unicode_unsupported) <NEW_LINE> sp = servermanager.ServerPool() <NEW_LINE> self.assertTrue(cfg.CONF.RESTPROXY.naming_scheme_unicode) <NEW_LINE> patch_supported.start() <NEW_LINE> self.assertTrue(sp.is_unicode_enabled()) <NEW_LINE> patch_supported.stop() <NEW_LINE> patch_unsupported.start() <NEW_LINE> self.assertFalse(sp.is_unicode_enabled()) <NEW_LINE> patch_unsupported.stop() <NEW_LINE> cfg.CONF.set_override('naming_scheme_unicode', False, 'RESTPROXY') <NEW_LINE> sp = servermanager.ServerPool() <NEW_LINE> patch_supported.start() <NEW_LINE> self.assertFalse(sp.is_unicode_enabled()) <NEW_LINE> patch_supported.stop() <NEW_LINE> patch_unsupported.start() <NEW_LINE> self.assertFalse(sp.is_unicode_enabled()) <NEW_LINE> patch_unsupported.stop()
Verify that unicode is enabled only when both conditions are True: 1. naming_scheme_unicode is True or empty 2. BCF capabilities include display-name :return:
625941bf956e5f7376d70da5
def __init__(self, rule_id, obj_id, status, message='', exc=None): <NEW_LINE> <INDENT> self.rule_id = rule_id <NEW_LINE> self.obj_id = obj_id <NEW_LINE> self.status = status <NEW_LINE> self.message = message <NEW_LINE> self.exc = exc
:param rule_id: Rule that executed :param obj_id: Object checked (in json-pointer format) :param status: PASSED/FAILED/DISABLED :param message: Optional additional information :param exc: Optional exception that was raised to cause this Result.
625941bf8da39b475bd64ea7
def test_reject_moderation_view(self): <NEW_LINE> <INDENT> response = self.client.post(reverse('wagtailadmin_pages_reject_moderation', args=(self.revision.id, )), { 'foo': "Must post something or the view won't see this as a POST request", }) <NEW_LINE> self.assertRedirects(response, reverse('wagtailadmin_home')) <NEW_LINE> self.assertFalse(Page.objects.get(id=self.page.id).live) <NEW_LINE> self.assertFalse(PageRevision.objects.get(id=self.revision.id).submitted_for_moderation) <NEW_LINE> self.assertEqual(len(mail.outbox), 1) <NEW_LINE> self.assertEqual(mail.outbox[0].to, ['submitter@email.com']) <NEW_LINE> self.assertEqual(mail.outbox[0].subject, 'The page "Hello world!" has been rejected')
This posts to the reject moderation view and checks that the page was rejected
625941bfac7a0e7691ed4007
def check(self, url, method): <NEW_LINE> <INDENT> if url == None: <NEW_LINE> <INDENT> if self.base: <NEW_LINE> <INDENT> if method in self.getmethods() and self.base.get_real_base() .check(url, method): <NEW_LINE> <INDENT> return True, None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False, HttpResponseForbidden() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if method in self.getmethods() and self.parent.get_real_base() .check(url, method): <NEW_LINE> <INDENT> return True, None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False, HttpResponseForbidden() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.base: <NEW_LINE> <INDENT> passed = False <NEW_LINE> if url != "base": <NEW_LINE> <INDENT> passed, error = self.base.get_real_base() .check(url, method) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> passed = True <NEW_LINE> <DEDENT> if url in self.geturls() and method in self.geturls()[url] and passed: <NEW_LINE> <INDENT> return True, None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False, HttpResponseForbidden() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> passed, error = self.parent.get_real_base().check(url, method) <NEW_LINE> if url in self.geturls() and method in self.geturls()[url] and passed: <NEW_LINE> <INDENT> return True, None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False, HttpResponseForbidden()
Check Method 'method' on url 'url' is allowed by this Auth
625941bf4428ac0f6e5ba728
def __init__(self, *args): <NEW_LINE> <INDENT> _gskernel.GsDir_swiginit(self, _gskernel.new_GsDir(*args))
*Overload 1:* 拷贝构造函数 :type pDir: :py:class:`GsDir` :param pDir: 拷贝的对象 | *Overload 2:* 目录的完整路径构造对象 :type strDirPath: string :param strDirPath: 目录完整路径
625941bf63f4b57ef0001056
def ECBOracle(msg): <NEW_LINE> <INDENT> prefix = rand_prefix <NEW_LINE> postfix_str = 'Um9sbGluJyBpbiBteSA1LjAKV2l0aCBteSByYWctdG9wIGRvd24gc28gbXkgaGFpciBjYW4gYmxvdwpUaGUgZ2lybGllcyBvbiBzdGFuZGJ5IHdhdmluZyBqdXN0IHRvIHNheSBoaQpEaWQgeW91IHN0b3A/IE5vLCBJIGp1c3QgZHJvdmUgYnkK' <NEW_LINE> postfix = Message(postfix_str, 'base64') <NEW_LINE> oracle = Oracle(rand_key, prefix, postfix) <NEW_LINE> return oracle.encryptECB(msg)
Appends the string tools.postfix to a message, then encrypts the result using AES-ECB under a fixed random 16-byte key. Args: msg (Message): the message to be concatenated with the bytes of tools.postfix1 and then encrypted. Returns: Message: The encryption of the concatenation of 'msg' with the bytes of tools.postfix, using AES-ECB under a fixed random 16-byte key.
625941bf23e79379d52ee49c
@login_required <NEW_LINE> @group_required(SEC_GROUP_NAMES['edit_index']) <NEW_LINE> def edit_result(request, step='edit_finish', template='mdtui/indexing.html'): <NEW_LINE> <INDENT> required_vars = ('edit_processor_indexes', 'edit_index_barcode', 'old_document_keys', 'edit_return', "edit_mdts") <NEW_LINE> variables = {} <NEW_LINE> warnings = [] <NEW_LINE> for var in required_vars: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> variables[var] = request.session[var] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> variables[var] = '' <NEW_LINE> if not var == 'edit_return': <NEW_LINE> <INDENT> error_name = MDTUI_ERROR_STRINGS['ERROR_EDIT_INDEXES_FINISHED'] <NEW_LINE> log.error('indexing_finished error: variable: %s, %s' % (var, error_name)) <NEW_LINE> if not error_name in warnings: <NEW_LINE> <INDENT> warnings.append(error_name) <NEW_LINE> <DEDENT> <DEDENT> pass <NEW_LINE> <DEDENT> <DEDENT> log.debug('indexing_edit_result called with: step: "%s", variables: "%s",' % (step, variables)) <NEW_LINE> if request.POST: <NEW_LINE> <INDENT> code = variables['edit_index_barcode'] <NEW_LINE> processor = DocumentProcessor() <NEW_LINE> options = { 'new_indexes': variables['edit_processor_indexes'], 'user': request.user, } <NEW_LINE> processor.update(code, options=options) <NEW_LINE> if not processor.errors: <NEW_LINE> <INDENT> for var in required_vars: <NEW_LINE> <INDENT> _cleanup_session_var(request, var) <NEW_LINE> <DEDENT> return HttpResponseRedirect(variables['edit_return']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for error in processor.errors: <NEW_LINE> <INDENT> warnings.append(error) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> context_secondary_indexes = {} <NEW_LINE> if 'edit_processor_indexes' in variables.iterkeys() and variables['edit_processor_indexes']: <NEW_LINE> <INDENT> for index, value in variables['edit_processor_indexes'].iteritems(): <NEW_LINE> <INDENT> if not index in ['metadata_user_name', 'metadata_user_id']: <NEW_LINE> <INDENT> 
context_secondary_indexes[index] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> context = { 'step': step, 'document_keys': context_secondary_indexes, 'barcode': variables['edit_index_barcode'], 'old_document_keys': variables['old_document_keys'], 'edit_return': variables['edit_return'], 'warnings': warnings, } <NEW_LINE> return render(request, template, context)
Confirmation step for editing indexes
625941bf6fece00bbac2d673
def separate_mixture_matrix_into_parameters(mdn_output_matrix, nb_components): <NEW_LINE> <INDENT> pi = mdn_output_matrix[:, :nb_components] <NEW_LINE> mu = mdn_output_matrix[:, nb_components:2 * nb_components] <NEW_LINE> sigma = mdn_output_matrix[:, 2 * nb_components:] <NEW_LINE> return pi, mu, sigma
Separates the output matrix of a mixture density network into three matrices: the mixture coefficient matrix, the means matrix, and the standard deviations matrix.
625941bf5166f23b2e1a508f
def _add_new_atoms(self, topology, missing_atoms, residue_map): <NEW_LINE> <INDENT> new_atoms = list() <NEW_LINE> for k, residue_ent in enumerate(residue_map): <NEW_LINE> <INDENT> residue = residue_ent[0] <NEW_LINE> replace_with = residue_ent[1] <NEW_LINE> residue.name = replace_with <NEW_LINE> template = self._templates[replace_with] <NEW_LINE> try: <NEW_LINE> <INDENT> for atom in missing_atoms[residue]: <NEW_LINE> <INDENT> new_atom = topology.addAtom(atom.name, atom.element, residue) <NEW_LINE> new_atoms.append(new_atom) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> new_res_atoms = dict() <NEW_LINE> for atom in residue.atoms(): <NEW_LINE> <INDENT> new_res_atoms[atom.name] = atom <NEW_LINE> <DEDENT> new_res_bonds = list() <NEW_LINE> for bond in topology._bonds: <NEW_LINE> <INDENT> if bond[0].residue == residue and bond[1].residue == residue: <NEW_LINE> <INDENT> new_res_bonds.append((bond[0].name, bond[1].name)) <NEW_LINE> <DEDENT> <DEDENT> template_bonds = [(template.atoms[bond[0]].name, template.atoms[bond[1]].name) for bond in template.bonds] <NEW_LINE> for bond in new_res_bonds: <NEW_LINE> <INDENT> if bond not in template_bonds and (bond[1],bond[0]) not in template_bonds: <NEW_LINE> <INDENT> bonded_0 = new_res_atoms[bond[0]] <NEW_LINE> bonded_1 = new_res_atoms[bond[1]] <NEW_LINE> topology._bonds.remove((bonded_0, bonded_1)) <NEW_LINE> <DEDENT> <DEDENT> for bond in template_bonds: <NEW_LINE> <INDENT> if bond not in new_res_bonds and (bond[1],bond[0]) not in new_res_bonds: <NEW_LINE> <INDENT> new_bonded_0 = new_res_atoms[bond[0]] <NEW_LINE> new_bonded_1 = new_res_atoms[bond[1]] <NEW_LINE> topology.addBond(new_bonded_0, new_bonded_1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> topology._numAtoms = len(list(topology.atoms())) <NEW_LINE> return topology
add new atoms (and corresponding bonds) to new residues Arguments --------- topology : simtk.openmm.app.Topology extra atoms from old residue have been deleted, missing atoms in new residue not yet added missing_atoms : dict key : simtk.openmm.app.topology.Residue value : list(simtk.openmm.app.topology._TemplateAtomData) residue_map : list(tuples) simtk.openmm.app.topology.Residue, str (three letter residue name of new residue) Returns ------- topology : simtk.openmm.app.Topology new residue has all correct atoms for desired mutation
625941bf5fc7496912cc38b4
def _xx(x): <NEW_LINE> <INDENT> if logx: <NEW_LINE> <INDENT> return 10**x <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return x
Backward data transformation for x axis
625941bfb7558d58953c4e4f
def Get(key): <NEW_LINE> <INDENT> if key is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> namespaced_key = namespaced_stored_object.NamespaceKey(key) <NEW_LINE> entity = ndb.Key('CachedPickledString', namespaced_key).get( read_policy=ndb.EVENTUAL_CONSISTENCY) <NEW_LINE> if entity: <NEW_LINE> <INDENT> return cPickle.loads(entity.value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return stored_object.Get(key)
Gets the value from the datastore.
625941bff7d966606f6a9f38
def findAll(c, string): <NEW_LINE> <INDENT> findList = [] <NEW_LINE> ctr = 0 <NEW_LINE> stringList = list(string) <NEW_LINE> for char in stringList: <NEW_LINE> <INDENT> if c == char: <NEW_LINE> <INDENT> findList.append(ctr) <NEW_LINE> ctr += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ctr += 1 <NEW_LINE> <DEDENT> <DEDENT> return findList
Finds all indices/instances of a character in a string Given a string and a character c, the method finds all instances/indices of the character and returns a list with the indices of where the character occurs. Args: c: The character passed in that the method will find string: The string to iterate over and find the character in Returns: returns a list findList that contains the indices of where the character occurs Raises: N/A
625941bfadb09d7d5db6c6c8
def numerical_gradient(score, R, eps=1e-8): <NEW_LINE> <INDENT> num = R * 0 <NEW_LINE> val = score(R) <NEW_LINE> for i in range(3): <NEW_LINE> <INDENT> for j in range(3): <NEW_LINE> <INDENT> R[i,j] += eps <NEW_LINE> num[i,j] = (score(R)-val) / eps <NEW_LINE> R[i,j] -= eps <NEW_LINE> <DEDENT> <DEDENT> return num
Calculate numerical gradient of scoring function with respect to rotation matrix
625941bf435de62698dfdb82
def get_t(df, expire_datetime): <NEW_LINE> <INDENT> return pd.Series(_get_t_series(df["datetime"], df["duration"], expire_datetime))
Compute the annualized time-to-expiry of a K-line series; mainly used when calculating option Greeks, which require the annualized time to expiry corresponding to each bar of the series Args: df (pandas.DataFrame): K-line series as a DataFrame expire_datetime (int): expiry date, second-level timestamp Returns: pandas.Series : annualized time series corresponding to the given df Example:: from tqsdk import TqApi, TqAuth, tafunc api = TqApi(auth=TqAuth("信易账户", "账户密码")) quote = api.get_quote('SHFE.cu2006C45000') klines = api.get_kline_serial(['SHFE.cu2006C45000', 'SHFE.cu2006'], 24 * 60 * 60, 50) t = tafunc.get_t(klines, quote.expire_datetime) print(t) api.close()
625941bf07f4c71912b113b7
def _get_ios_sdk_version(self, target_platform: Platform) -> str: <NEW_LINE> <INDENT> return self.ios_sdk or self._get_latest_ios_sdk_version( target_platform=target_platform)
Get the iOS SDK version to use: either the one the user said, or the latest we can find.
625941bf23e79379d52ee49d
def valid(self, queen_str, current_queen): <NEW_LINE> <INDENT> rows = len(queen_str) <NEW_LINE> flag = False <NEW_LINE> for row in range(rows): <NEW_LINE> <INDENT> if abs(current_queen - int(queen_str[row])) in (0, rows - row): <NEW_LINE> <INDENT> flag = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return flag
:param queen_str: column positions of the queens placed before the current queen :param current_queen: column position of the current queen :return: flag: whether the current queen conflicts with any previously placed queen
625941bf7c178a314d6ef392