code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def discord_toggle_pins(): <NEW_LINE> <INDENT> pass
Toggle pins popout
625941bd21a7993f00bc7be8
def avg_over_season(df, stn, att, start=19010101, end=10991231): <NEW_LINE> <INDENT> att_arr, t_arr = attribute_over_time(df, stn, att, start, end) <NEW_LINE> t_arr %= 10000 <NEW_LINE> seasons = [] <NEW_LINE> att_avg = [] <NEW_LINE> for season in np.array(["winter", "spring", "summer", "autumn"]): <NEW_LINE> <INDENT> s_start, s_end = SEASONS[season] <NEW_LINE> att_data = [] <NEW_LINE> if season == "winter": <NEW_LINE> <INDENT> att_data = att_arr[(t_arr >= s_start) | (t_arr <= s_end)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> att_data = att_arr[(s_start <= t_arr) & (t_arr <= s_end)] <NEW_LINE> <DEDENT> if len(att_data) > 1: <NEW_LINE> <INDENT> att_avg.append(np.mean(att_data)) <NEW_LINE> seasons.append(season) <NEW_LINE> <DEDENT> <DEDENT> return np.array(att_avg), np.array(seasons)
Returns a tuple of the values of the given attribute and the seasons. df = pandas.DataFrame, all the data extracted from your csv file. stn = station, the number or the name of the station. att = attribute start = start date end = end date
625941bd566aa707497f4470
def send_message(self,user,message_name): <NEW_LINE> <INDENT> if not karacos.core.mail.valid_email(user['name']): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if 'validation' not in user: <NEW_LINE> <INDENT> user['validation'] = "%s" % uuid4().hex <NEW_LINE> <DEDENT> user.save() <NEW_LINE> message = MIMEMultipart() <NEW_LINE> message['From'] = self['mail_register_from_addr'] <NEW_LINE> message['To'] = user['name'] <NEW_LINE> if '%s_subject' % message_name not in self: <NEW_LINE> <INDENT> message['Subject'] = _("Email de confirmation") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message['Subject'] = self['%s_subject' % message_name] <NEW_LINE> <DEDENT> template = None <NEW_LINE> if 'mail_template_%s' % message_name not in self: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> template = self.__domain__.lookup.get_template('%s/nl_templates/%s' % (self.__domain__.get_site_theme_base(),message_name)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> template = self.__domain__.lookup.get_template('/menestrel/nl_templates/%s' % message_name) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.log.log_exc(sys.exc_info(),'error') <NEW_LINE> self.log.error("Mail template '%s' not found, mail not sent" % message_name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> template = Template(self['mail_template_%s' % message_name]) <NEW_LINE> <DEDENT> body = template.render(instance=self,user=user) <NEW_LINE> message.attach(MIMEText(body, 'html')) <NEW_LINE> self.log.debug("sending mail : %s,%s" % (user['name'],user['validation'])) <NEW_LINE> try: <NEW_LINE> <INDENT> karacos.core.mail.send_mail(user['name'],message.as_string()) <NEW_LINE> self.log.info("mail successfully sent to %s" % user['name']) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.log.warn("error while sending mail to %s" % user['name']) <NEW_LINE> self.log.log_exc( sys.exc_info(),'warn') <NEW_LINE> <DEDENT> return True
send given mail message to given user
625941bd091ae35668666e60
@user_app.put('/psw', apply=use_kwargs(psw_args)) <NEW_LINE> @utils.permissions_auth("put:/v1/sys/user/psw") <NEW_LINE> def psw(old_psw, new_psw): <NEW_LINE> <INDENT> token_util = utils.TokenUtil() <NEW_LINE> user_id = token_util.get_user_id() <NEW_LINE> user = SysUserService.get_user(user_id) <NEW_LINE> if user.password == utils.md5_salt(old_psw, user.username): <NEW_LINE> <INDENT> rowid = SysUserService.update_pwd(user_id, utils.md5_salt(new_psw, user.username)) <NEW_LINE> if rowid > 0: <NEW_LINE> <INDENT> redis_util = utils.RedisUtil() <NEW_LINE> redis_util.deltele("{}:{}".format(conf.prefix_token, user_id)) <NEW_LINE> return success_result() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return error_result(msg="修改密码失败") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return error_result(msg="原密码不正确")
修改密码 :param old_psw: :param new_psw: :return:
625941bd9c8ee82313fbb671
def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkInPlaceImageFilterICVF22IF2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj
New() -> itkInPlaceImageFilterICVF22IF2 Create a new object of the class itkInPlaceImageFilterICVF22IF2 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects - the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. Ex: itkInPlaceImageFilterICVF22IF2.New( reader, Threshold=10 ) is (most of the time) equivalent to: obj = itkInPlaceImageFilterICVF22IF2.New() obj.SetInput( 0, reader.GetOutput() ) obj.SetThreshold( 10 )
625941bdadb09d7d5db6c68e
def executor(func_or_executor: Union[Executor, str, Callable[..., T_Retval]]) -> Union[WrappedCallable, Callable[..., WrappedCallable]]: <NEW_LINE> <INDENT> def outer(func: Callable[..., T_Retval], executor: Union[Executor, str] = None) -> Callable[..., Awaitable[T_Retval]]: <NEW_LINE> <INDENT> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> loop = get_running_loop() <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> if isinstance(executor, str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ctx = next(obj for obj in args[:2] if isinstance(obj, Context)) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> raise RuntimeError('the callable needs to be called with a Context as the ' 'first or second positional argument') <NEW_LINE> <DEDENT> _executor = ctx.require_resource(Executor, executor) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _executor = executor <NEW_LINE> <DEDENT> callback = partial(func, *args, **kwargs) <NEW_LINE> return loop.run_in_executor(_executor, callback) <NEW_LINE> <DEDENT> assert check_argument_types() <NEW_LINE> assert not inspect.iscoroutinefunction(func), 'Cannot wrap coroutine functions to be run in an executor' <NEW_LINE> return wraps(func)(wrapper) <NEW_LINE> <DEDENT> if isinstance(func_or_executor, (str, Executor)): <NEW_LINE> <INDENT> return partial(outer, executor=func_or_executor) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return outer(func_or_executor)
Decorate a function to run in an executor. If no executor (or ``None``) is given, the current event loop's default executor is used. Otherwise, the argument must be a PEP 3148 compliant thread pool executor or the name of an :class:`~concurrent.futures.Executor` instance. If a decorated callable is called in a worker thread, the executor argument is ignored and the wrapped function is called directly. Callables wrapped with this decorator must be used with ``await`` when called in the event loop thread. Example use with the default executor (``None``):: @executor def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) With a named :class:`~concurrent.futures.Executor` resource:: @executor('special_ops') def this_runs_in_threadpool(ctx): return do_something_cpu_intensive() async def request_handler(ctx): result = await this_runs_in_threadpool(ctx) :param func_or_executor: either a callable (when used as a decorator), an executor instance or the name of an :class:`~concurrent.futures.Executor` resource
625941bd21bff66bcd684851
def add_letter_to_axis(ax, let, x, y, height): <NEW_LINE> <INDENT> for polygon, color in zip(letters_polygons[let], colors[let]): <NEW_LINE> <INDENT> new_polygon = affinity.scale( polygon, yfact=height, origin=(0, 0, 0)) <NEW_LINE> new_polygon = affinity.translate( new_polygon, xoff=x, yoff=y) <NEW_LINE> patch = PolygonPatch( new_polygon, edgecolor=color, facecolor=color) <NEW_LINE> ax.add_patch(patch) <NEW_LINE> <DEDENT> return
Add 'let' with position x,y and height height to matplotlib axis 'ax'.
625941bdcdde0d52a9e52f2c
def move_to_coord(self, x, y, omega=None): <NEW_LINE> <INDENT> warnings.warn( "Deprecated method, call move_to_beam instead", DeprecationWarning ) <NEW_LINE> return self.move_to_beam(x, y, omega)
Descript. : function to create a centring point based on all motors positions.
625941bdde87d2750b85fc8c
def getRow(self, rowIndex): <NEW_LINE> <INDENT> output = [[1], [1, 1]] <NEW_LINE> for i in range(rowIndex+1): <NEW_LINE> <INDENT> if i > 1: <NEW_LINE> <INDENT> tmp = [] <NEW_LINE> tmp.append(1) <NEW_LINE> for j in range(1, i): <NEW_LINE> <INDENT> tmp.append(output[i-1][j-1] + output[i-1][j]) <NEW_LINE> <DEDENT> tmp.append(1) <NEW_LINE> output.append(tmp) <NEW_LINE> <DEDENT> <DEDENT> return output[rowIndex]
:type rowIndex: int :rtype: List[int]
625941bd6fb2d068a760ef97
def __call__(self, environ, start_response): <NEW_LINE> <INDENT> return self.wsgi_app(environ, start_response)
This makes Loris executable.
625941bd15baa723493c3e70
def _preprocess_mapping(self, mapping): <NEW_LINE> <INDENT> m = mapping <NEW_LINE> for key, value in m.items(): <NEW_LINE> <INDENT> if isinstance(value, basestring): <NEW_LINE> <INDENT> m[key] = mapper.value(value) <NEW_LINE> <DEDENT> elif isinstance(value, mapper.dbmapper): <NEW_LINE> <INDENT> value.set_parent(self) <NEW_LINE> <DEDENT> elif isinstance(value, CreateChilds): <NEW_LINE> <INDENT> for c in value.get_childs(): <NEW_LINE> <INDENT> self._preprocess_mapping(c) <NEW_LINE> for ckey, cvalue in c.items(): <NEW_LINE> <INDENT> new_key = '%s/%s' % (key, ckey) <NEW_LINE> if new_key not in m: <NEW_LINE> <INDENT> m[new_key] = [] <NEW_LINE> <DEDENT> m[new_key].append(cvalue) <NEW_LINE> <DEDENT> <DEDENT> del m[key] <NEW_LINE> <DEDENT> <DEDENT> return m
Preprocess the mapping : after the preprocces, everything is callable in the val of the dictionary use to allow syntaxical sugar like 'field': 'external_field' instead of 'field' : value('external_field')
625941bdbd1bec0571d90534
def monkey_patch(): <NEW_LINE> <INDENT> if not CONF.monkey_patch: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for module_and_decorator in CONF.monkey_patch_modules: <NEW_LINE> <INDENT> module, decorator_name = module_and_decorator.split(':') <NEW_LINE> decorator = importutils.import_class(decorator_name) <NEW_LINE> __import__(module) <NEW_LINE> module_data = pyclbr.readmodule_ex(module) <NEW_LINE> for key in module_data.keys(): <NEW_LINE> <INDENT> if isinstance(module_data[key], pyclbr.Class): <NEW_LINE> <INDENT> clz = importutils.import_class("%s.%s" % (module, key)) <NEW_LINE> for method, func in inspect.getmembers(clz, inspect.ismethod): <NEW_LINE> <INDENT> setattr(clz, method, decorator("%s.%s.%s" % (module, key, method), func)) <NEW_LINE> <DEDENT> <DEDENT> if isinstance(module_data[key], pyclbr.Function): <NEW_LINE> <INDENT> func = importutils.import_class("%s.%s" % (module, key)) <NEW_LINE> setattr(sys.modules[module], key, decorator("%s.%s" % (module, key), func))
If the CONF.monkey_patch set as True, this function patches a decorator for all functions in specified modules. You can set decorators for each modules using CONF.monkey_patch_modules. The format is "Module path:Decorator function". Example: 'prototype.api.ec2.cloud:prototype.notifications.notify_decorator' Parameters of the decorator is as follows. (See prototype.notifications.notify_decorator) name - name of the function function - object of the function
625941bdd18da76e235323d0
def next(self): <NEW_LINE> <INDENT> if self._epoch < 0: <NEW_LINE> <INDENT> self.reset() <NEW_LINE> <DEDENT> if self.drained(): <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> while not self._exit: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sample = self._outq.get(timeout=3) <NEW_LINE> <DEDENT> except Empty as e: <NEW_LINE> <INDENT> if not self._consumer_healthy(): <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> if isinstance(sample, EndSignal): <NEW_LINE> <INDENT> self._consumer_endsig[sample.id] = sample <NEW_LINE> logger.warn("recv endsignal from outq with errmsg[{}]" .format(sample.errmsg)) <NEW_LINE> if len(self._consumer_endsig.keys()) < len(self._consumers): <NEW_LINE> <INDENT> self._inq.put(sample) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._exit = True <NEW_LINE> raise StopIteration("all consumers exited, no more samples") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._consumed += 1 <NEW_LINE> return sample <NEW_LINE> <DEDENT> <DEDENT> raise StopIteration()
get next transformed sample
625941bd6e29344779a62511
def create_user(self, email, password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError('Users must have an email address') <NEW_LINE> <DEDENT> user = self.model( email=self.normalize_email(email), ) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user
Creates and saves a MBUser with the given email and password.
625941bd67a9b606de4a7db9
def squares(self,movement=dont_displace, rotation=dont_rotate, position=None): <NEW_LINE> <INDENT> if position is None: <NEW_LINE> <INDENT> position = self.pos <NEW_LINE> <DEDENT> pos = complex(*movement(position)) <NEW_LINE> qua_rots = int(rotation(self.ori)/90) <NEW_LINE> cmx_rotation = power(complex(0,1),qua_rots) <NEW_LINE> for square in self.shape.cx_squares: <NEW_LINE> <INDENT> cmx_sq = (self.shape.mid_pt + pos + (square - self.shape.mid_pt) * cmx_rotation) <NEW_LINE> yield (ceil(cmx_sq.real),ceil(cmx_sq.imag))
Yields all the squares of the piece on the screen. Input is taken as functions of position, and of orientation. If a position is given, the function yields squares if the piece were there.
625941bd0fa83653e4656eb9
def mask_image_by_color(image, color): <NEW_LINE> <INDENT> pixels = image.load() <NEW_LINE> masked_pixels = [] <NEW_LINE> for w in range(image.size[0]): <NEW_LINE> <INDENT> for h in range(image.size[1]): <NEW_LINE> <INDENT> if pixels[w, h] != color: <NEW_LINE> <INDENT> pixels[w, h] = (0, 0, 0) <NEW_LINE> masked_pixels.append((w,h)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return masked_pixels
Mask the input image according to the rgb color. ALl pixels that do not have this color will be set to black. :param image: the PIL image to mask :param color: the RGB color as (r, g, b) tuple
625941bd6fece00bbac2d639
def _import_race(self): <NEW_LINE> <INDENT> self._import_table_data('chrRaces', Race, [ ('raceID', 'pk', None), ('raceName', 'name', None), ('description', 'description', _string_null_to_empty), ('shortDescription', 'description_short', _string_null_to_empty), ])
Import from chrRaces table to Race model.
625941bd377c676e912720a6
def test_upload_content(self): <NEW_LINE> <INDENT> name = gen_string('alpha', 15) <NEW_LINE> try: <NEW_LINE> <INDENT> new_repo = self._make_repository({'name': name}) <NEW_LINE> <DEDENT> except CLIFactoryError as err: <NEW_LINE> <INDENT> self.fail(err) <NEW_LINE> <DEDENT> ssh.upload_file(local_file=get_data_file(RPM_TO_UPLOAD), remote_file="/tmp/{0}".format(RPM_TO_UPLOAD)) <NEW_LINE> result = Repository.upload_content({ 'name': new_repo['name'], 'path': "/tmp/{0}".format(RPM_TO_UPLOAD), 'product-id': new_repo['product']['id'], 'organization': new_repo['organization'], }) <NEW_LINE> self.assertEqual(result.return_code, 0, "return code must be 0, instead got {0}" ''.format(result.return_code)) <NEW_LINE> self.assertEqual( len(result.stderr), 0, "No error was expected") <NEW_LINE> self.assertIn("Successfully uploaded file '{0}'" ''.format(RPM_TO_UPLOAD), result.stdout[0]['message'])
@Test: Create repository and upload content @Feature: Repository @Assert: upload content is successful
625941bdab23a570cc25007d
def __call__(self): <NEW_LINE> <INDENT> vmin, vmax = self.axis.get_view_interval() <NEW_LINE> vmin, vmax = mtransforms.nonsingular(vmin, vmax, expander = 0.05) <NEW_LINE> if vmax<vmin: <NEW_LINE> <INDENT> vmin, vmax = vmax, vmin <NEW_LINE> <DEDENT> if (vmin, vmax) in self.presets: <NEW_LINE> <INDENT> return self.presets[(vmin, vmax)] <NEW_LINE> <DEDENT> if self.numticks is None: <NEW_LINE> <INDENT> self._set_numticks() <NEW_LINE> <DEDENT> if self.numticks==0: return [] <NEW_LINE> ticklocs = np.linspace(0, vmax, self.numticks) <NEW_LINE> return self.raise_if_exceeds(ticklocs)
Return the locations of the ticks
625941bdcb5e8a47e48b79ab
def _parse_cell_values(self, values, col_metadata, subject, predicate): <NEW_LINE> <INDENT> if 'ordered' in col_metadata and col_metadata['ordered'] is True and len(values) > 1: <NEW_LINE> <INDENT> next_item = BNode() <NEW_LINE> rdf_list = next_item <NEW_LINE> values_count = len(values) <NEW_LINE> for i in range(values_count): <NEW_LINE> <INDENT> item = next_item <NEW_LINE> self.graph.add((item, RDF.first, Literal(values[i]))) <NEW_LINE> if i != values_count - 1: <NEW_LINE> <INDENT> next_item = BNode() <NEW_LINE> self.graph.add((item, RDF.rest, next_item)) <NEW_LINE> <DEDENT> <DEDENT> Collection(self.graph, rdf_list) <NEW_LINE> self.graph.add((subject, predicate, rdf_list)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for value in values: <NEW_LINE> <INDENT> object_node = self._object_node(value, col_metadata) <NEW_LINE> self.graph.add((subject, predicate, object_node))
Parses single cell value, values if 'separator' is present
625941bdd8ef3951e324343a
def valuation(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._bg_value.valuation() <NEW_LINE> <DEDENT> except (TypeError, AttributeError): <NEW_LINE> <INDENT> if self._bg_value == 0: <NEW_LINE> <INDENT> return infinity <NEW_LINE> <DEDENT> for a in range(len(self._bg_value.list())): <NEW_LINE> <INDENT> if self._bg_value.list()[a] is not 0: <NEW_LINE> <INDENT> return a
Return valuation of ``self``. EXAMPLES:: sage: R.<a,b> = PowerSeriesRing(GF(4949717)); R Multivariate Power Series Ring in a, b over Finite Field of size 4949717 sage: f = a^2 + a*b + a^3 + R.O(9) sage: f.valuation() 2 sage: g = 1 + a + a^3 sage: g.valuation() 0
625941bd3539df3088e2e248
def __init__(self, ie0, ie1, t_dist, x_space, dt, n, slope, **kwargs): <NEW_LINE> <INDENT> self.t_dist = t_dist <NEW_LINE> self.x_space = x_space <NEW_LINE> self.dt = dt <NEW_LINE> alpha = 1.0/n*sqrt(slope) <NEW_LINE> self.alpha = alpha <NEW_LINE> self.m = 5.0/3 <NEW_LINE> if 'folder' in kwargs: <NEW_LINE> <INDENT> self.folder = kwargs['folder'] <NEW_LINE> if not os.path.isdir(kwargs['folder']): <NEW_LINE> <INDENT> os.mkdir(kwargs['folder']) <NEW_LINE> os.mkdir(kwargs['folder']+'/depth_change') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> folder = 'calc_result/' + str(ie0) + 'to' + str(ie1) <NEW_LINE> if not os.path.isdir(folder): <NEW_LINE> <INDENT> os.mkdir(folder) <NEW_LINE> os.mkdir(folder + '/depth_change') <NEW_LINE> <DEDENT> self.folder = folder <NEW_LINE> <DEDENT> ie0 = ie0*10**-3/3600 <NEW_LINE> ie1 = ie1*10**-3/3600 <NEW_LINE> self.ie0 = ie0 <NEW_LINE> self.ie1 = ie1 <NEW_LINE> if 'plot' in kwargs: <NEW_LINE> <INDENT> if kwargs['plot']: <NEW_LINE> <INDENT> self.plot = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.plot = False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.plot = False <NEW_LINE> <DEDENT> self.get_max_q() <NEW_LINE> self.get_max_s() <NEW_LINE> self.method = 'simpson' <NEW_LINE> if (x_space-1) % 2 != 0: <NEW_LINE> <INDENT> print('Number total column-1 is not even, using trapezoid method.') <NEW_LINE> self.method = 'trapezoid' <NEW_LINE> <DEDENT> if 'method' in kwargs: <NEW_LINE> <INDENT> method = kwargs['method'] <NEW_LINE> self.method = method
ie0: effective rainfall before rainfall intensity shifts. ie1: effective rainfall after rainfall intensity shifts. t_dist: total distance of slope(from 0 to L). x_space: number of grid points(boundaries included). dt: time space. n: manning coefficient. slope: slope for manning equation.
625941bd92d797404e304086
@register.filter(name="percentage") <NEW_LINE> def percentage(value, arg): <NEW_LINE> <INDENT> return 100.0 * divide(value, arg)
Percentage
625941bd091ae35668666e61
def configure_subparsers(subparsers): <NEW_LINE> <INDENT> parser = subparsers.add_parser( 'filter-pageid', help='Filter page IDs.', ) <NEW_LINE> parser.add_argument( '--start-id', type=int, required=True, help='Start ID.' ) <NEW_LINE> parser.add_argument( '--end-id', type=int, required=True, help='End ID.' ) <NEW_LINE> parser.set_defaults(func=main)
Configure a new subparser .
625941bd8da39b475bd64e6e
def Clear(self): <NEW_LINE> <INDENT> pass
Clear(self: DoubleArray) Removes every double from the array,rendering it empty.
625941bd851cf427c661a40f
def testStr(self): <NEW_LINE> <INDENT> self.assertEqual(str(self.hiseq2000), "SequencingMachine: Hxxxxxxxx (Test machine)")
Test ``__str__()``
625941bd6aa9bd52df036ca0
def hasNext(self): <NEW_LINE> <INDENT> if len(self.q) > 0: <NEW_LINE> <INDENT> while self.q[-1].left is not None: <NEW_LINE> <INDENT> left = self.q[-1].left <NEW_LINE> self.q[-1].left = None <NEW_LINE> self.q.append(left) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> return False
:rtype: bool
625941bd07d97122c4178783
def remove_measurement(self, measurement_id): <NEW_LINE> <INDENT> measurement = self._session.query(Measurement).filter_by(id=measurement_id).first() <NEW_LINE> if measurement: <NEW_LINE> <INDENT> self._session.delete(measurement) <NEW_LINE> self._session.commit() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("The {} is an invalid measure identifier!".format(measurement_id))
Remove the measurement.
625941bdcad5886f8bd26ede
def compute_ligand_similarity(smiles, pair): <NEW_LINE> <INDENT> (a,b) = pair <NEW_LINE> smi_a = smiles[a] <NEW_LINE> mol_a = AllChem.MolFromSmiles(smi_a) <NEW_LINE> if mol_a == None: <NEW_LINE> <INDENT> mol_a = AllChem.MolFromSmiles(smi_a, sanitize=False) <NEW_LINE> <DEDENT> fp_a = FingerprintMols.FingerprintMol(mol_a) <NEW_LINE> smi_b = smiles[b] <NEW_LINE> mol_b = AllChem.MolFromSmiles(smi_b) <NEW_LINE> if mol_b == None: <NEW_LINE> <INDENT> mol_b = AllChem.MolFromSmiles(smi_b, sanitize=False) <NEW_LINE> <DEDENT> fp_b = FingerprintMols.FingerprintMol(mol_b) <NEW_LINE> sim=fs(fp_a, fp_b) <NEW_LINE> return a, b, sim
Input a list of smiles, and a pair to compute the similarity. Returns the indices of the pair and the similarity
625941bd63f4b57ef000101d
@gen.coroutine <NEW_LINE> def play_phase(game, expected_messages): <NEW_LINE> <INDENT> while expected_messages.has_messages_to_send(): <NEW_LINE> <INDENT> yield gen.sleep(10e-6) <NEW_LINE> yield send_messages_if_needed(game, expected_messages) <NEW_LINE> <DEDENT> while expected_messages.has_messages_to_receive(): <NEW_LINE> <INDENT> yield gen.sleep(10e-6) <NEW_LINE> <DEDENT> yield send_current_orders(game)
Play a phase for a user game: #. Send messages #. wait for messages to receive #. send current orders. :param game: user game :param expected_messages: expected messages :type game: NetworkGame :type expected_messages: ExpectedMessages
625941bd50812a4eaa59c222
def parameter3(priceList): <NEW_LINE> <INDENT> firAvg = sum(priceList[:10]) <NEW_LINE> midAvg = sum(priceList[ (len(priceList)) - 5:(len(priceList)) + 5]) <NEW_LINE> lasAvg = sum(priceList[-10:]) <NEW_LINE> firAvg = firAvg / 10 <NEW_LINE> midAvg = midAvg / 10 <NEW_LINE> lasAvg = lasAvg / 10 <NEW_LINE> avgs = np.array(([firAvg, midAvg, lasAvg])) <NEW_LINE> slope = linRegress(priceList) <NEW_LINE> adjSlope = slope / avg(priceList) <NEW_LINE> return adjSlope * 100
Determines slope of three 10-day averaged points, and returns as a percent of average stock price
625941bd004d5f362079a233
def add_timing_signal_nd(x, min_timescale=1.0, max_timescale=1.0e4): <NEW_LINE> <INDENT> num_dims = len(x.get_shape().as_list()) - 2 <NEW_LINE> channels = tf_shape_list(x)[-1] <NEW_LINE> num_timescales = channels // (num_dims * 2) <NEW_LINE> log_timescale_increment = ( math.log(float(max_timescale) / float(min_timescale)) / (tf.to_float(num_timescales) - 1)) <NEW_LINE> inv_timescales = min_timescale * tf.exp( tf.to_float(tf.range(num_timescales)) * -log_timescale_increment) <NEW_LINE> for dim in range(num_dims): <NEW_LINE> <INDENT> length = tf_shape_list(x)[dim + 1] <NEW_LINE> position = tf.to_float(tf.range(length)) <NEW_LINE> scaled_time = tf.expand_dims(position, 1) * tf.expand_dims( inv_timescales, 0) <NEW_LINE> signal = tf.concat([tf.sin(scaled_time), tf.cos(scaled_time)], axis=1) <NEW_LINE> prepad = dim * 2 * num_timescales <NEW_LINE> postpad = channels - (dim + 1) * 2 * num_timescales <NEW_LINE> signal = tf.pad(signal, [[0, 0], [prepad, postpad]]) <NEW_LINE> for _ in range(1 + dim): <NEW_LINE> <INDENT> signal = tf.expand_dims(signal, 0) <NEW_LINE> <DEDENT> for _ in range(num_dims - 1 - dim): <NEW_LINE> <INDENT> signal = tf.expand_dims(signal, -2) <NEW_LINE> <DEDENT> x += signal <NEW_LINE> <DEDENT> return x
https://github.com/tensorflow/tensor2tensor/blob/120315cbe35f468876512b790dc77d792d4db72c/tensor2tensor/layers/common_attention.py#L504 Adds a bunch of sinusoids of different frequencies to a Tensor. Each channel of the input Tensor is incremented by a sinusoid of a different frequency and phase in one of the positional dimensions. This allows attention to learn to use absolute and relative positions. Timing signals should be added to some precursors of both the query and the memory inputs to attention. The use of relative position is possible because sin(a+b) and cos(a+b) can be experessed in terms of b, sin(a) and cos(a). x is a Tensor with n "positional" dimensions, e.g. one dimension for a sequence or two dimensions for an image We use a geometric sequence of timescales starting with min_timescale and ending with max_timescale. The number of different timescales is equal to channels // (n * 2). For each timescale, we generate the two sinusoidal signals sin(timestep/timescale) and cos(timestep/timescale). All of these sinusoids are concatenated in the channels dimension. Args: x: a Tensor with shape [batch, d1 ... dn, channels] min_timescale: a float max_timescale: a float Returns: a Tensor the same shape as x.
625941bd236d856c2ad446d3
def searchblock(candidate): <NEW_LINE> <INDENT> regexp = r"^/(\*|[a-zA-Z\*][a-zA-Z0-9_\*]{0,100})(/(\*|[a-zA-Z0-9_\.\-\*]{1,100})){0,1}(/(\*|[A-Z\-\*]{1,50})(#(\*|[a-zA-Z0-9\.\-_\*]){0,100}){0,1}){0,1}$" <NEW_LINE> return check(regexp, candidate)
A block name with a * wildcard one or more times in it.
625941bd57b8e32f52483397
def _add_threshold_to_model_fn(model_fn, threshold): <NEW_LINE> <INDENT> def new_model_fn(features, labels, mode, params): <NEW_LINE> <INDENT> spec = model_fn(features, labels, mode, params) <NEW_LINE> threshold_tensor = tf.constant(threshold, dtype=tf.float32) <NEW_LINE> default_serving_export = spec.export_outputs[ tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] <NEW_LINE> default_serving_export.outputs["threshold"] = threshold_tensor <NEW_LINE> return spec <NEW_LINE> <DEDENT> return new_model_fn
Adds the classifier threshold to the given model_fn.
625941bd1d351010ab855a1a
def get_network_config(self, name, index=0): <NEW_LINE> <INDENT> network = self.config['networks'][index] <NEW_LINE> if name in network: <NEW_LINE> <INDENT> return network[name] <NEW_LINE> <DEDENT> subnet = SubnetManager.get(network['subnet']) <NEW_LINE> try: <NEW_LINE> <INDENT> return getattr(subnet, name) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None
Gets network config prefering instance parameters over subnet
625941bdff9c53063f47c0f3
def wait_for_rpc(self): <NEW_LINE> <INDENT> self.run_and_stop_at(TestableCopyingManager.RESPONDING) <NEW_LINE> requests = self._get_captured_request() <NEW_LINE> def send_response(status_message): <NEW_LINE> <INDENT> self._set_response(status_message) <NEW_LINE> self.run_and_stop_at(TestableCopyingManager.SLEEPING) <NEW_LINE> <DEDENT> return requests, send_response
Tells the CopyingManager thread to advance to the point where its workers have emulated sending an RPC. @return: A tuple containing the list of AddEventsRequest's that were sent by each worker and a function that when invoked will set the passed in status message as the response to the AddEventsRequest for each worker.
625941bd76e4537e8c35156e
def __get_forms(self): <NEW_LINE> <INDENT> query = HtmlWebResourceQuery(max_size=True) <NEW_LINE> query.filter_by_web_service_scan(self.web_scan_uuid) <NEW_LINE> response = query.search(self.org_uuid) <NEW_LINE> forms = [] <NEW_LINE> for result in response.results: <NEW_LINE> <INDENT> result_forms = result["_source"].get("forms", []) <NEW_LINE> forms.extend(result_forms) <NEW_LINE> <DEDENT> return forms
Get a list containing dictionaries representing all of the forms associated with the inspected web application. :return: A list containing dictionaries representing all of the forms associated with the inspected web application.
625941bd55399d3f055885b1
def integrate(self): <NEW_LINE> <INDENT> self.dark_time_start = time.time() <NEW_LINE> azcam.db.tools["controller"].start_exposure(1) <NEW_LINE> self.dark_time = time.time() - self.dark_time_start <NEW_LINE> if self.comp_exposure: <NEW_LINE> <INDENT> if not azcam.db.tools["instrument"].shutter_strobe: <NEW_LINE> <INDENT> azcam.db.tools["instrument"].comps_off() <NEW_LINE> <DEDENT> azcam.db.tools["instrument"].set_comps("shutter") <NEW_LINE> <DEDENT> self.exposure_time_remaining = 0 <NEW_LINE> if self.image_type == "zero": <NEW_LINE> <INDENT> self.exposure_time = self.exposure_time_saved <NEW_LINE> <DEDENT> self.exposure_flag == self.exposureflags["READ"] <NEW_LINE> return
Integration.
625941bdfb3f5b602dac358e
def __init__(self,skydir,**kwargs): <NEW_LINE> <INDENT> self.init() <NEW_LINE> self.skydir = skydir <NEW_LINE> d = self.defaults.copy() <NEW_LINE> for kw in [self.factory_kwargs,self.catalog_kwargs, self.roi_kwargs,self.fit_kwargs,d]: <NEW_LINE> <INDENT> for k,v in kw.items(): <NEW_LINE> <INDENT> kw[k] = kwargs.pop(k,v) <NEW_LINE> <DEDENT> <DEDENT> self.__dict__.update(d) <NEW_LINE> if kwargs: <NEW_LINE> <INDENT> print('\n'.join(['Unrecognized kwargs:']+ ["\t%s"%k for k in kwargs.keys()]+[''])) <NEW_LINE> <DEDENT> self.bins = self._get_bins() <NEW_LINE> self.bg_roi = self.background_fit() <NEW_LINE> if self.free_mask is None: <NEW_LINE> <INDENT> self.free_mask = self._get_free_mask() <NEW_LINE> <DEDENT> self.names = [ps.name.replace(' ','_') for ps in self.bg_roi.psm.point_sources[self.free_mask]] <NEW_LINE> self.nfree = self.free_mask.sum() <NEW_LINE> self.data = np.empty((self.bins.shape[0], self.nfree, 10) ,dtype='float')
Create and set up a LightCurve. Parameters: bg_interval: tuple [(239500801, today)] Start and stop times in MET for the background fit. Also determines the full range of the light curve. The default ending value is 0:00 of the current day, in UTC. refit_radius: float [2.] refit_flux: float [0.] free_mask: array(Bool) [None] Sources within |refit radius| of the central source and with fluxes >= |refit_flux| will have their flux parameters left free in the fits for individual time bins. If provided, |free_mask| overrides the mask determined by |refit_flux| and |refit_radius|. index_free: Bool [False] If true, all spectral parameters for the central source will be left free in the fits for the individual time bins timebin: string or int ['month'] The size of the individual time bins for the light curve. An integer specifies the length of a bin in days. A string can be one of 'day', 'week', or 'month', indicating bins of one day, one week, or one *calendar* month. ts_threshold: float [5] TS below which to calculate an upper limit for a source in a given time bin. upper_limit_confidence: float (0-1.) [.95] Confidence level for upper limit calculations
625941bd293b9510aa2c3196
def cartesian_heading(self, theta): <NEW_LINE> <INDENT> alpha = 90 - theta <NEW_LINE> alpha = alpha % 360 <NEW_LINE> return alpha
Return the absolute Cartesian heading for the turtle in degrees.
625941bd56ac1b37e62640d2
def checkAns(self) -> int: <NEW_LINE> <INDENT> if self.state == STATE_SINGLE: <NEW_LINE> <INDENT> print(' [Debug: your choice:] ' + str(self.ans)) <NEW_LINE> if list2str( self.ans) == dataList.SingleList[self.count % 10]['参考答案']: <NEW_LINE> <INDENT> return 3 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> elif self.state == STATE_MULTI: <NEW_LINE> <INDENT> print(' [Debug: your choice:] ' + str(self.ans)) <NEW_LINE> if list2str( self.ans) == dataList.MultiList[self.count % 10]['参考答案']: <NEW_LINE> <INDENT> return 5 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print(' [Debug: your choice:] ' + str(self.ans)) <NEW_LINE> if list2str( self.ans) == dataList.JudgeList[self.count % 10]['参考答案']: <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0
检查结果,返回本题得分
625941bd73bcbd0ca4b2bf78
def balance_equation(self, equation): <NEW_LINE> <INDENT> left_side_molecules, right_side_molecules = self.__get_molecules(equation) <NEW_LINE> self.logger.info("Parsing the equation...") <NEW_LINE> equation_matrix = self.matrix_creator.create_equation_matrix(equation) <NEW_LINE> self.logger.info("Creating the equation matrix...", args=equation_matrix) <NEW_LINE> try: <NEW_LINE> <INDENT> equation_coefficients = self.matrix_computer.compute_coefficients(equation_matrix) <NEW_LINE> <DEDENT> except ValueError as ex: <NEW_LINE> <INDENT> self.logger.error("Coefficients computing error: ", ex) <NEW_LINE> return False <NEW_LINE> <DEDENT> self.logger.info("Computed the coefficients:", args=equation_coefficients) <NEW_LINE> self.__print_results(left_side_molecules, right_side_molecules, equation_coefficients) <NEW_LINE> return self.balancing_validator.validate_balancing( left_side_molecules, right_side_molecules, equation_coefficients)
Computes the coefficients and prints the balanced equation. :param equation: the equation to be balanced :return: whether equation was successfully balanced or not :raises ValueError if the equation could not be balanced
625941bdf8510a7c17cf95f9
def artscrape(filename, sent_dictionary, sentiment, version): <NEW_LINE> <INDENT> if version == 'WSJ': <NEW_LINE> <INDENT> articlesearch = re.compile('(?<=\\n\\n\s\s\sLP\\t)(.)*?(?=\\n\\n\s\s\sCO\\t)', re.DOTALL) <NEW_LINE> datesearch = re.compile('(?<=\\n\s\s\sPD\\t)(.)+(?=\\n)') <NEW_LINE> newssearch = re.compile('(?<=\\n\s\s\sSN\\t)(.)+(?=\\n)') <NEW_LINE> wordssearch = re.compile('(?<=\\n\s\s\sWC\\t)([0-9,]+)') <NEW_LINE> <DEDENT> elif version == 'RET': <NEW_LINE> <INDENT> articlesearch = re.compile('(?<=\\n\sLP\\t)(.)+?(?=\\n\\n\s+RF\\t)', re.DOTALL) <NEW_LINE> datesearch = re.compile('(?<=\\n\sPD\\t)(.)+(?=\\n)') <NEW_LINE> newssearch = re.compile('(?<=\\n\sSN\\t)(.)+(?=\\n)') <NEW_LINE> wordssearch = re.compile('(?<=\\n\sWC\\t)([0-9,]+)') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise VersionError() <NEW_LINE> <DEDENT> with open(filename, encoding='utf-8') as doc: <NEW_LINE> <INDENT> text = doc.read() <NEW_LINE> <DEDENT> position = 0 <NEW_LINE> articlebox = [] <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> datematch = datesearch.search(text, pos=position) <NEW_LINE> newsmatch = newssearch.search(text, pos=position) <NEW_LINE> artimatch = articlesearch.search(text, pos=position) <NEW_LINE> wordsmatch = wordssearch.search(text, pos=position) <NEW_LINE> newspiece = TrainArticle(datematch.group(), newsmatch.group(), artimatch.group(), wordsmatch.group(), sentiment.pop(0)) <NEW_LINE> position = artimatch.end() + 1 <NEW_LINE> newspiece.senteval(sent_dictionary) <NEW_LINE> articlebox.append(newspiece) <NEW_LINE> <DEDENT> except (AttributeError, IndexError): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return articlebox
This function processes the whole Factiva file Args: filename (str): the name of the Factiva file in the current working directory sent_dictionary (dict): dictionary returned by sentiment_dictionary function sentiment (list): list of sentiment scores matching the order of article present in the Factiva file version (str): Source of the news, currently implemented options are Wall Street Journal (WSJ) and Reuters (RET) Returns: List with initialized TrainArticle objects Raises: VersionError if the unrecognized version is selected.
625941bd4428ac0f6e5ba6ef
def coveragerc(opts): <NEW_LINE> <INDENT> template = get_template("coveragerc") <NEW_LINE> return template.substitute(opts)
Template of .coveragerc Args: opts: mapping parameters as dictionary Returns: str: file content as string
625941bd96565a6dacc8f5ca
def beta_create_PredictionService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): <NEW_LINE> <INDENT> request_deserializers = { ('tensorflow.serving.PredictionService', 'Predict'): tensorflow__serving_dot_apis_dot_predict__pb2.PredictRequest.FromString, } <NEW_LINE> response_serializers = { ('tensorflow.serving.PredictionService', 'Predict'): tensorflow__serving_dot_apis_dot_predict__pb2.PredictResponse.SerializeToString, } <NEW_LINE> method_implementations = { ('tensorflow.serving.PredictionService', 'Predict'): face_utilities.unary_unary_inline(servicer.Predict), } <NEW_LINE> server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) <NEW_LINE> return beta_implementations.server(method_implementations, options=server_options)
The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This function was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0
625941bd7047854f462a130a
def installULSControlRecorder(self, recorderType, calcSet= None): <NEW_LINE> <INDENT> recorder= self.createRecorder(recorderType, calcSet) <NEW_LINE> self.shape.setupULSControlVars(self.elemSet) <NEW_LINE> nodHndlr= self.getPreprocessor().getNodeHandler <NEW_LINE> if(nodHndlr.numDOFs==3): <NEW_LINE> <INDENT> recorder.callbackRecord= controlULSCriterion2D() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> recorder.callbackRecord= controlULSCriterion() <NEW_LINE> <DEDENT> return recorder
Install recorder for verification of ULS criterion. :param recorderType: type of the recorder to install. :param calcSet: set of elements to be checked (defaults to 'None' which means that this set will be created elsewhere). In not 'None' the member elements will be appended to this set.
625941bd8e7ae83300e4aeca
def get_entry(conn,table,field,value): <NEW_LINE> <INDENT> c = conn.cursor() <NEW_LINE> with conn: <NEW_LINE> <INDENT> c.execute(f"SELECT * FROM {table} WHERE {field}=:val", {'val': value}) <NEW_LINE> <DEDENT> ret = c.fetchall() <NEW_LINE> c.close() <NEW_LINE> return ret
field: name of the field to look the file by value: value to match field
625941bd5f7d997b87174992
def __init__(self, filenames, record_bytes, header_bytes=None, footer_bytes=None, buffer_size=None, compression_type=None, num_parallel_reads=None, name=None): <NEW_LINE> <INDENT> filenames = _create_or_validate_filenames_dataset(filenames, name=name) <NEW_LINE> self._filenames = filenames <NEW_LINE> self._record_bytes = record_bytes <NEW_LINE> self._header_bytes = header_bytes <NEW_LINE> self._footer_bytes = footer_bytes <NEW_LINE> self._buffer_size = buffer_size <NEW_LINE> self._compression_type = compression_type <NEW_LINE> def creator_fn(filename): <NEW_LINE> <INDENT> return _FixedLengthRecordDataset( filename, record_bytes, header_bytes, footer_bytes, buffer_size, compression_type, name=name) <NEW_LINE> <DEDENT> self._impl = _create_dataset_reader( creator_fn, filenames, num_parallel_reads, name=name) <NEW_LINE> variant_tensor = self._impl._variant_tensor <NEW_LINE> super(FixedLengthRecordDatasetV2, self).__init__(variant_tensor)
Creates a `FixedLengthRecordDataset`. Args: filenames: A `tf.string` tensor or `tf.data.Dataset` containing one or more filenames. record_bytes: A `tf.int64` scalar representing the number of bytes in each record. header_bytes: (Optional.) A `tf.int64` scalar representing the number of bytes to skip at the start of a file. footer_bytes: (Optional.) A `tf.int64` scalar representing the number of bytes to ignore at the end of a file. buffer_size: (Optional.) A `tf.int64` scalar representing the number of bytes to buffer when reading. compression_type: (Optional.) A `tf.string` scalar evaluating to one of `""` (no compression), `"ZLIB"`, or `"GZIP"`. num_parallel_reads: (Optional.) A `tf.int64` scalar representing the number of files to read in parallel. If greater than one, the records of files read in parallel are outputted in an interleaved order. If your input pipeline is I/O bottlenecked, consider setting this parameter to a value greater than one to parallelize the I/O. If `None`, files will be read sequentially. name: (Optional.) A name for the tf.data operation.
625941bd99fddb7c1c9de290
def extract(self): <NEW_LINE> <INDENT> extracted = {} <NEW_LINE> if self.is_dir: <NEW_LINE> <INDENT> for root, dirs, files in os.walk(self.path, topdown=True): <NEW_LINE> <INDENT> for name in files: <NEW_LINE> <INDENT> path_to_file = os.path.join(root, name) <NEW_LINE> self.logger.debug("Scanning '%s'" % path_to_file) <NEW_LINE> extracted_data = self.extract_lines_from_file(path_to_file) <NEW_LINE> extracted.update(extracted_data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> extracted_data = self.extract_lines_from_file(self.path) <NEW_LINE> extracted.update(extracted_data) <NEW_LINE> <DEDENT> return extracted
Extracts lines out of files.
625941bdd10714528d5ffbde
def show_plain_text(self, text): <NEW_LINE> <INDENT> self.visibility_changed(True) <NEW_LINE> self.raise_() <NEW_LINE> self.switch_to_plain_text() <NEW_LINE> self.set_plain_text(text, is_code=False)
Show text in plain mode
625941bde5267d203edcdb9e
def testInstallAndUninstallGame(self): <NEW_LINE> <INDENT> assert d(text="游戏中心").exists, 'Game Center icon not found!' <NEW_LINE> d(text="游戏中心").sibling(className='android.view.View').click.wait() <NEW_LINE> assert d(resourceId='android:id/content').child(text="推荐").wait.exists(timeout=20000), 'Launch Game Center failed!' <NEW_LINE> d.sleep(5) <NEW_LINE> d.press('left') <NEW_LINE> d.sleep(2) <NEW_LINE> d.press('left') <NEW_LINE> d.sleep(2) <NEW_LINE> d.press('left') <NEW_LINE> d.sleep(2) <NEW_LINE> d.press('down') <NEW_LINE> d.sleep(2) <NEW_LINE> d.press('enter') <NEW_LINE> d.sleep(3) <NEW_LINE> if d(className='android.widget.Button', text='启 动').wait.exists(timeout=5000): <NEW_LINE> <INDENT> assert d(resourceId='com.xiaomi.mibox.gamecenter:id/uninstall', text='卸载').wait.exists(timeout=5000), 'uninstall buuton not found!' <NEW_LINE> d.press('right') <NEW_LINE> d.sleep(3) <NEW_LINE> d.press('right') <NEW_LINE> d.sleep(3) <NEW_LINE> d.press('enter') <NEW_LINE> assert d(text='取消').wait.exists(timeout=5000), 'cancel button not found!' <NEW_LINE> d.press('left') <NEW_LINE> d.sleep(3) <NEW_LINE> d.press('enter') <NEW_LINE> assert d(resourceId='com.xiaomi.mibox.gamecenter:id/pop_selector_2').wait.exists(timeout=5000), 'return button not found!' <NEW_LINE> d.press('enter') <NEW_LINE> assert d(className='android.widget.FrameLayout').child(text="热门精选").wait.exists(timeout=5000), 'game main screen not found!' <NEW_LINE> <DEDENT> elif d(className='android.widget.Button', text='安 装').exists: <NEW_LINE> <INDENT> d(className='android.widget.Button', text='安 装').click.wait() <NEW_LINE> d.sleep(60) <NEW_LINE> assert d(className='android.widget.Button', text='启 动').wait.exists(timeout=5000), 'install game failed in 30 seconds!' 
<NEW_LINE> self.after_install = d.server.adb.cmd('shell pm list package -3').communicate()[0].split() <NEW_LINE> del_apk = [i.split('=')[1] for i in self.after_install if i not in self.before_install] <NEW_LINE> for apk in del_apk: <NEW_LINE> <INDENT> d.server.adb.cmd('shell pm uninstall %s' % apk) <NEW_LINE> d.sleep(3) <NEW_LINE> <DEDENT> assert d(className='android.widget.Button', text='安 装').wait.exists(timeout=18000), 'uninstall game failed' <NEW_LINE> d.press('back') <NEW_LINE> assert d(resourceId='android:id/content').child(text="推荐").wait.exists(timeout=5000), 'game main screen not found!' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert False, 'game preview screen not appear!' <NEW_LINE> <DEDENT> d.press('back')
launch app store and exit
625941bd498bea3a759b99ae
def build_pyfilter_command( pyfilter_args: Optional[List[str]] = None, pyfilter_path: Optional[str] = None ) -> str: <NEW_LINE> <INDENT> import hou <NEW_LINE> if pyfilter_args is None: <NEW_LINE> <INDENT> pyfilter_args = [] <NEW_LINE> <DEDENT> if pyfilter_path is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pyfilter_path = hou.findFile("pyfilter/houdini_toolbox-pyfilter.py") <NEW_LINE> <DEDENT> except hou.OperationFailed: <NEW_LINE> <INDENT> _logger.error("Could not find pyfilter/houdini_toolbox-pyfilter.py") <NEW_LINE> return "" <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not os.path.isfile(pyfilter_path): <NEW_LINE> <INDENT> raise OSError(f"No such file: {pyfilter_path}") <NEW_LINE> <DEDENT> <DEDENT> cmd = f'-P "{pyfilter_path} {" ".join(pyfilter_args)}"' <NEW_LINE> return cmd
Build a PyFilter -P command. :param pyfilter_args: Optional list of args to pass to the command. :param pyfilter_path: Optional path to the filter script. :return: The constructed PyFilter command.
625941bd26238365f5f0ed68
def KmeansFiltering(inputfile, outputfile): <NEW_LINE> <INDENT> infile = open(inputfile, 'rb') <NEW_LINE> filereader = csv.reader(infile) <NEW_LINE> genreVector = next(filereader) <NEW_LINE> for row in filereader: <NEW_LINE> <INDENT> genreVector = np.vstack([genreVector, row]) <NEW_LINE> <DEDENT> centroids, labels = kmeans2(genreVector, 18) <NEW_LINE> new = list(labels) <NEW_LINE> outfile = open(outputfile, 'wb') <NEW_LINE> filewriter = csv.writer(outfile) <NEW_LINE> filewriter.writerows(new) <NEW_LINE> i=0 <NEW_LINE> recommendation = defaultdict(list) <NEW_LINE> infile = open('kmeans.txt', 'rb') <NEW_LINE> lines = infile.read().splitlines() <NEW_LINE> lines = lines[1:] <NEW_LINE> for row in lines: <NEW_LINE> <INDENT> recommendation[row].append(i) <NEW_LINE> i = i+1 <NEW_LINE> <DEDENT> return recommendation
:param inputfile: Feature vector for each movie :param outputfile: Kmeans classification of each movie :return: Recommendation of similar movies
625941bd6aa9bd52df036ca1
def save_options(self, opt_manager): <NEW_LINE> <INDENT> opt_manager.options['save_path'] = fix_path(self.savepath_box.GetValue())
Save panel options to OptionsHandler object.
625941bd91f36d47f21ac3ed
def log_mutant(self, active_file, logger): <NEW_LINE> <INDENT> logger.info("{0} - Line {1}".format(active_file, self.line_no)) <NEW_LINE> logger.info("Original: {0}".format(self.original_source.split('\n')[0])) <NEW_LINE> logger.info("Mutant : {0}".format(astor.to_source(self.base_node)).split('\n')[0])
Prints a one-line summary to highlight the difference between the original code and the mutant split(' ')[0] is used to truncate if/elif mutation instances (entire if sections were printed before)
625941bd3346ee7daa2b2c68
def renderer(self, text): <NEW_LINE> <INDENT> if type(text) == str: <NEW_LINE> <INDENT> text = unicode(text, self.app.config["FLATPAGES_ENCODING"]) <NEW_LINE> <DEDENT> if self.pre_render: <NEW_LINE> <INDENT> text = render_template_string(Markup(text)) <NEW_LINE> <DEDENT> extra_args = [ "--filter=pandoc-crossref", "--filter=pandoc-citeproc", "--filter=pandoc-sidenote", "--standalone", "--mathml", "--base-header-level=2", "--highlight-style", "pygments", '--bibliography="pages/all.bib"', "--csl=pages/lncs.csl", "-Mreference-section-title=References", "-Mlink-citations=true" ] <NEW_LINE> pandocver = int(pypandoc.get_pandoc_version()[0]) <NEW_LINE> if pandocver < 2: <NEW_LINE> <INDENT> extra_args.append("-S") <NEW_LINE> format_str = "markdown+raw_tex+yaml_metadata_block" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> format_str = "markdown+raw_tex+smart+yaml_metadata_block" <NEW_LINE> <DEDENT> output = pypandoc.convert_text( text.encode("utf8"), 'html', format = format_str, extra_args=extra_args ) <NEW_LINE> return output
Renders a flat page to HTML. :param text: the text of the flat page :type text: string
625941bd4527f215b584c358
def __init__(self, page): <NEW_LINE> <INDENT> self._page = page
Initiate compute resource profile form :type page: ComputeProfile :param page: The compute profile object ComputeProfile or ComputeResource
625941bd0a366e3fb873e716
@app.post('/<user_id:int>/domains/<domain_id:int>') <NEW_LINE> @app.post('/<user_id:int>/domains/<domain_id:int>/') <NEW_LINE> @require_perm(permission="domains_modify_all") <NEW_LINE> def domains_modify_domain_admin(user, user_id, domain_id): <NEW_LINE> <INDENT> data = request_data() <NEW_LINE> data['user_id'] = user_id <NEW_LINE> data['domain_id'] = domain_id <NEW_LINE> data = DomainEditValidator.parse(data) <NEW_LINE> domain = get_domain_by_id(int(domain_id), user_id=int(user.id)) <NEW_LINE> if 'comment' in data and data['comment'] is not None: <NEW_LINE> <INDENT> domain.comment = data['comment'] <NEW_LINE> <DEDENT> domain.save() <NEW_LINE> dbconn.session.safe_commit() <NEW_LINE> return ok({})
POST /domains/domain_id route
625941bdeab8aa0e5d26da5c
def find_plugin(name, class_type, kind): <NEW_LINE> <INDENT> canonical_name = name.replace('-', '_') <NEW_LINE> class_name = class_type.__name__ <NEW_LINE> try: <NEW_LINE> <INDENT> module_name = 'zazu.plugins.{}_{}'.format(canonical_name, kind) <NEW_LINE> module = importlib.import_module(module_name) <NEW_LINE> plugin_class = getattr(module, class_name) <NEW_LINE> assert(issubclass(plugin_class, class_type)), 'Plugin is not a subclass of {}'.format(class_name) <NEW_LINE> return plugin_class <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> raise click.ClickException('{} ({}) is not a known {}'.format(name, module_name, class_name))
Imports and finds a plugin by name and kind.
625941bd627d3e7fe0d68d4c
def test_propfind_same_as_sync_collection_sync_token(self) -> None: <NEW_LINE> <INDENT> calendar_path = "/calendar.ics/" <NEW_LINE> self.mkcalendar(calendar_path) <NEW_LINE> propfind = get_file_content("allprop.xml") <NEW_LINE> _, responses = self.propfind(calendar_path, propfind) <NEW_LINE> response = responses[calendar_path] <NEW_LINE> assert not isinstance(response, int) <NEW_LINE> status, sync_token = response["D:sync-token"] <NEW_LINE> assert status == 200 and sync_token.text <NEW_LINE> report_sync_token, _ = self._report_sync_token(calendar_path) <NEW_LINE> assert sync_token.text == report_sync_token
Compare sync-token property with sync-collection sync-token
625941bd63b5f9789fde6fe3
def pack_to(self, writer): <NEW_LINE> <INDENT> writer.write(self._objectID, 'I')
Writes the current CreatedFixedCustomObject to the given BinaryWriter.
625941bd5fcc89381b1e15bb
def _get_headers(self): <NEW_LINE> <INDENT> return dict(self._get_default_headers(), **{'cookie': self._auth_cookie})
Instance method used to pass the default headers plus the auth cookie
625941bd26238365f5f0ed69
def getDialect(self) -> str: <NEW_LINE> <INDENT> conStr = ConnectionStringFactory.fromConfig(self) <NEW_LINE> return conStr.get_backend_name()
Parses the `db` key of the config and returns the database dialect. Returns ------- dialect : `str` Dialect found in the connection string.
625941bdc4546d3d9de72930
def test_login_to_get_token(client): <NEW_LINE> <INDENT> client.set_auth(TokenType.empty) <NEW_LINE> res = client.post(LOGIN_URL, data={'username': 'test', 'password': 'test'}) <NEW_LINE> assert res.status_code == 200 <NEW_LINE> assert res.json['access_token'] is not None <NEW_LINE> assert res.json['refresh_token'] is not None
Test login to get token.
625941bd796e427e537b04c1
def browse(self, pathitems): <NEW_LINE> <INDENT> pass
Browse the hub at a given location
625941bd596a8972360899c2
@mapcube_input <NEW_LINE> def write_layers(mc, directory, prefix, filetype='png', show_frame_number=False, **savefig_kwargs): <NEW_LINE> <INDENT> width = 1 + int(np.ceil(np.log10(len(mc)))) <NEW_LINE> filepaths = [] <NEW_LINE> for i, m in enumerate(mc): <NEW_LINE> <INDENT> plt.close('all') <NEW_LINE> file_number = '{0:0{width}}'.format(i, width=width) <NEW_LINE> filepath = os.path.join(os.path.expanduser(directory), '{:s}_{:s}.{:s}'.format(prefix, file_number, filetype)) <NEW_LINE> if show_frame_number: <NEW_LINE> <INDENT> title = '{:s}{:s}frame {:n}'.format(m.latex_name, '\n', i) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> title = m.latex_name <NEW_LINE> <DEDENT> ret = m.plot(title=title) <NEW_LINE> plt.grid('on', color='cyan', linestyle=":") <NEW_LINE> plt.savefig(filepath, **savefig_kwargs) <NEW_LINE> filepaths.append(filepath) <NEW_LINE> <DEDENT> return filepaths
Write a numerically ordered set of images out from a mapcube. This function is useful for making movies using FFMPEG. :param mc: :param directory: :param prefix: :param filetype: :return:
625941bd287bf620b61d3964
def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(BasePaginationSerializer, self).__init__(*args, **kwargs) <NEW_LINE> results_field = self.results_field <NEW_LINE> object_serializer = self.opts.object_serializer_class <NEW_LINE> self.fields[results_field] = object_serializer(source='object_list')
Override init to add in the object serializer field on-the-fly.
625941bd23849d37ff7b2f8f
def validate_addresses(self, validate=True, channel=Channel.BOTH): <NEW_LINE> <INDENT> self.send('trick.var_validate_address({0})'.format(bool(validate)), channel)
Set whether or not addresses are validated. When bool(validate) is True, variable addresses will be validated against the memory manager before being read. Those not known to Trick are considered invalid and their values are reported as "BAD_REF". This prevents malformed variable requests, such as pointers with invalid offsets, from causing segmentation faults. Parameters ---------- validate : bool The desired validation state. channel : Channel The channel to affect.
625941bd435de62698dfdb51
def insert_report(self, document_tree, filename): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> insert_id = self.db.reports.insert_one(document_tree).inserted_id <NEW_LINE> print('{} inserted with id {}'.format(filename, insert_id)) <NEW_LINE> self.insert_org(document_tree, insert_id) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print('Failed to save {}'.format(filename)) <NEW_LINE> self.errors.append("{} RECORD NOT SAVED. FILENAME: {}".format(str(datetime.now()), filename))
Attempt to save report tree structure as new document in report collection params: document_tree -- nested hash tables representing the report filename -- string path of file currently being processed
625941bdbe7bc26dc91cd503
@Issue(3787) <NEW_LINE> def info_show_exclude(sbox): <NEW_LINE> <INDENT> sbox.build() <NEW_LINE> wc_dir = sbox.wc_dir <NEW_LINE> A_path = os.path.join(wc_dir, 'A') <NEW_LINE> iota = os.path.join(wc_dir, 'iota') <NEW_LINE> svntest.main.run_svn(None, 'up', '--set-depth', 'exclude', A_path) <NEW_LINE> wc_uuid = svntest.actions.get_wc_uuid(wc_dir) <NEW_LINE> expected_info = [{ 'Path' : re.escape(wc_dir), 'Repository Root' : sbox.repo_url, 'Repository UUID' : wc_uuid, }] <NEW_LINE> svntest.actions.run_and_verify_info(expected_info, '--depth', 'empty', wc_dir) <NEW_LINE> expected_info = [{ 'Path' : '.*%sA' % re.escape(os.sep), 'Repository Root' : sbox.repo_url, 'Repository UUID' : wc_uuid, 'Depth' : 'exclude', }] <NEW_LINE> svntest.actions.run_and_verify_info(expected_info, '--depth', 'empty', A_path) <NEW_LINE> svntest.actions.run_and_verify_info(expected_info, '--depth', 'infinity', A_path) <NEW_LINE> svntest.actions.run_and_verify_info(expected_info, '--depth', 'immediates', A_path) <NEW_LINE> expected_info = [{ 'Path' : '.*%siota' % re.escape(os.sep), 'Repository Root' : sbox.repo_url, 'Repository UUID' : wc_uuid, }] <NEW_LINE> svntest.main.run_svn(None, 'up', '--set-depth', 'exclude', iota) <NEW_LINE> svntest.actions.run_and_verify_info(expected_info, iota) <NEW_LINE> expected_output = svntest.wc.State(wc_dir, { 'iota' : Item(status='A '), }) <NEW_LINE> expected_status = svntest.wc.State(iota, { '' : Item(status=' ', wc_rev='1') }) <NEW_LINE> svntest.actions.run_and_verify_update(iota, expected_output, None, expected_status) <NEW_LINE> sbox.simple_rm('iota') <NEW_LINE> sbox.simple_commit() <NEW_LINE> expected_error = 'svn: E200009: Could not display info for all targets.*' <NEW_LINE> svntest.actions.run_and_verify_svn([], expected_error, 'info', iota) <NEW_LINE> sbox.simple_update() <NEW_LINE> svntest.actions.run_and_verify_svn([], expected_error, 'info', iota)
tests 'info --depth' variants on excluded node
625941bdbe7bc26dc91cd504
def wax(appendable, iterable): <NEW_LINE> <INDENT> for item in iterable: <NEW_LINE> <INDENT> appendable.append(item) <NEW_LINE> yield appendable
append-s *IN PLACE* appendable with one item from iterable, then yield-s the modified appendable. Originally intended to be applied to accumulate into a deque a limited number of characters from a file. Also originally intended that appendable will be occaisionaly reduced in size by an outside process before too many items accumulate. Example: >>> for item in wax([], iter(range(3))): ... print(item) ... [0] [0, 1] [0, 1, 2]
625941bdbf627c535bc130cd
def on_unknown_client(self, res): <NEW_LINE> <INDENT> res['advice'] = {'reconnect' : 'handshake'}
Called to handle an unknown client error. Default suggests another handshake attempt. No expected return value.
625941bda4f1c619b28aff3e
@public_request <NEW_LINE> def returnCurrencies(): <NEW_LINE> <INDENT> pass
:param session: Aiohttp client session object Returns information about currencies.
625941bd21a7993f00bc7bea
def get_memory_invariants(memories): <NEW_LINE> <INDENT> memory_type = type(memories) <NEW_LINE> if memory_type == dict: <NEW_LINE> <INDENT> memory_invariants = dict() <NEW_LINE> for layer_id in memories.keys(): <NEW_LINE> <INDENT> memory_invariants[layer_id] = {key: tf.TensorShape([None] * len(get_shape_list(memories[layer_id][key]))) for key in memories[layer_id].keys()} <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Memory type not supported, must be a dictionary.') <NEW_LINE> <DEDENT> return memory_invariants
Calculates the invariant shapes for the model memories (i.e. layer-wise attentions of the transformer).
625941bdd164cc6175782c4c
def get_chromium_target_version(os='win', channel='canary', target_distance=0): <NEW_LINE> <INDENT> compat_version = chromium_compat_version <NEW_LINE> compat_commit = get_git_hash(chromium_src_dir, compat_version) <NEW_LINE> if compat_version == compat_commit: <NEW_LINE> <INDENT> versions = get_chromium_versions(compat_commit) <NEW_LINE> if len(versions) > 0: <NEW_LINE> <INDENT> compat_version = 'refs/tags/' + versions[0] <NEW_LINE> compat_commit = get_git_hash(chromium_src_dir, compat_version) <NEW_LINE> <DEDENT> <DEDENT> compat_position = get_chromium_main_position(compat_commit) <NEW_LINE> compat_date = get_git_date(chromium_src_dir, compat_commit) <NEW_LINE> channel_version = 'refs/tags/' + get_chromium_channel_version(os, channel) <NEW_LINE> channel_commit = get_chromium_channel_commit(os, channel) <NEW_LINE> channel_position = get_chromium_main_position(channel_commit) <NEW_LINE> channel_date = get_git_date(chromium_src_dir, channel_commit) <NEW_LINE> if compat_position >= channel_position: <NEW_LINE> <INDENT> target_version = compat_version <NEW_LINE> target_commit = compat_commit <NEW_LINE> target_position = compat_position <NEW_LINE> target_date = compat_date <NEW_LINE> <DEDENT> elif target_distance <= 0 or compat_position + target_distance >= channel_position: <NEW_LINE> <INDENT> target_version = channel_version <NEW_LINE> target_commit = channel_commit <NEW_LINE> target_position = channel_position <NEW_LINE> target_date = channel_date <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target_position = compat_position + target_distance <NEW_LINE> target_commit = get_chromium_main_commit(target_position) <NEW_LINE> versions = get_chromium_versions(target_commit) <NEW_LINE> if len(versions) > 0: <NEW_LINE> <INDENT> target_version = 'refs/tags/' + versions[0] <NEW_LINE> target_commit = get_git_hash(chromium_src_dir, target_version) <NEW_LINE> target_position = get_chromium_main_position(target_commit) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target_version 
= target_commit <NEW_LINE> <DEDENT> target_date = get_git_date(chromium_src_dir, target_commit) <NEW_LINE> <DEDENT> msg("") <NEW_LINE> msg("Computed Chromium update for %s %s at distance %d" % (os, channel, target_distance)) <NEW_LINE> msg("Compat: %s %s %s (#%d)" % (compat_date, compat_version, compat_commit, compat_position)) <NEW_LINE> msg("Target: %s %s %s (#%d)" % (target_date, target_version, target_commit, target_position)) <NEW_LINE> msg("Channel: %s %s %s (#%d)" % (channel_date, channel_version, channel_commit, channel_position)) <NEW_LINE> msg("") <NEW_LINE> return target_version
Returns the target Chromium version based on a heuristic.
625941bd4c3428357757c228
def get_temp(self): <NEW_LINE> <INDENT> raw_temp = self.read_i2c_word(self.TEMP_OUT0) <NEW_LINE> actual_temp = (raw_temp / 340) + 36.53 <NEW_LINE> return actual_temp
Reads the temperature from the onboard temperature sensor of the MPU-6050. Returns the temperature in degrees Celcius.
625941bd23e79379d52ee465
def forward(self, src_tokens, src_lengths): <NEW_LINE> <INDENT> if self.history is not None: <NEW_LINE> <INDENT> self.history.clean() <NEW_LINE> <DEDENT> x = self.embed_scale * self.embed_tokens(src_tokens) <NEW_LINE> if self.embed_positions is not None: <NEW_LINE> <INDENT> x += self.embed_positions(src_tokens) <NEW_LINE> <DEDENT> x = F.dropout(x, p=self.dropout, training=self.training) <NEW_LINE> x = x.transpose(0, 1) <NEW_LINE> if self.history is not None: <NEW_LINE> <INDENT> self.history.add(x) <NEW_LINE> <DEDENT> encoder_padding_mask = src_tokens.eq(self.padding_idx) <NEW_LINE> if not encoder_padding_mask.any(): <NEW_LINE> <INDENT> encoder_padding_mask = None <NEW_LINE> <DEDENT> for layer in self.layers: <NEW_LINE> <INDENT> if self.history is not None: <NEW_LINE> <INDENT> x = self.history.pop() <NEW_LINE> <DEDENT> x = layer(x, encoder_padding_mask) <NEW_LINE> if self.history is not None: <NEW_LINE> <INDENT> self.history.add(x) <NEW_LINE> <DEDENT> <DEDENT> if self.history is not None: <NEW_LINE> <INDENT> x = self.history.pop() <NEW_LINE> <DEDENT> x = F.relu(self.fc1(x)) <NEW_LINE> x = self.fc2(x) <NEW_LINE> x = F.dropout(x, p=self.dropout, training=self.training) <NEW_LINE> if self.normalize: <NEW_LINE> <INDENT> x = self.layer_norm(x) <NEW_LINE> <DEDENT> return { 'encoder_out': x, 'encoder_padding_mask': encoder_padding_mask, }
Args: src_tokens (LongTensor): tokens in the source language of shape `(batch, src_len)` src_lengths (torch.LongTensor): lengths of each source sentence of shape `(batch)` Returns: dict: - **encoder_out** (Tensor): the last encoder layer's output of shape `(src_len, batch, embed_dim)` - **encoder_padding_mask** (ByteTensor): the positions of padding elements of shape `(batch, src_len)`
625941bdde87d2750b85fc8e
@pytest.fixture <NEW_LINE> def fr_tutorial_remove_sync_po(settings, french_tutorial, system): <NEW_LINE> <INDENT> po_directory = settings.PODIRECTORY <NEW_LINE> return _require_store(french_tutorial, po_directory, 'remove_sync_tutorial.po')
Require the /fr/tutorial/remove_sync_tutorial.po store.
625941bdcdde0d52a9e52f2e
def construct_ham_dict(coredetlist_sets, h1e, eri): <NEW_LINE> <INDENT> ham_dict = {} <NEW_LINE> ndets = len(coredetlist_sets) <NEW_LINE> for i in range(ndets): <NEW_LINE> <INDENT> idet = coredetlist_sets[i] <NEW_LINE> hii = calc_hii_sets(idet, h1e, eri) <NEW_LINE> ham_dict[frozenset((idet))] = hii <NEW_LINE> for j in range(i + 1, ndets): <NEW_LINE> <INDENT> jdet = coredetlist_sets[j] <NEW_LINE> nexc_ij = n_excit_sets(idet, jdet) <NEW_LINE> if nexc_ij in (1, 2): <NEW_LINE> <INDENT> if nexc_ij == 1: <NEW_LINE> <INDENT> hij = calc_hij_single_sets(idet, jdet, h1e, eri) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hij = calc_hij_double_sets(idet, jdet, h1e, eri) <NEW_LINE> <DEDENT> ham_dict[frozenset((idet, jdet))] = hij <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ham_dict
Construct a dictionary of Hamiltonian matrix elements keyed by frozensets of determinants: a diagonal element for every determinant in coredetlist_sets, plus off-diagonal elements for each pair of determinants that differ by a single or double excitation.
625941bdd6c5a10208143f47
def _set_kws(kws, polygon=False, color='None', edgecolor='None', alpha=None, label=None): <NEW_LINE> <INDENT> kws = {} if kws is None else kws <NEW_LINE> if 'lw' not in kws: <NEW_LINE> <INDENT> kws.setdefault('linewidth', 1) <NEW_LINE> <DEDENT> if polygon: <NEW_LINE> <INDENT> if 'color' not in kws: <NEW_LINE> <INDENT> if 'fc' not in kws: <NEW_LINE> <INDENT> kws.setdefault('facecolor', color) <NEW_LINE> <DEDENT> if 'ec' not in kws: <NEW_LINE> <INDENT> kws.setdefault('edgecolor', edgecolor) <NEW_LINE> <DEDENT> <DEDENT> kws.setdefault('alpha', alpha) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if 'c' not in kws: <NEW_LINE> <INDENT> kws.setdefault('color', color) <NEW_LINE> <DEDENT> <DEDENT> kws.setdefault('label', label) <NEW_LINE> return kws
Set default kws for the kinematic analysis plot elements
625941bd91af0d3eaac9b914
def random_coefficient_hi_generator(number_of_particles): <NEW_LINE> <INDENT> if abs(number_of_particles) > 8 or abs(number_of_particles) < 2: <NEW_LINE> <INDENT> raise ValueError( "You must insert as number of particles a positive integer lower or equal to 8 and higher than 1.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hi = np.random.rand(number_of_particles) <NEW_LINE> return(hi)
This function randomly generates a set of coefficients (hi) used to generate Hamiltonian H1. They represent the local magnetic fields. Parameters: number_of_particles : number of particles present in the system of particles. Returns: The hi coefficients in the form of a vector of dimension number_of_particles. Raises: ValueError if number_of_particles is not between 2 and 8.
625941bd66656f66f7cbc0a9
def parse_args(args): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser( description="Just a Fibonacci demonstration") <NEW_LINE> parser.add_argument( "--version", action="version", version="clewsy {ver}".format(ver=__version__)) <NEW_LINE> parser.add_argument( dest="n", help="n-th Fibonacci number", type=int, metavar="INT") <NEW_LINE> parser.add_argument( "-v", "--verbose", dest="loglevel", help="set loglevel to INFO", action="store_const", const=logging.INFO) <NEW_LINE> parser.add_argument( "-vv", "--very-verbose", dest="loglevel", help="set loglevel to DEBUG", action="store_const", const=logging.DEBUG) <NEW_LINE> return parser.parse_args(args)
Parse command line parameters Args: args ([str]): command line parameters as list of strings Returns: :obj:`argparse.Namespace`: command line parameters namespace
625941bdfbf16365ca6f60bd
def _getAssetVersions(self, character, assetTypes=list()): <NEW_LINE> <INDENT> if not assetTypes: <NEW_LINE> <INDENT> assetTypes = self.ASSET_TYPES <NEW_LINE> <DEDENT> for assetType in assetTypes: <NEW_LINE> <INDENT> if not assetType in self.ASSET_TYPES: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not character: <NEW_LINE> <INDENT> self.ASSET_VERSIONS[assetType] = dict() <NEW_LINE> continue <NEW_LINE> <DEDENT> self.ASSET_VERSIONS[assetType] = getattr(combinationPublish, 'get%sVersions' % assetType)(character) <NEW_LINE> <DEDENT> return True
Iterates over the supplied asset types, and gets the publish version data which is used to populate the GUI. If no asset types are passed in the method defaults to all asset types in self.ASSET_TYPES.
625941bd2c8b7c6e89b356c1
def handle(self, req): <NEW_LINE> <INDENT> self.handler(WebSocket(req))
Calls the Handler with the WebSocket when connected
625941bd29b78933be1e55af
def log1p_(self): <NEW_LINE> <INDENT> if self.encrypted: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> self.data = np.log1p(self.data) <NEW_LINE> return self
Performs element wise log(1+x) operation in-place Parameters ---------- Returns ------- Caller with values in-place
625941bd8a43f66fc4b53f67
def df_apply(data): <NEW_LINE> <INDENT> data[["age"]] = data[["age"]].apply(np.sum, axis=0) <NEW_LINE> print(data[["age"]])
Demonstrate using apply on a DataFrame: applies np.sum column-wise over the "age" column and prints the result.
625941bd0a50d4780f666d8e
def stdin(self, key: str): <NEW_LINE> <INDENT> value = input(f"{key}: ") <NEW_LINE> try: <NEW_LINE> <INDENT> value = Int(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> value = String(value) <NEW_LINE> <DEDENT> self.save(key, value)
Read a value from standard input, convert it to an Int when possible (falling back to String), and save it under the given key.
625941bde8904600ed9f1e28
def iter_chunks(req_id, path): <NEW_LINE> <INDENT> chksum = sha1() <NEW_LINE> with open(path, 'rb') as f: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> chunk = f.read(10240) <NEW_LINE> if not chunk: <NEW_LINE> <INDENT> yield OP_PUT_END, req_id, {'sha1sum': chksum.hexdigest()} <NEW_LINE> break <NEW_LINE> <DEDENT> chksum.update(chunk) <NEW_LINE> yield OP_PUT_DATA, req_id, chunk
Iterate over chunks of the given file. Yields message suitable for writing to a stream.
625941bd67a9b606de4a7dbb
@login_required(login_url='booking:login') <NEW_LINE> def index(request): <NEW_LINE> <INDENT> num_inventory = Inventory.objects.all().count() <NEW_LINE> num_instances_available = Inventory.objects.filter(remaining_count__exact=1).count() + Inventory.objects.filter(remaining_count__exact=2).count() + Inventory.objects.filter(remaining_count__exact=3).count() + Inventory.objects.filter(remaining_count__exact=4).count() + Inventory.objects.filter(remaining_count__exact=5).count() <NEW_LINE> num_members = Booking.objects.count() <NEW_LINE> context = { 'num_inventory': num_inventory, 'num_instances_available': num_instances_available, 'num_members': num_members, } <NEW_LINE> return render(request, 'index.html', context=context)
View function for home page of site.
625941bd0fa83653e4656ebb
def get_fail_exception(self): <NEW_LINE> <INDENT> if self.fail_error: <NEW_LINE> <INDENT> return common.import_component(self.fail_error)
Get the exception that's appropriate for this error
625941bda934411ee3751592
def __init__(self, selector, population, selected): <NEW_LINE> <INDENT> self.selector = selector <NEW_LINE> self.population = population <NEW_LINE> self.selected = selected
Initialize a :class:`.CaseData` instance. Parameters ---------- selector : :class:`.Selector` The selector to test. population : :class:`tuple` of :class:`.MoleculeRecord` The population from which batches are selected. selected : :class:`tuple` of :class:`.Batch` The batches which should be selected.
625941bd6fece00bbac2d63b
def test_incomplete_knxip_frame_excetpion(self): <NEW_LINE> <INDENT> exception = IncompleteKNXIPFrame("Hello") <NEW_LINE> assert str(exception) == '<IncompleteKNXIPFrame description="Hello" />'
Test string representation of IncompleteKNXIPFrame exception.
625941bdb5575c28eb68defd
def weight(name, shape, init='he', value = 0.0, range=None): <NEW_LINE> <INDENT> initializer = tf.constant_initializer() <NEW_LINE> fan_in = shape[0] if len(shape) == 2 else np.prod(shape[:-1]) <NEW_LINE> fan_out = shape[1] if len(shape) == 2 else shape[-1] <NEW_LINE> if init == 'xavier': <NEW_LINE> <INDENT> range = math.sqrt(6.0 / (fan_in + fan_out)) <NEW_LINE> initializer = tf.random_uniform_initializer(-range, range) <NEW_LINE> <DEDENT> elif init == 'he': <NEW_LINE> <INDENT> std = math.sqrt(2.0 / fan_in) <NEW_LINE> initializer = tf.random_normal_initializer(stddev=std) <NEW_LINE> <DEDENT> elif init == 'normal': <NEW_LINE> <INDENT> initializer = tf.random_normal_initializer(stddev=0.1) <NEW_LINE> <DEDENT> elif init == 'uniform': <NEW_LINE> <INDENT> if range is None: <NEW_LINE> <INDENT> raise ValueError("range must not be None if uniform init is used.") <NEW_LINE> <DEDENT> initializer = tf.random_uniform_initializer(-range, range) <NEW_LINE> <DEDENT> elif init == 'constant': <NEW_LINE> <INDENT> initializer = init_ops.constant_initializer(value) <NEW_LINE> <DEDENT> var = tf.get_variable(name, shape, initializer=initializer) <NEW_LINE> tf.add_to_collection('l2', tf.nn.l2_loss(var)) <NEW_LINE> return var
Initializes weight. :param name: Variable name :param shape: Tensor shape :param init: Init mode. xavier / normal / uniform / he (default is 'he') :param range: :return: Variable
625941bd498bea3a759b99af
def DropCAltAndLiga(): <NEW_LINE> <INDENT> def dropcaltandliga_op(fnt): <NEW_LINE> <INDENT> for lookup in fnt.gsub_lookups: <NEW_LINE> <INDENT> if fnt.getLookupInfo(lookup)[0] in ['gsub_ligature', 'gsub_contextchain']: <NEW_LINE> <INDENT> fnt.removeLookup(lookup) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dropcaltandliga_op
Removes Contextual Alternates and Ligatures
625941bd32920d7e50b280cc
def visit_RelTimeValue(self, node): <NEW_LINE> <INDENT> if node.time < 0: <NEW_LINE> <INDENT> raise self._SYNTAX_ERR_INVALID_VAL <NEW_LINE> <DEDENT> return {"mode": "rel", "delta": node.time}
Visit relative time value.
625941bdec188e330fd5a6a3
def __init__(self, prices, frequency=252): <NEW_LINE> <INDENT> if not isinstance(prices, pd.DataFrame): <NEW_LINE> <INDENT> warnings.warn("prices are not in a dataframe", RuntimeWarning) <NEW_LINE> prices = pd.DataFrame(prices) <NEW_LINE> <DEDENT> self.frequency = frequency <NEW_LINE> self.X = prices.pct_change().dropna(how="all") <NEW_LINE> self.S = self.X.cov().values <NEW_LINE> self.delta = None
:param prices: adjusted closing prices of the asset, each row is a date and each column is a ticker/id. :type prices: pd.DataFrame :param frequency: number of time periods in a year, defaults to 252 (the number of trading days in a year) :type frequency: int, optional
625941bdad47b63b2c509e7f
@pytest.mark.parametrize(('ops', 'which', 'expected'), [ ('f', 'rm', False), ('f', 'rmdir', False), (empty_file('f'), 'rm', True), (simple_file('f', 'spam'), 'rm', True), (mkdir('d'), 'rmdir', True), (empty_file('f'), 'rmdir', 'error'), (simple_file('f', 'spam'), 'rmdir', 'error'), (mkdir('d'), 'rm', 'error'), (symlink('l', 'target'), 'rm', 'error'), (symlink('l', 'target'), 'rmdir', 'error'), ]) <NEW_LINE> def test_removething_check_if_must_change(tmpdir, ops, which, expected): <NEW_LINE> <INDENT> p = make_path(tmpdir, ops) <NEW_LINE> rt = getattr(runit_sv, which)(p) <NEW_LINE> if expected == 'error': <NEW_LINE> <INDENT> with pytest.raises(runit_sv.NotAThingError): <NEW_LINE> <INDENT> rt.check_if_must_change() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> rt.check_if_must_change() <NEW_LINE> assert rt.must_change == expected
RemoveThing objects will indicate if their desired state is not the same as their referenced path's current state.
625941bd38b623060ff0acee
def test_attach_blueprint(self): <NEW_LINE> <INDENT> bp1 = Blueprint('bp1', __name__) <NEW_LINE> bp2 = Blueprint('bp2', __name__) <NEW_LINE> bpb = BlueprintBundle(path='/test') <NEW_LINE> bpb.attach_bp(bp1) <NEW_LINE> bpb.attach_bp(bp2) <NEW_LINE> self.assertTrue(bpb.blueprints.__contains__, bp1) <NEW_LINE> self.assertTrue(bpb.blueprints.__contains__, bp2)
Attached blueprints should end up in BlueprintBundle.blueprints
625941bdd7e4931a7ee9de1c
def sample(self, **params): <NEW_LINE> <INDENT> url = 'https://stream.twitter.com/%s/statuses/sample.json' % self.streamer.api_version <NEW_LINE> self.streamer._request(url, params=params)
Stream statuses/sample :param \*\*params: Parameters to send with your stream request Accepted params found at: https://developer.twitter.com/en/docs/tweets/sample-realtime/api-reference/get-statuses-sample
625941bd099cdd3c635f0b5b
def krull_dimension(self): <NEW_LINE> <INDENT> return ZZ(1)
Return the Krull dimension of this order, which is 1. EXAMPLES:: sage: K.<a> = QuadraticField(5) sage: OK = K.maximal_order() sage: OK.krull_dimension() 1 sage: O2 = K.order(2*a) sage: O2.krull_dimension() 1
625941bd8e05c05ec3eea271