code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def solve_linear_diophantine_equation(a, b, c): <NEW_LINE> <INDENT> for x in (a, b, c): <NEW_LINE> <INDENT> if not isinstance(x, numbers.Integral): <NEW_LINE> <INDENT> raise TypeError("can't find solution for non-ints") <NEW_LINE> <DEDENT> <DEDENT> if c == 0: <NEW_LINE> <INDENT> return 0, 0 <NEW_LINE> <DEDENT> gcd, s, t = euclidean_alg.extended_gcd(a, b) <NEW_LINE> if c % gcd != 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return s * c // gcd, t * c // gcd | Solve the linear Diophantine equation: a*x + b*y = c, and return a tuple.
Return the tuple (x, y); or None if no solution exists.
a -- an integer
b -- an integer
c -- an integer | 625941bd460517430c394095 |
def __init__(self, encoder, matcher, decoder, **kwargs): <NEW_LINE> <INDENT> self.embed_path = kwargs["embed_path"] <NEW_LINE> self.embed_size = kwargs["vocab_dim"] <NEW_LINE> self.encoder = encoder <NEW_LINE> self.matcher = matcher <NEW_LINE> self.decoder = decoder <NEW_LINE> self.p_placeholder = tf.placeholder(tf.int32, [None, Config.max_p_len]) <NEW_LINE> self.p_mask_placeholder = tf.placeholder(tf.bool, [None, Config.max_p_len]) <NEW_LINE> self.p_actual_len_placeholder = tf.placeholder(tf.int32, [None]) <NEW_LINE> self.q_placeholder = tf.placeholder(tf.int32, [None, Config.max_q_len]) <NEW_LINE> self.q_mask_placeholder = tf.placeholder(tf.bool, [None, Config.max_q_len]) <NEW_LINE> self.q_actual_len_placeholder = tf.placeholder(tf.int32, [None]) <NEW_LINE> self.begin_placeholder = tf.placeholder(tf.int32, [None]) <NEW_LINE> self.end_placeholder = tf.placeholder(tf.int32, [None]) <NEW_LINE> with tf.variable_scope("qa", initializer=tf.contrib.layers.xavier_initializer()): <NEW_LINE> <INDENT> self.setup_embeddings() <NEW_LINE> self.setup_system() <NEW_LINE> self.setup_loss() <NEW_LINE> <DEDENT> optimizer = get_optimizer("adam")(learning_rate=0.0001) <NEW_LINE> grads_and_vars = optimizer.compute_gradients(self.loss) <NEW_LINE> grads, tvars = zip(*grads_and_vars) <NEW_LINE> grads, _ = tf.clip_by_global_norm(grads, Config.max_grad_norm) <NEW_LINE> self.grads = grads <NEW_LINE> self.train_op = optimizer.apply_gradients(zip(grads, tvars)) | Initializes your System
:param encoder: an encoder that you constructed in train.py
:param decoder: a decoder that you constructed in train.py
:param args: pass in more arguments as needed | 625941bd30dc7b7665901872 |
def modify_humidity(self, id, timestamp, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> id = int(id) <NEW_LINE> timestamp = int(timestamp) <NEW_LINE> <DEDENT> except(ValueError): <NEW_LINE> <INDENT> raise ValueError("The deviceid is malformed") <NEW_LINE> <DEDENT> query2 = 'SELECT * FROM WIND_DATA WHERE device_id = ? AND date = ?' <NEW_LINE> qvalue2 = (id, timestamp,) <NEW_LINE> self.con.row_factory = sqlite3.Row <NEW_LINE> cur = self.con.cursor() <NEW_LINE> cur.execute(query2, qvalue2,) <NEW_LINE> row = cur.fetchone() <NEW_LINE> if row is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> query = 'UPDATE WIND_DATA SET humidity = ? WHERE device_id = ? AND date = ?' <NEW_LINE> qvalue = (value, id, timestamp,) <NEW_LINE> self.con.row_factory = sqlite3.Row <NEW_LINE> cur = self.con.cursor() <NEW_LINE> try: <NEW_LINE> <INDENT> cur.execute(query, qvalue) <NEW_LINE> self.con.commit() <NEW_LINE> return True <NEW_LINE> <DEDENT> except sqlite3.Error as e: <NEW_LINE> <INDENT> print("Error %s:" % (e.args[0])) <NEW_LINE> return False | modifies the humidity value on timestamp with value given as argument
:param id
:param timestamp:
:param value:
:return: True (204) if value was deleted, else False (if deleted already, or timestamp does not exist) | 625941bdff9c53063f47c0fe |
def callback(self, name, mode, index): <NEW_LINE> <INDENT> raw, status = self.validate(self._variable.get()) <NEW_LINE> bg_color_indicator(self, status) | this callback is called whenever the entry field is written.
the entered value is validated. If validation fails, the background of
the entry field changes to red, indicating an error to the user. | 625941bd5fcc89381b1e15c6 |
def round_through(x, stochastic=False): <NEW_LINE> <INDENT> if stochastic: <NEW_LINE> <INDENT> rounded = tf.ceil(x - tf.random_uniform(tf.shape(x), dtype=x.dtype)) <NEW_LINE> return x + tf.stop_gradient(rounded - x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rounded = tf.rint(x) <NEW_LINE> return x + tf.stop_gradient(rounded - x) | Element-wise rounding to the closest integer with full gradient propagation.
A trick from [Sergey Ioffe](http://stackoverflow.com/a/36480182) | 625941bd236d856c2ad446df |
def list_origins(self, configuration_id): <NEW_LINE> <INDENT> url = self.session.build_url( 'content_delivery', 'configurations', configuration_id, 'origins') <NEW_LINE> response = self.session.get(url) <NEW_LINE> data = decode_json(response, 200) <NEW_LINE> return many_of(Origin, data) | List origins of the given configuration.
:param int configuration_id:
Configuration ID | 625941bdec188e330fd5a6ae |
@app.route('/delacc', methods=["POST"]) <NEW_LINE> @login_required <NEW_LINE> def delAcc(): <NEW_LINE> <INDENT> delAccForm = AddDelAccForm() <NEW_LINE> if delAccForm.validate_on_submit(): <NEW_LINE> <INDENT> username = get_session_user() <NEW_LINE> user = get_user(username) <NEW_LINE> db.session.delete(user) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> return redirect("/") | Form route for deleting user from database | 625941bd8a43f66fc4b53f72 |
def __init__(self, config): <NEW_LINE> <INDENT> self._config = config <NEW_LINE> if self._log_cli_enabled() and config.getoption("verbose") < 1: <NEW_LINE> <INDENT> config.option.verbose = 1 <NEW_LINE> <DEDENT> self.print_logs = get_option_ini(config, "log_print") <NEW_LINE> self.formatter = self._create_formatter( get_option_ini(config, "log_format"), get_option_ini(config, "log_date_format"), ) <NEW_LINE> self.log_level = get_actual_log_level(config, "log_level") <NEW_LINE> self.log_file_level = get_actual_log_level(config, "log_file_level") <NEW_LINE> self.log_file_format = get_option_ini(config, "log_file_format", "log_format") <NEW_LINE> self.log_file_date_format = get_option_ini( config, "log_file_date_format", "log_date_format" ) <NEW_LINE> self.log_file_formatter = logging.Formatter( self.log_file_format, datefmt=self.log_file_date_format ) <NEW_LINE> log_file = get_option_ini(config, "log_file") <NEW_LINE> if log_file: <NEW_LINE> <INDENT> self.log_file_handler = logging.FileHandler( log_file, mode="w", encoding="UTF-8" ) <NEW_LINE> self.log_file_handler.setFormatter(self.log_file_formatter) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.log_file_handler = None <NEW_LINE> <DEDENT> self.log_cli_handler = None <NEW_LINE> self.live_logs_context = lambda: dummy_context_manager() <NEW_LINE> if self._log_cli_enabled(): <NEW_LINE> <INDENT> self._setup_cli_logging() | Creates a new plugin to capture log messages.
The formatter can be safely shared across all handlers so
create a single one for the entire test session here. | 625941bd099cdd3c635f0b66 |
def register(request): <NEW_LINE> <INDENT> if request.method != 'POST': <NEW_LINE> <INDENT> form = UserCreationForm() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> form = UserCreationForm(data=request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> new_user = form.save() <NEW_LINE> authenticated_user = authenticate(username=new_user.username, password=request.POST[ 'password1']) <NEW_LINE> login(request, authenticated_user) <NEW_LINE> return HttpResponseRedirect(reverse('learning_logs:index')) <NEW_LINE> <DEDENT> context = {'form': form} <NEW_LINE> return render(request, 'users/register.html', context) | Регистрирует нового пользователя. | 625941bd1d351010ab855a26 |
def load_data(year): <NEW_LINE> <INDENT> year = str(year) <NEW_LINE> if year in CACHE: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> data_file = os.path.join( os.path.dirname(__file__), 'data', '{}.csv'.format(year), ) <NEW_LINE> if not os.path.isfile(data_file): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> CACHE[year] = {} <NEW_LINE> with io.open(data_file, encoding='utf-8') as rf: <NEW_LINE> <INDENT> has_header = csv.Sniffer().has_header(rf.read(1024)) <NEW_LINE> rf.seek(0) <NEW_LINE> reader = csv.DictReader(decomment(rf), DATA_FIELDS) <NEW_LINE> if has_header: <NEW_LINE> <INDENT> next(reader) <NEW_LINE> <DEDENT> for data_line in reader: <NEW_LINE> <INDENT> day = cleanup_dict(data_line) <NEW_LINE> dt = datetime.strptime(day['date'], '%Y-%m-%d') <NEW_LINE> day.update({ 'year': dt.year, 'month': dt.month, 'day': dt.day, 'isholiday': bool(int(day['isholiday'])), 'isworkday': bool(int(day['isworkday'])), }) <NEW_LINE> CACHE[year][day.pop('date')] = day <NEW_LINE> <DEDENT> <DEDENT> return True | Load data into memory cache | 625941bdf9cc0f698b140507 |
def __init__(self, subjects, name="dataset", keep_on_cpu=False): <NEW_LINE> <INDENT> self.subjects = subjects <NEW_LINE> self.nb_streamlines_per_sujet = [] <NEW_LINE> self.streamlines_per_sujet_offsets = [] <NEW_LINE> offset = 0 <NEW_LINE> self.streamlines = nib.streamlines.ArraySequence() <NEW_LINE> for i, subject in enumerate(self.subjects): <NEW_LINE> <INDENT> self.streamlines.extend(subject.streamlines) <NEW_LINE> self.nb_streamlines_per_sujet.append(len(subject.streamlines)) <NEW_LINE> self.streamlines_per_sujet_offsets.append(offset) <NEW_LINE> offset += len(subject.streamlines) <NEW_LINE> <DEDENT> super().__init__(self.streamlines, targets=None, name=name, keep_on_cpu=keep_on_cpu) <NEW_LINE> self.streamline_id_to_volume_id = np.nan * np.ones((len(self.streamlines),)) <NEW_LINE> start = 0 <NEW_LINE> for subject in self.subjects: <NEW_LINE> <INDENT> end = start + len(subject.streamlines) <NEW_LINE> self.streamline_id_to_volume_id[start:end] = subject.subject_id <NEW_LINE> start = end <NEW_LINE> <DEDENT> assert not np.isnan(self.streamline_id_to_volume_id.sum()) <NEW_LINE> self.streamline_id_to_volume_id = self.streamline_id_to_volume_id.astype(floatX) | Parameters
----------
subjects: list of TractogramData | 625941bd63f4b57ef0001029 |
def get_app(self): <NEW_LINE> <INDENT> self.context = self.create_wscontext(client=False) <NEW_LINE> return web.Application([ (r'/ws', WsServerHandler, dict(context=self.context)), ]) | Generate the default 'wsproxy' app for use in the test.
To customize the routes (for example), override 'get_routes()' and
'get_wscontext()' as appropriate. | 625941bd55399d3f055885bc |
@_requires_edfapi <NEW_LINE> def test_raw_plot(): <NEW_LINE> <INDENT> for fi, fname in enumerate(fnames): <NEW_LINE> <INDENT> raw = read_raw(fname) <NEW_LINE> raw.plot_calibration() <NEW_LINE> raw.plot_heatmap(0., 10., vmax=1) <NEW_LINE> raw.plot_heatmap(0., 10., kernel=None) <NEW_LINE> raw.plot() | Test plotting of raw. | 625941bd76e4537e8c35157a |
def add_waypoint_file(self, filename): <NEW_LINE> <INDENT> print('Adding waypoint file...') <NEW_LINE> if not os.path.exists(filename): <NEW_LINE> <INDENT> raise RuntimeError('Waypoint file {} does not exist.'.format(filename)) <NEW_LINE> <DEDENT> if filename.lower().endswith('.cup'): <NEW_LINE> <INDENT> dst = os.path.join(self.__dir_temp, "waypoints.cup") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dst = os.path.join(self.__dir_temp, "waypoints.xcw") <NEW_LINE> <DEDENT> shutil.copy(filename, dst) <NEW_LINE> if not os.path.exists(dst): <NEW_LINE> <INDENT> raise RuntimeError('Copying {} to {} failed.'.format(os.path.basename(filename), dst)) <NEW_LINE> <DEDENT> self.__files.add(dst, True) | Adds a waypoint file to the map
@param filename: The file that should be added | 625941bd711fe17d8254227a |
def Run(self, args): <NEW_LINE> <INDENT> apitools_client = self.context[commands.DATAFLOW_APITOOLS_CLIENT_KEY] <NEW_LINE> dataflow_messages = self.context[commands.DATAFLOW_MESSAGES_MODULE_KEY] <NEW_LINE> job_ref = job_utils.ExtractJobRef(self.context, args) <NEW_LINE> start_time = args.changed_after and time_util.Strftime(args.changed_after) <NEW_LINE> request = dataflow_messages.DataflowProjectsJobsGetMetricsRequest( projectId=job_ref.projectId, jobId=job_ref.jobId, startTime=start_time) <NEW_LINE> preds = [] <NEW_LINE> if not args.tentative and args.hide_committed: <NEW_LINE> <INDENT> raise calliope_exceptions.ToolException( 'Cannot exclude both tentative and committed metrics.') <NEW_LINE> <DEDENT> elif not args.tentative and not args.hide_committed: <NEW_LINE> <INDENT> preds.append(lambda m: self._GetContextValue(m, 'tentative') != 'true') <NEW_LINE> <DEDENT> elif args.tentative and args.hide_committed: <NEW_LINE> <INDENT> preds.append(lambda m: self._GetContextValue(m, 'tentative') == 'true') <NEW_LINE> <DEDENT> if args.changed_after: <NEW_LINE> <INDENT> parsed_time = time_util.ParseTimeArg(args.changed_after) <NEW_LINE> preds.append(lambda m: time_util.ParseTimeArg(m.updateTime) > parsed_time) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> response = apitools_client.projects_jobs.GetMetrics(request) <NEW_LINE> <DEDENT> except exceptions.HttpError as error: <NEW_LINE> <INDENT> raise calliope_exceptions.HttpException( 'Failed to get metrics for job with ID [{0}] in project [{1}]: {2}' .format(job_ref.jobId, job_ref.projectId, dataflow_util.GetErrorMessage(error))) <NEW_LINE> <DEDENT> return [m for m in response.metrics if all([pred(m) for pred in preds])] | This is what gets called when the user runs this command.
Args:
args: all the arguments that were provided to this command invocation.
Returns:
None on success, or a string containing the error message. | 625941bddd821e528d63b0b4 |
def get_html(url): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> html = urlopen(url) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print('нет сайта такого') <NEW_LINE> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return html.read() | Считывает страницу в html | 625941bd5fc7496912cc3888 |
def points(self): <NEW_LINE> <INDENT> my_solutions = Challenge.objects.filter(solution__solver__team=self) <NEW_LINE> annotated = my_solutions.annotate(total_points=models.Sum('points')) <NEW_LINE> points = annotated.values('total_points') <NEW_LINE> return points[0]['total_points'] if points else 0 | Gets the total score for all the challenges solved by this team. | 625941bdbaa26c4b54cb102c |
def load_yaml_config(version): <NEW_LINE> <INDENT> checkout_path = version.project.checkout_path(version.slug) <NEW_LINE> img_name = version.project.container_image or DOCKER_IMAGE <NEW_LINE> env_config = { 'build': { 'image': img_name, } } <NEW_LINE> img_settings = DOCKER_IMAGE_SETTINGS.get(img_name, None) <NEW_LINE> if img_settings: <NEW_LINE> <INDENT> env_config.update(img_settings) <NEW_LINE> env_config['DOCKER_IMAGE_SETTINGS'] = img_settings <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> sphinx_env_config = env_config.copy() <NEW_LINE> sphinx_env_config.update({ 'output_base': '', 'type': 'sphinx', 'name': version.slug, }) <NEW_LINE> config = load_config( path=checkout_path, env_config=sphinx_env_config, )[0] <NEW_LINE> <DEDENT> except InvalidConfig: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except ConfigError: <NEW_LINE> <INDENT> config = BuildConfig( env_config=env_config, raw_config={}, source_file='empty', source_position=0, ) <NEW_LINE> <DEDENT> return ConfigWrapper(version=version, yaml_config=config) | Load a configuration from `readthedocs.yml` file.
This uses the configuration logic from `readthedocs-build`, which will keep
parsing consistent between projects. | 625941bdbf627c535bc130d8 |
def gen_discriminator_train_step(self): <NEW_LINE> <INDENT> discriminator_loss = self.gen_discriminator_loss() <NEW_LINE> discriminator_optimizer = self.get_discriminator_optimizer() <NEW_LINE> @tf.function <NEW_LINE> def train_step(batch): <NEW_LINE> <INDENT> arguments = batch.copy() <NEW_LINE> with tf.GradientTape() as tape: <NEW_LINE> <INDENT> encoder_output = self.encoder(self.encoder_input_prepare(arguments), training=False) <NEW_LINE> arguments.update({'encoder_output': encoder_output}) <NEW_LINE> discriminator_output = self.discriminator(self.discriminator_input_prepare(arguments), training=True) <NEW_LINE> arguments.update({'discriminator_output': discriminator_output}) <NEW_LINE> discriminator_real_output = self.discriminator(self.discriminator_salted_input_prepare(arguments), training=True) <NEW_LINE> arguments.update({'discriminator_real_output': discriminator_real_output}) <NEW_LINE> discriminator_loss_value = discriminator_loss(arguments) <NEW_LINE> <DEDENT> discriminator_trainable = self.discriminator.trainable_variables <NEW_LINE> discriminator_gradients = tape.gradient(discriminator_loss_value, discriminator_trainable) <NEW_LINE> discriminator_optimizer.apply_gradients(zip(discriminator_gradients, discriminator_trainable)) <NEW_LINE> return (None, None, None, (tf.reduce_mean(discriminator_loss_value), tf.reduce_mean(discriminator_output), tf.reduce_mean(discriminator_real_output)) ) <NEW_LINE> <DEDENT> return train_step | Returns compiled TensorFlow function, responsible for a single learning step.
The function modifies (learns) ONLY discriminator.
Result may be used as training_step argument in train member function.
:return: @tf.function performing a single (batch) learning step | 625941bde5267d203edcdba9 |
def act(self, round, prev_state, prev_action, reward, new_state, too_slow): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> encapsulated_boards = self.hot_boards(new_state) <NEW_LINE> winning_vec = self.get_winning_vector_with_enemies(new_state, self.id, self.enemy_id) <NEW_LINE> if np.random.rand() < self.epsilon: <NEW_LINE> <INDENT> action = self.get_random_action(new_state) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> action = self.get_qNet_action(encapsulated_boards, winning_vec) <NEW_LINE> <DEDENT> if self.mode == 'train': <NEW_LINE> <INDENT> if prev_action is not None and prev_state is not None: <NEW_LINE> <INDENT> prev_encapsulated_boards = self.hot_boards(prev_state) <NEW_LINE> prev_winning_vec = self.get_winning_vector_with_enemies(prev_state, self.id, self.enemy_id) <NEW_LINE> self.transitions_memory.append(TransitionBatch(prev_encapsulated_boards, prev_action, reward, encapsulated_boards, prev_winning_vec, winning_vec)) <NEW_LINE> if len(self.transitions_memory) >= self.memory_limit: <NEW_LINE> <INDENT> self.transitions_memory.popleft() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> print("Exception in act: %s %s" %(type(ex), ex)) <NEW_LINE> action = np.random.choice(np.arange(NUM_ACTION)) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> return action | Perform round of act - choose and return action. | 625941bd8e7ae83300e4aed6 |
def addheader(self): <NEW_LINE> <INDENT> pass | Method to be overridden to initialise headers, etc. | 625941bdbe383301e01b5395 |
def test_default_serialization(self): <NEW_LINE> <INDENT> resource = Resource('http://httpbin.org/post') <NEW_LINE> data = {'a': 'b', 'c': ['d', 'e']} <NEW_LINE> response = resource.post(data) <NEW_LINE> self.assertLess(response.status_code, 300) <NEW_LINE> response_data = response.json() <NEW_LINE> self.assertEqual( 'application/json', response_data['headers']['Content-Type'], ) <NEW_LINE> self.assertEqual(data, response_data['json']) | Check if deserialized data is serialized properly with default format | 625941bd5f7d997b8717499e |
def samba4(self, name, args=None): <NEW_LINE> <INDENT> if args is None: <NEW_LINE> <INDENT> args = [] <NEW_LINE> <DEDENT> return getattr(Samba4(), name)(*args) | Temporary wrapper to use Samba4 over middlewared | 625941bd4e696a04525c9356 |
def _gen_fixture(self): <NEW_LINE> <INDENT> self._gen_creator() <NEW_LINE> self._gen_fabricator() <NEW_LINE> self._gen_by_type() <NEW_LINE> self._gen_physical() | create default set for this fixture | 625941bd5fdd1c0f98dc013c |
def require_chanmsg(message=None): <NEW_LINE> <INDENT> def actual_decorator(function): <NEW_LINE> <INDENT> @functools.wraps(function) <NEW_LINE> def _nop(*args, **kwargs): <NEW_LINE> <INDENT> bot, trigger = args[0:2] <NEW_LINE> if not trigger.is_privmsg: <NEW_LINE> <INDENT> return function(*args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if message and not callable(message): <NEW_LINE> <INDENT> bot.say(message) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return _nop <NEW_LINE> <DEDENT> if callable(message): <NEW_LINE> <INDENT> return actual_decorator(message) <NEW_LINE> <DEDENT> return actual_decorator | Decorator, this allows functions to specify if they should be only
allowed via channel message.
If it is not, `message` will be said if given. | 625941bda8370b77170527aa |
def init(): <NEW_LINE> <INDENT> @click.command() <NEW_LINE> @click.option('--cell', required=True, envvar='TREADMILL_CELL', callback=cli.handle_context_opt, expose_value=False) <NEW_LINE> @click.option('--ssh', help='SSH client to use.', type=click.Path(exists=True, readable=True)) <NEW_LINE> @click.argument('app') <NEW_LINE> @click.argument('command', nargs=-1) <NEW_LINE> def ssh(ssh, app, command): <NEW_LINE> <INDENT> if ssh is None: <NEW_LINE> <INDENT> ssh = _DEFAULT_SSH <NEW_LINE> <DEDENT> if app.find('#') == -1: <NEW_LINE> <INDENT> raise click.BadParameter('Specify full instance name: xxx#nnn') <NEW_LINE> <DEDENT> app_discovery = discovery.Discovery(context.GLOBAL.zk.conn, app, 'ssh') <NEW_LINE> app_discovery.sync() <NEW_LINE> for (endpoint, hostport) in app_discovery.iteritems(): <NEW_LINE> <INDENT> _LOGGER.info('%s :: %s', endpoint, hostport) <NEW_LINE> if hostport: <NEW_LINE> <INDENT> host, port = hostport.split(b':') <NEW_LINE> run_ssh(host, port, ssh, list(command)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ssh | Return top level command handler. | 625941bdc432627299f04b4e |
def __init__(self, file1, file2): <NEW_LINE> <INDENT> self.tree_queue = queue.PriorityQueue() <NEW_LINE> self.worker1 = threading.Thread(target=self._worker, args=(1, file1,)) <NEW_LINE> self.worker2 = threading.Thread(target=self._worker, args=(2, file2,)) | __init__: class constructor
:param file1: first html file
:param file2: second html file | 625941bdcc0a2c11143dcd9a |
def create(self, networkipv4s): <NEW_LINE> <INDENT> data = {'networks': networkipv4s} <NEW_LINE> return super(ApiNetworkIPv4, self).post('api/v3/networkv4/', data) | Method to create network-ipv4's
:param networkipv4s: List containing networkipv4's desired to be created on database
:return: None | 625941bde1aae11d1e749bbf |
def test_hca_task_after_pme(self): <NEW_LINE> <INDENT> self.check_number_of_activities(1) <NEW_LINE> self.test_utils.start_pme() <NEW_LINE> self.check_number_of_activities(1) | Test that the HCA's task is no longer available after PME | 625941bdfff4ab517eb2f344 |
def test_a_hbacrule_add_service(self): <NEW_LINE> <INDENT> ret = api.Command['hbacrule_add_service']( self.rule_name, hbacsvc=self.test_service ) <NEW_LINE> assert ret['completed'] == 1 <NEW_LINE> failed = ret['failed'] <NEW_LINE> assert 'memberservice' in failed <NEW_LINE> assert 'hbacsvc' in failed['memberservice'] <NEW_LINE> assert not failed['memberservice']['hbacsvc'] <NEW_LINE> entry = ret['result'] <NEW_LINE> assert_attr_equal(entry, 'memberservice_hbacsvc', self.test_service) | Test adding service to HBAC rule using `xmlrpc.hbacrule_add_service`. | 625941bda8ecb033257d2fd8 |
def __add__(self, other): <NEW_LINE> <INDENT> return self.unit_ideal() + other | Every order equals its own unit ideal. Overload ideal addition
to orders.
EXAMPLES::
sage: Q.<i,j,k> = QuaternionAlgebra(-1,-11)
sage: O = Q.maximal_order()
sage: I = O + O*((j-3)/5); I
Fractional ideal (1/10 + 3/10*j, 1/10*i + 3/10*k, j, k) | 625941bda8370b77170527ab |
def auth(self): <NEW_LINE> <INDENT> sep = os.sep <NEW_LINE> users = config["users"] <NEW_LINE> if self.login_times >= 3: <NEW_LINE> <INDENT> logger("登陆失败次数达到上限", "ERROR", str(self.addr)) <NEW_LINE> selector.unregister(self.conn) <NEW_LINE> <DEDENT> if self.login_times < 3 and not self.login: <NEW_LINE> <INDENT> m = hashlib.md5() <NEW_LINE> auth_data = "" <NEW_LINE> try: <NEW_LINE> <INDENT> auth_data = self.conn.recv(1024).decode("utf-8").split() <NEW_LINE> user_info = users[auth_data[0]] <NEW_LINE> if user_info: <NEW_LINE> <INDENT> password = user_info["key"] <NEW_LINE> m.update(password.encode("utf-8")) <NEW_LINE> pass_md5 = m.hexdigest() <NEW_LINE> if pass_md5 == auth_data[1]: <NEW_LINE> <INDENT> pwd = config["users"][user_info["name"]]["pwd"].rstrip(sep) <NEW_LINE> if not pwd: <NEW_LINE> <INDENT> pwd = user_info["home"].rstrip(sep) <NEW_LINE> config["users"][user_info["name"]]["pwd"] = pwd <NEW_LINE> <DEDENT> self.conn.send(("Success" + " " + pwd).encode("utf-8")) <NEW_LINE> self.user = config["users"][user_info["name"]] <NEW_LINE> self.login = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger("用户[%s]登陆失败:密码错误" % auth_data[0], "ERROR", str(self.addr)) <NEW_LINE> self.conn.send("密码错误".encode("utf-8")) <NEW_LINE> del m <NEW_LINE> self.login_times += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> logger("用户[%s]登陆失败:用户名不存在" % auth_data[0] if auth_data else "None", "ERROR", str(self.addr)) <NEW_LINE> self.conn.send("用户名不存在".encode("utf-8")) <NEW_LINE> self.login_times += 1 <NEW_LINE> <DEDENT> except (ConnectionResetError, ConnectionAbortedError) as e: <NEW_LINE> <INDENT> logger("客户端连接中断:[%s]" % e, "WARN", str(self.addr)) <NEW_LINE> print("客户[%s]端连接中断..." % str(self.addr)) <NEW_LINE> selector.unregister(self.conn) | 用于进行远程用户认证,认证成功后绑定用户与连接
:return: None | 625941bd046cf37aa974cc54 |
def run_websocket(self): <NEW_LINE> <INDENT> if hasattr(self, 'prevent_wsgi_call') and self.prevent_wsgi_call: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.application(self.environ, self._fake_start_response) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.websocket.close() | Called when a websocket has been created successfully. | 625941bddd821e528d63b0b5 |
def max_value(arg_list): <NEW_LINE> <INDENT> return max(arg_list) | Devuelve el maximo de los valores pasados
:param arg_list: indefinido
:return: valor maximo | 625941bd462c4b4f79d1d5da |
def cancel_workunits(root_dag, processes): <NEW_LINE> <INDENT> L.debug("Canceling work units %s" % [p.workunit_name for p in processes]) <NEW_LINE> for proc in processes: <NEW_LINE> <INDENT> childlist = [P for P in running_children if proc.workunit_name == P[0]] <NEW_LINE> if childlist: <NEW_LINE> <INDENT> pid = childlist[0][1] <NEW_LINE> send_kill_signal(proc.workunit_name, pid, root_dag.message_queue) | Takes a list of processes and sends a kill signal.
@param root_dag: Main DAG object
@type root_dag: dag.DAG
@param processes: List of processes to be cancelled
@type processes: list of dag.Process | 625941bd91f36d47f21ac3fa |
def light_brightness(state, bulb_id, bridge_key = bulb_key): <NEW_LINE> <INDENT> api_url = 'http://{0}/api/{1}/lights/{2}/state'.format(bridge_ip(), bridge_key, bulb_id) <NEW_LINE> if state < 1 or state > 100: <NEW_LINE> <INDENT> return {"error": "light brightness should be between 1 or 100"} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bri_lvl = int((state / 100) * 254) <NEW_LINE> data = json.dumps({'on':True, "bri":bri_lvl}) <NEW_LINE> result = requests.put(api_url, data) <NEW_LINE> if 'success' in result: <NEW_LINE> <INDENT> print('Bulb {0} brightness changed to {1}'.format(bulb_id, state)) <NEW_LINE> <DEDENT> return result.text | state: 50, bulb_id: E.g. 1 -> [50, 1] -- 50 percent brightness on bulb 1 | 625941bd7047854f462a1316 |
def _gpsTimeToTime(self, week, sec): <NEW_LINE> <INDENT> epoch = 86400*(10*365 + (1980-1969)/4 + 1 + 6 - 2) <NEW_LINE> return epoch + 86400*7*week + sec - 15 | convert GPS week and TOW to a time in seconds since 1970 | 625941bdbe383301e01b5396 |
def noevalify(expr, include=None): <NEW_LINE> <INDENT> mapping = noevalmapping() <NEW_LINE> while True: <NEW_LINE> <INDENT> old_expr = expr <NEW_LINE> for eval_function, noeval_function in mapping.items(): <NEW_LINE> <INDENT> if include is not None and eval_function not in include: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> expr = expr.replace(eval_function, noeval_function) <NEW_LINE> <DEDENT> if old_expr == expr: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if not is_eval_free(expr, include=include): <NEW_LINE> <INDENT> raise RuntimeError("Noevalify hasn't worked.") <NEW_LINE> <DEDENT> return expr | Replace instances of sympy classes with their corresponding noeval subclass.
| 625941bd6aa9bd52df036cad |
def set_grant_type(self, grant_type = 'client_credentials', api_key=None, api_secret=None, scope=None, info=None): <NEW_LINE> <INDENT> if api_key and api_secret: <NEW_LINE> <INDENT> self._grant_info['key'] = api_key <NEW_LINE> self._grant_info['secret'] = api_secret <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise DailymotionClientError('Missing API key/secret') <NEW_LINE> <DEDENT> if isinstance(info, dict): <NEW_LINE> <INDENT> self._grant_info.update(info) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> info = {} <NEW_LINE> <DEDENT> if self._session_store_enabled and isinstance(info, dict) and info.get('username') is not None: <NEW_LINE> <INDENT> self._session_store.set_user(info.get('username')) <NEW_LINE> <DEDENT> if grant_type in ('authorization', 'token'): <NEW_LINE> <INDENT> grant_type = 'authorization' <NEW_LINE> if 'redirect_uri' not in info: <NEW_LINE> <INDENT> raise DailymotionClientError('Missing redirect_uri in grant info for token grant type.') <NEW_LINE> <DEDENT> <DEDENT> elif grant_type in ('client_credentials', 'none'): <NEW_LINE> <INDENT> grant_type = 'client_credentials' <NEW_LINE> <DEDENT> elif grant_type == 'password': <NEW_LINE> <INDENT> if 'username' not in info or 'password' not in info: <NEW_LINE> <INDENT> raise DailymotionClientError('Missing username or password in grant info for password grant type.') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise DailymotionClientError('Invalid grant type %s.' % grant_type) <NEW_LINE> <DEDENT> self._grant_type = grant_type <NEW_LINE> if scope: <NEW_LINE> <INDENT> if not isinstance(scope, (list, tuple)): <NEW_LINE> <INDENT> raise DailymotionClientError('Invalid scope type: must be a list of valid scopes') <NEW_LINE> <DEDENT> self._grant_info['scope'] = scope | Grant types:
- token:
An authorization is requested to the end-user by redirecting it to an authorization page hosted
on Dailymotion. Once authorized, a refresh token is requested by the API client to the token
server and stored in the end-user's cookie (or other storage technique implemented by subclasses).
The refresh token is then used to request time limited access token to the token server.
- none / client_credentials:
This grant type is a 2 legs authentication: it doesn't allow to act on behalf of another user.
With this grant type, all API requests will be performed with the user identity of the API key owner.
- password:
This grant type allows to authenticate end-user by directly providing its credentials.
This profile is highly discouraged for web-server workflows. If used, the username and password
MUST NOT be stored by the client. | 625941bd097d151d1a222d66 |
def get_event_count(fname): <NEW_LINE> <INDENT> fname = pathlib.Path(fname).resolve() <NEW_LINE> ext = fname.suffix <NEW_LINE> if ext == ".rtdc": <NEW_LINE> <INDENT> with h5py.File(path_to_str(fname), mode="r") as h5: <NEW_LINE> <INDENT> event_count = h5.attrs["experiment:event count"] <NEW_LINE> <DEDENT> <DEDENT> elif ext == ".tdms": <NEW_LINE> <INDENT> mdir = fname.parent <NEW_LINE> mid = fname.name.split("_")[0] <NEW_LINE> logf = mdir / (mid + "_log.ini") <NEW_LINE> avif = mdir / (mid + "_imaq.avi") <NEW_LINE> if logf.exists(): <NEW_LINE> <INDENT> with logf.open(encoding='utf-8') as fd: <NEW_LINE> <INDENT> logd = fd.readlines() <NEW_LINE> <DEDENT> for l in logd: <NEW_LINE> <INDENT> if l.strip().startswith("Events:"): <NEW_LINE> <INDENT> event_count = int(l.split(":")[1]) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif avif.exists(): <NEW_LINE> <INDENT> event_count = get_event_count_cache(avif) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> event_count = get_event_count_cache(fname) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("`fname` must be an .rtdc or .tdms file!") <NEW_LINE> <DEDENT> return event_count | Get the number of events in a data set
Parameters
----------
fname: str
Path to an experimental data file. The file format is
determined from the file extension (tdms or rtdc).
Returns
-------
event_count: int
The number of events in the data set
Notes
-----
For tdms-based data sets, there are multiple ways of determining
the number of events, which are used in the following order
(according to which is faster):
1. The MX_log.ini file "Events" tag
2. The number of frames in the avi file
3. The tdms file (very slow, because it loads the entire tdms file)
The values obtained with this method are cached on disk to
speed up future calls with the same argument.
See Also
--------
get_event_count_cache: cached event counts from tdms/avi files | 625941bd26068e7796caebe4 |
def safety_predict(self, joint_angles): <NEW_LINE> <INDENT> rs = moveit_msgs.msg.RobotState() <NEW_LINE> for joint in joint_angles: <NEW_LINE> <INDENT> rs.joint_state.name.append(joint) <NEW_LINE> rs.joint_state.position.append(joint_angles[joint]) <NEW_LINE> <DEDENT> result = self._sv.get_state_validity(rs, self._moveit_group) <NEW_LINE> return result.valid | Will robot be in safe state.
:param joint_angles: {'': float}
:return safe: Bool
if robot is safe. | 625941bd925a0f43d2549d7e |
def create_resource(self, resource_id, resource_common_name=None, resource_description=None, resource_type=None, **kwargs): <NEW_LINE> <INDENT> new_resource = gdata.calendar_resource.data.CalendarResourceEntry( resource_id=resource_id, resource_common_name=resource_common_name, resource_description=resource_description, resource_type=resource_type) <NEW_LINE> return self.post(new_resource, self.MakeResourceFeedUri(), **kwargs) | Creates a calendar resource with the given properties.
Args:
resource_id: string The resource ID of the calendar resource.
resource_common_name: string (optional) The common name of the resource.
resource_description: string (optional) The description of the resource.
resource_type: string (optional) The type of the resource.
Returns:
gdata.calendar_resource.data.CalendarResourceEntry of the new resource. | 625941bd50485f2cf553cca3 |
def remove_fixed_ip(nova,inst_id,fixed_ip,instance_name=None): <NEW_LINE> <INDENT> ha_agent.debug("< %s >: Removing Floating IP < %s >"%(inst_id,fixed_ip)) <NEW_LINE> try: <NEW_LINE> <INDENT> nova.servers.remove_fixed_ip(inst_id,fixed_ip) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> ha_agent.warning("Exception: remove_fixed_ip - %s - from Instance < %s > [%s]"%(fixed_ip,inst_id,instance_name)) <NEW_LINE> ha_agent.exception('') | Input - NovaClient , Instance Id, Fixed IP
Output - NaN
Function - Removes Fixed Ip | 625941bd9f2886367277a79a |
def getDistribution(self): <NEW_LINE> <INDENT> finger_width = self.machine_params['bit_diameter'] - self.cutting_params['fit_factor'] <NEW_LINE> x_straight_cut = self.cutting_params['finger_depth'] - finger_width <NEW_LINE> num_fingers = math.floor(self.workpiece_params['stock_width'] / finger_width) <NEW_LINE> remainder = self.workpiece_params['stock_width'] - (num_fingers * finger_width) <NEW_LINE> return { 'num_fingers': num_fingers, 'remainder': remainder, 'finger_width': finger_width, 'x_straight_cut': x_straight_cut } | Each path cutting the side of a finger is the more representative proxy
for the finger width. | 625941bd5166f23b2e1a5063 |
def save_max_to_file(self, max: dict): <NEW_LINE> <INDENT> search_log.info(f'Saving best configuration to \'{self.output_config_path}\'') <NEW_LINE> search_config = self.bayes_search.get_search_config_from_bounds(max['params']) <NEW_LINE> best_trainer_config = self.bayes_search.get_trainer_config_with_overrides(search_config) <NEW_LINE> command_util.write_yaml_file(best_trainer_config, self.output_config_path) | Constructs a trainer configuration dictionary from a BayesianOptimization object's max property and saves it to file.
Parameters:
max: dict: The max property of a BayesianOptimization object. | 625941bd2eb69b55b151c7b6 |
def compute_initial_step(self, n_samples): <NEW_LINE> <INDENT> initial_step = 0. <NEW_LINE> for _ in range(10): <NEW_LINE> <INDENT> pred = numpy.zeros(n_samples) + initial_step <NEW_LINE> target, weight = self.loss.prepare_tree_params(pred) <NEW_LINE> initial_step += numpy.average(target, weights=weight) <NEW_LINE> <DEDENT> return initial_step | Compute initial approximation | 625941bd0c0af96317bb80f3 |
def searchInsert(self, nums, target): <NEW_LINE> <INDENT> for index, value in enumerate(nums): <NEW_LINE> <INDENT> if value < target: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> return index <NEW_LINE> <DEDENT> return len(nums) | :type nums: List[int]
:type target: int
:rtype: int | 625941bd656771135c3eb776 |
def read_excel_csv_file(file_name): <NEW_LINE> <INDENT> csv_table = [] <NEW_LINE> with open(file_name, newline='') as csv_file: <NEW_LINE> <INDENT> csv_reader = csv.reader(csv_file, delimiter=',') <NEW_LINE> count = 0 <NEW_LINE> for row in csv_reader: <NEW_LINE> <INDENT> if count == 1: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> <DEDENT> csv_table.append(row) <NEW_LINE> <DEDENT> <DEDENT> for item in csv_table: <NEW_LINE> <INDENT> for index in range(len(item)): <NEW_LINE> <INDENT> if item[index] == "": <NEW_LINE> <INDENT> item[index] = 0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> del csv_table[0] <NEW_LINE> del csv_table[len(csv_table) - 8:] <NEW_LINE> return csv_table | Given a CSV file, this reads the data into a nested list
Args : a string corresponding to comma-separated CSV file
Returns : a nested list consisting of the fields in the CSV file. Empty strings are set to 0, and irrelevant lines are deleted. | 625941bd4a966d76dd550f17 |
def query(self, rel_type, params=None): <NEW_LINE> <INDENT> template = self.template(rel_type) <NEW_LINE> if template is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if params is not None: <NEW_LINE> <INDENT> endpoint = make_query(template, params) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> endpoint = make_query(template) <NEW_LINE> <DEDENT> return endpoint | Returns constructed url with query parameters for urn
type requested. To see which params are expected first
run `query_template(rel_type)`.
Raises BadQuery if params are not valid.
Args:
rel_type -- urn type we want to query
Kwargs:
params -- dict of param values | 625941bd4f6381625f114947 |
def get_chronological_speaker_list(self, selector: Callable[[Speaker], bool] = lambda speaker: True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> chrono_utts = sorted(list(self.iter_utterances()), key=lambda utt: utt.timestamp) <NEW_LINE> return [utt.speaker for utt in chrono_utts if selector(utt.speaker)] <NEW_LINE> <DEDENT> except TypeError as e: <NEW_LINE> <INDENT> raise ValueError(str(e) + "\nUtterance timestamps may not have been set correctly.") | Get the speakers in the conversation sorted in chronological order (speakers may appear more than once)
:param selector: (lambda) function for which speakers should be included; all speakers are included by default
:return: list of speakers for each chronological utterance | 625941bd287bf620b61d3970 |
def __init__(self, response, connection): <NEW_LINE> <INDENT> self.body = self._decompress_response(response=response) <NEW_LINE> if PY3: <NEW_LINE> <INDENT> self.body = b(self.body).decode('utf-8') <NEW_LINE> <DEDENT> self.status = response.status <NEW_LINE> self.headers = dict(response.getheaders()) <NEW_LINE> self.error = response.reason <NEW_LINE> self.connection = connection <NEW_LINE> self.invalid = LinodeException(0xFF, "Invalid JSON received from server") <NEW_LINE> self.objects, self.errors = self.parse_body() <NEW_LINE> if not self.success(): <NEW_LINE> <INDENT> raise self.errors[0] | Instantiate a LinodeResponse from the HTTP response
:keyword response: The raw response returned by urllib
:return: parsed :class:`LinodeResponse` | 625941bd7cff6e4e81117890 |
def _extract_log_gatling_21(self, fields): <NEW_LINE> <INDENT> if fields[2].strip() == "USER": <NEW_LINE> <INDENT> if fields[3].strip() == "START": <NEW_LINE> <INDENT> self.concurrency += 1 <NEW_LINE> <DEDENT> elif fields[3].strip() == "END": <NEW_LINE> <INDENT> self.concurrency -= 1 <NEW_LINE> <DEDENT> <DEDENT> if fields[2].strip() != "REQUEST": <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> label = fields[4] <NEW_LINE> t_stamp = int(fields[8]) / 1000.0 <NEW_LINE> r_time = (int(fields[8]) - int(fields[5])) / 1000.0 <NEW_LINE> latency = (int(fields[7]) - int(fields[6])) / 1000.0 <NEW_LINE> con_time = (int(fields[6]) - int(fields[5])) / 1000.0 <NEW_LINE> if fields[-1] == 'OK': <NEW_LINE> <INDENT> r_code = '200' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _tmp_rc = fields[-1].split(" ")[-1] <NEW_LINE> r_code = _tmp_rc if _tmp_rc.isdigit() else 'No RC' <NEW_LINE> <DEDENT> if len(fields) >= 11 and fields[10]: <NEW_LINE> <INDENT> error = fields[10] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> error = None <NEW_LINE> <DEDENT> return int(t_stamp), label, r_time, con_time, latency, r_code, error | Extract stats from Gatling 2.1 format.
:param fields:
:return: | 625941bdfbf16365ca6f60c8 |
def detect_landmarks(self, limit=10): <NEW_LINE> <INDENT> features = [Feature(FeatureTypes.LANDMARK_DETECTION, limit)] <NEW_LINE> return self._detect_annotation(features) | Detect landmarks in an image.
:type limit: int
:param limit: The maximum number of landmarks to find.
:rtype: list
:returns: List of
:class:`~google.cloud.vision.entity.EntityAnnotation`. | 625941bd3eb6a72ae02ec3e0 |
def is_hyper(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if __grains__['virtual_subtype'] != 'Xen Dom0': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with salt.utils.fopen('/proc/modules') as fp_: <NEW_LINE> <INDENT> if 'xen_' not in fp_.read(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except (OSError, IOError): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return 'xenstore' in __salt__['cmd.run'](__grains__['ps']) | Returns a bool whether or not this node is a hypervisor of any kind
CLI Example:
.. code-block:: bash
salt '*' virt.is_hyper | 625941bd293b9510aa2c31a3 |
def __init__(self, study_type=None, name=None, sim_types=None, inputs=None, pool_type=None, build_pool_type=None, state=None, valid_for_transient=None, valid_for_inline=None, previous_definitions=None, implicit_sim_types=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._study_type = None <NEW_LINE> self._name = None <NEW_LINE> self._sim_types = None <NEW_LINE> self._inputs = None <NEW_LINE> self._pool_type = None <NEW_LINE> self._build_pool_type = None <NEW_LINE> self._state = None <NEW_LINE> self._valid_for_transient = None <NEW_LINE> self._valid_for_inline = None <NEW_LINE> self._previous_definitions = None <NEW_LINE> self._implicit_sim_types = None <NEW_LINE> self.discriminator = None <NEW_LINE> if study_type is not None: <NEW_LINE> <INDENT> self.study_type = study_type <NEW_LINE> <DEDENT> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if sim_types is not None: <NEW_LINE> <INDENT> self.sim_types = sim_types <NEW_LINE> <DEDENT> if inputs is not None: <NEW_LINE> <INDENT> self.inputs = inputs <NEW_LINE> <DEDENT> if pool_type is not None: <NEW_LINE> <INDENT> self.pool_type = pool_type <NEW_LINE> <DEDENT> if build_pool_type is not None: <NEW_LINE> <INDENT> self.build_pool_type = build_pool_type <NEW_LINE> <DEDENT> if state is not None: <NEW_LINE> <INDENT> self.state = state <NEW_LINE> <DEDENT> if valid_for_transient is not None: <NEW_LINE> <INDENT> self.valid_for_transient = valid_for_transient <NEW_LINE> <DEDENT> if valid_for_inline is not None: <NEW_LINE> <INDENT> self.valid_for_inline = valid_for_inline <NEW_LINE> <DEDENT> if previous_definitions is not None: <NEW_LINE> <INDENT> self.previous_definitions = previous_definitions <NEW_LINE> <DEDENT> if implicit_sim_types is not None: <NEW_LINE> <INDENT> self.implicit_sim_types = 
implicit_sim_types | StudyTypeDefinition - a model defined in OpenAPI | 625941bd596a8972360899ce |
def update_logging_config(context: Any, log_name: Optional[str] = None, file_name: str = "worker.log") -> None: <NEW_LINE> <INDENT> log_name = log_name or __name__.split(".")[0] <NEW_LINE> top_level_logger = logging.getLogger(log_name) <NEW_LINE> datefmt = context.config["log_datefmt"] <NEW_LINE> fmt = context.config["log_fmt"] <NEW_LINE> formatter = logging.Formatter(fmt=fmt, datefmt=datefmt) <NEW_LINE> if context.config.get("verbose"): <NEW_LINE> <INDENT> top_level_logger.setLevel(logging.DEBUG) <NEW_LINE> if len(top_level_logger.handlers) == 0: <NEW_LINE> <INDENT> handler = logging.StreamHandler() <NEW_LINE> handler.setFormatter(formatter) <NEW_LINE> top_level_logger.addHandler(handler) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> top_level_logger.setLevel(logging.INFO) <NEW_LINE> <DEDENT> makedirs(context.config["log_dir"]) <NEW_LINE> path = os.path.join(context.config["log_dir"], file_name) <NEW_LINE> if context.config["watch_log_file"]: <NEW_LINE> <INDENT> handler = logging.handlers.WatchedFileHandler(path) <NEW_LINE> <DEDENT> elif context.config["log_max_bytes"] and context.config["log_max_backups"]: <NEW_LINE> <INDENT> handler = logging.handlers.RotatingFileHandler( filename=path, maxBytes=context.config["log_max_bytes"], backupCount=context.config["log_max_backups"], ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> handler = logging.FileHandler(path) <NEW_LINE> <DEDENT> handler.setFormatter(formatter) <NEW_LINE> top_level_logger.addHandler(handler) <NEW_LINE> top_level_logger.addHandler(logging.NullHandler()) | Update python logging settings from config.
By default, this sets the ``scriptworker`` log settings, but this will
change if some other package calls this function or specifies the ``log_name``.
* Use formatting from config settings.
* Log to screen if ``verbose``
* Add a rotating logfile from config settings.
Args:
context (scriptworker.context.Context): the scriptworker context.
log_name (str, optional): the name of the Logger to modify.
If None, use the top level module ('scriptworker').
Defaults to None. | 625941bddc8b845886cb543e |
def nextPermutation(self, nums: List[int]) -> None: <NEW_LINE> <INDENT> if len(nums)==1: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif len(nums)==2: <NEW_LINE> <INDENT> trade(nums,0,1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if nums==sorted(nums,reverse=True): <NEW_LINE> <INDENT> trade(nums,nums.index(min(nums)),0) <NEW_LINE> nums[1:]=sorted(nums[1:]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(2,len(nums)+1): <NEW_LINE> <INDENT> if nums[i*-1:]==sorted(nums[i*-1:],reverse=True): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> now=nums[i*-1:] <NEW_LINE> t=min([a for a in now[1:] if a>now[0]]) <NEW_LINE> now_i=now.index(t) <NEW_LINE> trade(now,0,now_i) <NEW_LINE> now[1:]=sorted(now[1:]) <NEW_LINE> print(now,t,now_i) <NEW_LINE> nums[i*-1:]=now <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print(nums) | Do not return anything, modify nums in-place instead. | 625941bdd99f1b3c44c6749f |
def do_shutdown(self, restart=False): <NEW_LINE> <INDENT> pass | Callback to do stuff on kernel shutdown
:param restart: ignored | 625941bdfb3f5b602dac359b |
def make_update_sql(table, data, condition): <NEW_LINE> <INDENT> key_values = [] <NEW_LINE> for key, value in data.items(): <NEW_LINE> <INDENT> value = format_sql_value(value) <NEW_LINE> if isinstance(value, str): <NEW_LINE> <INDENT> key_values.append("`{}`={}".format(key, repr(value))) <NEW_LINE> <DEDENT> elif value is None: <NEW_LINE> <INDENT> key_values.append("`{}`={}".format(key, "null")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key_values.append("`{}`={}".format(key, value)) <NEW_LINE> <DEDENT> <DEDENT> key_values = ", ".join(key_values) <NEW_LINE> sql = "update `{table}` set {key_values} where {condition}" <NEW_LINE> sql = sql.format(table=table, key_values=key_values, condition=condition) <NEW_LINE> return sql | @summary: 适用于mysql, oracle数据库时间需要to_date 处理(TODO)
---------
@param table:
@param data: 表数据 json格式
@param condition: where 条件
---------
@result: | 625941bdd164cc6175782c58 |
def get_website(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> html = urlopen('http://www.usu.edu.au/Our-Clubs-Societies/Our-clubs-societies.aspx') <NEW_LINE> <DEDENT> except HTTPError as e: <NEW_LINE> <INDENT> print('Cannot connect to website at this time.') <NEW_LINE> return <NEW_LINE> <DEDENT> if html is None: <NEW_LINE> <INDENT> print('Server not found.') <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Success in connecting!") <NEW_LINE> return html | Ensure call to USU website is successful. | 625941bdbaa26c4b54cb102d |
def mark_message_as_used(self, chat_id: int, user_id: int, message_id: int): <NEW_LINE> <INDENT> self.blog.debug(f'Adding message to messages table; message #{message_id} in chat #{chat_id} ' f'for user {user_id}') <NEW_LINE> self.db.run_single_update_query('insert into messages (message_id, chat_id, user_id) values (%s, %s, %s)', (message_id, chat_id, user_id)) | Mark that user already have changed karma due to given message | 625941bdbe7bc26dc91cd510 |
def setAstroChart2WithBirthInfo(self): <NEW_LINE> <INDENT> localizedDt = self.birthInfo.getBirthLocalizedDatetime() <NEW_LINE> if self.astrologyChartWidgetEnabled: <NEW_LINE> <INDENT> self.astrologyChartWidget.setAstroChart2Datetime(localizedDt) <NEW_LINE> <DEDENT> if self.planetaryInfoTableWidgetEnabled: <NEW_LINE> <INDENT> self._updatePlanetaryInfoTable(localizedDt) | Sets AstroChart2 with the info in the BirthInfo of this document.
| 625941bda934411ee375159d |
def rotctld_angle(rotctl): <NEW_LINE> <INDENT> rotctl.send(b'p\n') <NEW_LINE> azimuth, elevation = rotctl.recv(1024).decode('ascii').splitlines() <NEW_LINE> return azimuth, elevation | Get azimuth,elevation from rotctld server connection. | 625941bd6fb2d068a760efa5 |
@login_required <NEW_LINE> def update(request): <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> form = ProfileForm(request.POST, request.FILES) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> user = User.objects.get(id=request.user.id) <NEW_LINE> user.first_name = form.cleaned_data['first_name'] <NEW_LINE> user.last_name = form.cleaned_data['last_name'] <NEW_LINE> user.save() <NEW_LINE> profile = Profile.objects.get(user=user) <NEW_LINE> profile.phone = form.cleaned_data['phone'] <NEW_LINE> is_legal = int(form.cleaned_data['is_legal']) <NEW_LINE> try: <NEW_LINE> <INDENT> profile.img = handle_uploaded_file(request.FILES['img'], 'user_pic') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if is_legal == 0: <NEW_LINE> <INDENT> profile.is_legal = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> profile.is_legal = True <NEW_LINE> <DEDENT> profile.save() <NEW_LINE> form = LegalForm(request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> legal = Legal.objects.get(profile=profile) <NEW_LINE> legal.title = form.cleaned_data['legal_name'] <NEW_LINE> legal.inn = form.cleaned_data['inn'] <NEW_LINE> legal.kpp = form.cleaned_data['kpp'] <NEW_LINE> legal.bik = form.cleaned_data['bik'] <NEW_LINE> legal.rs = int(form.cleaned_data['rs']) <NEW_LINE> legal.ks = int(form.cleaned_data['ks']) <NEW_LINE> legal.post = form.cleaned_data['post'] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> legal = Legal( profile = profile, title = form.cleaned_data['legal_name'], inn = form.cleaned_data['inn'], kpp = form.cleaned_data['kpp'], bik = form.cleaned_data['bik'], rs = int(form.cleaned_data['rs']), ks = int(form.cleaned_data['ks']), post = form.cleaned_data['post'], ) <NEW_LINE> <DEDENT> legal.save() <NEW_LINE> <DEDENT> form = DeliveryForm(request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> delivery = Delivery.objects.get(profile=profile) <NEW_LINE> delivery.title = 
form.cleaned_data['delivery_name'] <NEW_LINE> delivery.address = form.cleaned_data['address'] <NEW_LINE> delivery.city = form.cleaned_data['city'] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> delivery = Delivery( profile = profile, title = form.cleaned_data['delivery_name'], address = form.cleaned_data['address'], city = form.cleaned_data['city'] ) <NEW_LINE> <DEDENT> delivery.save() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return HttpResponseRedirect('/profile/') | Данная функция отвечает за обноление всех данных о пользователе: Профиль, Юр. данные, Инф. о доставке.
Она принимает на вход всего 1 форму и в зависимости от параметров расбрасывает эти данные в набор моделей. | 625941bdcdde0d52a9e52f3a |
def extract_id(self, url: str) -> str: <NEW_LINE> <INDENT> gnd_id = re.search(self.ID_PATTERN, url) <NEW_LINE> if gnd_id is None: <NEW_LINE> <INDENT> raise GNDIdError(f'Could not find GND-ID in "{url}"') <NEW_LINE> <DEDENT> return gnd_id.group() | Extract the GND-ID from an GND-URL.
:param url: A GND-URL, e.g. http://d-nb.info/gnd/118650130
:type url: str
:raises: GNDIdError if no GND-ID is found.
:return: The GND-ID, e.g. 118650130
:rtype: str | 625941bd24f1403a92600a74 |
def fixup_resnet110(**kwargs): <NEW_LINE> <INDENT> model = FixupResNet(FixupBasicBlock, [18, 18, 18], **kwargs) <NEW_LINE> return model | Constructs a Fixup-ResNet-110 model.
| 625941bd283ffb24f3c5580f |
def test_in_use_flag(self): <NEW_LINE> <INDENT> @asyncio.coroutine <NEW_LINE> def test(): <NEW_LINE> <INDENT> connection = yield from Connection.create(port=PORT, poolsize=10) <NEW_LINE> for i in range(0, 10): <NEW_LINE> <INDENT> yield from connection.delete([ 'my-list-%i' % i ]) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def sink(i): <NEW_LINE> <INDENT> the_list, result = yield from connection.blpop(['my-list-%i' % i]) <NEW_LINE> <DEDENT> for i in range(0, 10): <NEW_LINE> <INDENT> self.assertEqual(connection.connections_in_use, i) <NEW_LINE> asyncio.Task(sink(i)) <NEW_LINE> yield from asyncio.sleep(.1) <NEW_LINE> <DEDENT> with self.assertRaises(RedisException) as e: <NEW_LINE> <INDENT> yield from connection.delete([ 'my-list-one-more' ]) <NEW_LINE> yield from connection.blpop(['my-list-one-more']) <NEW_LINE> <DEDENT> self.assertEqual(e.exception.args[0], 'All connection in the pool are in use. Please increase the poolsize.') <NEW_LINE> <DEDENT> self.loop.run_until_complete(test()) | Do several blocking calls and see whether in_use increments. | 625941bd07d97122c4178790 |
def getPilotData (self, version_): <NEW_LINE> <INDENT> if not version_ in self.versions: <NEW_LINE> <INDENT> self._log("get-pilot-data").error("getPilotData() called, version %s unknown", version_) <NEW_LINE> raise InstallException("Internal error detected in getPilotData()") <NEW_LINE> <DEDENT> data = self.versions[version_].get(self.kTokenPilotData) <NEW_LINE> self._log("get-pilot-data").info("getPilotData(version=%s) returning '%s'", version_, data) <NEW_LINE> return data | Gets pilot data. If none, returns None | 625941bd377c676e912720b4 |
def test_managed_show_changes_true(self): <NEW_LINE> <INDENT> name = os.path.join(RUNTIME_VARS.TMP, 'grail_not_scene33') <NEW_LINE> with salt.utils.files.fopen(name, 'wb') as fp_: <NEW_LINE> <INDENT> fp_.write(b'test_managed_show_changes_false\n') <NEW_LINE> <DEDENT> ret = self.run_state( 'file.managed', name=name, source='salt://grail/scene33', ) <NEW_LINE> changes = next(six.itervalues(ret))['changes'] <NEW_LINE> self.assertIn('diff', changes) | file.managed test interface | 625941bd67a9b606de4a7dc7 |
def xhtml_escape_recursive(d): <NEW_LINE> <INDENT> if isinstance(d, str): <NEW_LINE> <INDENT> return xhtml_escape(d) <NEW_LINE> <DEDENT> elif isinstance(d, list): <NEW_LINE> <INDENT> return map(xhtml_escape_recursive, d) <NEW_LINE> <DEDENT> elif isinstance(d, dict): <NEW_LINE> <INDENT> return {k: xhtml_escape_recursive(v) for k, v in d.items()} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return d | xhtml escape more complex data structures.
Parameters
----------
d : data-structure, i.e. str or list or dict or combinations thereof
Returns
-------
Same data-structure but with xhtml_escaped fields (not keys). | 625941bd26068e7796caebe5 |
def set_subscribed(email, is_subscribed): <NEW_LINE> <INDENT> model = SubscriptionStateEntity.get_by_key_name(email) <NEW_LINE> if model is None: <NEW_LINE> <INDENT> model = SubscriptionStateEntity(key_name=email) <NEW_LINE> <DEDENT> model.is_subscribed = is_subscribed <NEW_LINE> model.put() | Set the state of a given user.
Args:
email: string. The email address of the user.
is_subscribed: bool. The state to set. True means that the user is
subscribed and should continue to receive emails; False means that
they should not.
Returns:
None. | 625941bd32920d7e50b280d8 |
def ce_loss(logits, labels): <NEW_LINE> <INDENT> return F.cross_entropy(logits, Variable(labels), ignore_index=255) | Calculate cross-entropy loss. | 625941bd4e4d5625662d42e6 |
def make_data_for_atomicNNs(self, GData, OutData=[], GDerivatives=[ ], ForceOutput=[], Normalization=[], AppendForce=True,Placeholder1=[],Placeholder2=[]): <NEW_LINE> <INDENT> CombinedData = [] <NEW_LINE> if AppendForce: <NEW_LINE> <INDENT> for e, f, n in zip(GData, GDerivatives, Normalization): <NEW_LINE> <INDENT> CombinedData.append(e) <NEW_LINE> CombinedData.append(f) <NEW_LINE> CombinedData.append(n) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for Data in GData: <NEW_LINE> <INDENT> CombinedData.append(Data) <NEW_LINE> <DEDENT> <DEDENT> if len(OutData) != 0: <NEW_LINE> <INDENT> CombinedData.append(OutData) <NEW_LINE> if AppendForce: <NEW_LINE> <INDENT> CombinedData.append(ForceOutput) <NEW_LINE> <DEDENT> <DEDENT> return CombinedData | Sorts the symmetry function data for feeding.
For training the output data also has to be added.
Returns:
CombinedData(list): Sorted data for the batch as a list. | 625941bd45492302aab5e1cb |
def _add_placement_provenance(placement, txrx, errors): <NEW_LINE> <INDENT> if isinstance( placement.vertex, AbstractProvidesProvenanceDataFromMachine): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> placement.vertex.get_provenance_data_from_machine( txrx, placement) <NEW_LINE> with ProvenanceWriter() as db: <NEW_LINE> <INDENT> db.add_core_name(placement.x, placement.y, placement.p, placement.vertex.label) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> errors.append(traceback.format_exc()) | :param ~.Placement placement:
:param ~.Transceiver txrx:
:param list(str) errors: | 625941bd0a50d4780f666d9b |
def Main(): <NEW_LINE> <INDENT> trigger = GetTrigger() <NEW_LINE> Notify(trigger) <NEW_LINE> if trigger == Application(): <NEW_LINE> <INDENT> print("application!") <NEW_LINE> <DEDENT> elif trigger == Verification(): <NEW_LINE> <INDENT> print("verification!") <NEW_LINE> <DEDENT> k = 10 <NEW_LINE> print("hello") <NEW_LINE> return k | :return: | 625941bd29b78933be1e55bc |
def test_Helmholtz(self): <NEW_LINE> <INDENT> T = 500 <NEW_LINE> rho = 838.025 <NEW_LINE> fluid = IAPWS95() <NEW_LINE> delta = rho/fluid.rhoc <NEW_LINE> tau = fluid.Tc/T <NEW_LINE> fio, fiot, fiott, fiod, fiodd, fiodt = fluid._phi0(tau, delta) <NEW_LINE> self.assertEqual(round(fio, 8), 2.04797733) <NEW_LINE> self.assertEqual(round(fiod, 9), 0.384236747) <NEW_LINE> self.assertEqual(round(fiodd, 9), -0.147637878) <NEW_LINE> self.assertEqual(round(fiot, 8), 9.04611106) <NEW_LINE> self.assertEqual(round(fiott, 8), -1.93249185) <NEW_LINE> self.assertEqual(round(fiodt, 8), 0.0) <NEW_LINE> fir, firt, firtt, fird, firdd, firdt, firdtt, B, C = fluid._phir(tau, delta) <NEW_LINE> self.assertEqual(round(fir, 8), -3.42693206) <NEW_LINE> self.assertEqual(round(fird, 9), -0.364366650) <NEW_LINE> self.assertEqual(round(firdd, 9), 0.856063701) <NEW_LINE> self.assertEqual(round(firt, 8), -5.81403435) <NEW_LINE> self.assertEqual(round(firtt, 8), -2.23440737) <NEW_LINE> self.assertEqual(round(firdt, 8), -1.12176915) | Table 6 from IAPWS95, pag 14 | 625941bd50812a4eaa59c22f |
def remove_contract(self, msg): <NEW_LINE> <INDENT> self.log.info("Removing contract: %s", msg) <NEW_LINE> self.remove_from_keyword_indexes(msg['contract_id']) <NEW_LINE> self.db_connection.update_entries( "contracts", {"deleted": 1}, {"id": msg["contract_id"]} ) <NEW_LINE> self.update_listings_index() | Remove contract and update own list of contracts keywords | 625941bd2eb69b55b151c7b7 |
def generate_verification_code(n_bits=64): <NEW_LINE> <INDENT> return base58_encode(generate_random_bits(n_bits)) | Compact human-inputable strong verification code.
:param n_bits:
Bit size. 64 is default.
:returns:
A base58-encoded random unsigned integral human-inputable compact
verification code. | 625941bd7b25080760e39365 |
def iter_after_lines(self): <NEW_LINE> <INDENT> start = self.after.offset <NEW_LINE> end = self.after.offset + self.after.numlines <NEW_LINE> return (line for line in self._iter_lines(self.lines[start:end])) | Iterate over the lines in the "after" component of this hunk
| 625941bddc8b845886cb543f |
def getAltAndTitle(self, altortitle, open_link_in_new_window): <NEW_LINE> <INDENT> if open_link_in_new_window: <NEW_LINE> <INDENT> return '%s, %s' % (altortitle.decode('utf-8'), self.portal().translate(_('obrir_link_finestra_nova', default=u"(obriu en una finestra nova)"))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '%s' % (altortitle.decode('utf-8')) | Funcio que extreu idioma actiu i afegeix al alt i al title de les imatges del carrousel
el literal Obriu enllac en una finestra nova | 625941bde8904600ed9f1e35 |
def create_tenant_group(self, payload={}): <NEW_LINE> <INDENT> return self._tenantGroups_node_(payload=payload, HTTPmethod='POST') | Create a tenant group.
:param payload: Details the initial state of the tenant
:type payload: Dict
.. seealso:: https://www.acano.com/publications/2015/09/Solution-API-Reference-R1_8.pdf#page=95
.. seealso:: https://www.acano.com/publications/2015/09/Acano-Solution-Multi-tenancy-Considerations1.pdf#page=6
.. note:: v1.8 upward | 625941bd8c3a8732951582c3 |
def getcurrency(data,index): <NEW_LINE> <INDENT> return data[(index-4):(index-1)] | returns the currency based on the provided index | 625941bd097d151d1a222d67 |
def _pre_update(self): <NEW_LINE> <INDENT> self._matrix = self._coo | Do anything that needs to be done at the start of AssembledJacobian._update. | 625941bd56b00c62f0f14563 |
def secure_filename(filename):
    """Pass it a filename and it will return a secure version of it. This
    filename can then safely be stored on a regular file system and passed
    to :func:`os.path.join`. The filename returned is an ASCII only string
    for maximum portability.

    On windows system the function also makes sure that the file is not
    named after one of the special device files.

    >>> secure_filename("My cool movie.mov")
    'My_cool_movie.mov'
    >>> secure_filename("../../../etc/passwd")
    'etc_passwd'

    The function might return an empty filename. It's your responsibility
    to ensure that the filename is unique and that you generate a random
    filename if the function returned an empty one.

    .. versionadded:: 0.5

    :param filename: the filename to secure
    """
    if isinstance(filename, text_type):
        from unicodedata import normalize
        # Transliterate to ASCII; characters with no ASCII mapping are
        # dropped by the 'ignore' error handler.
        filename = normalize('NFKD', filename).encode('ascii', 'ignore')
    # Neutralise both platform path separators so no directory traversal
    # component survives (altsep may be None, hence the truthiness check).
    for sep in os.path.sep, os.path.altsep:
        if sep:
            filename = filename.replace(sep, ' ')
    # Collapse whitespace runs to single underscores, strip the characters
    # rejected by _filename_ascii_strip_re, then trim leading/trailing
    # dots and underscores.
    filename = str(_filename_ascii_strip_re.sub('', '_'.join(
        filename.split()))).strip('._')
    # On Windows, prefix names that would shadow device files (CON, PRN, ...).
    if os.name == 'nt' and filename and filename.split('.')[0].upper() in _windows_device_files:
        filename = '_' + filename
    return filename
def select_option_from_drop_down_using_value_by_xpath(self, xpath, choice_text):
    """Select an option from a drop-down located by XPath.

    Delegates to the generic helper with ``By.XPATH`` as the locator
    strategy; the option is matched on its value attribute.

    :param xpath: String - the xpath of the element to look for
    :param choice_text: String - the value to select from the drop down
    :return: None
    """
    self._select_option_from_drop_down_using_value(By.XPATH, xpath, choice_text)
def default_action(self):
    """Apply rotation to the arm segments, playing a sound while moving.

    Each of the six segments rotates at most ``_speed / frequency`` radians
    per tick around its single degree of freedom ('y' or 'z') toward the
    target angle in ``local_data``; the 'Sound' actuator plays whenever at
    least one segment moved this tick.
    """
    # Lazily grab and activate the Blender 'Sound' actuator on first run.
    if self._sound is None:
        logger.debug ("ACTIVATING THE SOUND ACTUATOR")
        contr = blenderapi.controller()
        self._sound = contr.actuators['Sound']
        contr.activate(self._sound)
        self._sound.stopSound()
    rx, ry, rz = 0.0, 0.0, 0.0
    try:
        # Maximum rotation step for this tick.
        rotation = self._speed / self.frequency
    except ZeroDivisionError:
        # NOTE(review): if frequency is 0, `rotation` stays unbound and the
        # rotation_direction calls below would raise NameError -- presumably
        # frequency is always nonzero here; confirm.
        pass
    self._moving = False
    for i in range(6):
        key = ('seg%d' % i)
        target_angle = normalise_angle(self.local_data[key])
        segment = self._segments[i]
        # Current orientation of this segment, as Euler angles.
        rot_matrix = segment.localOrientation
        segment_matrix = mathutils.Matrix((rot_matrix[0], rot_matrix[1], rot_matrix[2]))
        segment_euler = segment_matrix.to_euler()
        # Rotate only around the segment's configured degree of freedom.
        if self._dofs[i] == 'y':
            ry = rotation_direction(segment_euler[1], target_angle, self._tolerance, rotation)
        elif self._dofs[i] == 'z':
            rz = rotation_direction(segment_euler[2], target_angle, self._tolerance, rotation)
        logger.debug("ry = %.4f, rz = %.4f" % (ry, rz))
        # Local-axis rotation (second argument True).
        segment.applyRotation([rx, ry, rz], True)
        if ry != 0.0 or rz != 0.0:
            self._moving = True
        # Reset for the next segment.
        ry = rz = 0
    if self._moving:
        self._sound.startSound()
        logger.debug("STARTING SOUND")
    else:
        self._sound.stopSound()
        logger.debug("STOPPING SOUND")
def update_user_agent(headers=None):
    """Return a copy of *headers* overlaid with ``REQUESTS_HEADERS``.

    Default `requests` user agent is blocked on Ropsten, refs:
    - https://github.com/corpetty/py-etherscan-api/issues/70
    - https://www.reddit.com/r/etherscan/comments/dtg8xl/
    """
    base = headers if headers else {}
    return {**base, **REQUESTS_HEADERS}
def get_pics():
    """Return photo nodes ("photo" and "lophoto" types), newest first.

    NOTE(review): the original docstring mentioned a page parameter, but
    none is used -- the full ordered query is returned; paging presumably
    happens in the caller. Confirm.
    """
    node_order = Node.created.desc()
    nodes = Node.query.filter((Node.type == "photo") | (Node.type == "lophoto")).order_by(node_order)
    return nodes
def wake_up(self) -> None:
    """Instructs the robot to execute the default wake_up behavior.

    Sends the 'action_wakeup' command with an empty payload.
    See: http://doc.aldebaran.com/2-8/naoqi/motion/control-stiffness-api.html?highlight=wakeup#ALMotionProxy::wakeUp
    """
    self.__send('action_wakeup', '')
def run(self):
    """Our thread's run function.

    This thread picks a job from the queue, runs it, and then goes to the
    next job. Loops forever, blocking on ``queue.get`` when the queue is
    empty; each completed job is acknowledged with ``task_done`` so
    ``queue.join`` can track progress.
    """
    while True:
        job = self.builder.queue.get()
        self.RunJob(job)
        self.builder.queue.task_done()
def __init__(self, num_seqs=None, num_bases=None, min_len=None, max_len=None, average=None, base_counts=None):
    """Build an empty FastaStats object.

    All statistics default to None, meaning "not yet computed":
    num_seqs/num_bases are totals, min_len/max_len/average are
    sequence-length statistics, and base_counts holds per-base tallies.
    """
    self.num_seqs = num_seqs
    self.num_bases = num_bases
    self.min_len = min_len
    self.max_len = max_len
    self.average = average
    self.base_counts = base_counts
def get_input_file(**keys):
    """Build the path of an input file.

    parameters
    ----------
    gdrun: keyword
        The gdrun e.g. nbc-sva1-001 (consumed by get_input_dir/get_front)
    ftype: keyword
        The file type, e.g. 'meds' 'truth'
    fnum: keyword
        The file number within given g set
    gnum: keyword
        The g (shear) number set
    noisefree: bool
        If true, return path to noisefree data; meds only.
    meds_ext: string, optional
        Extension, e.g. fits or fits.fz; only honoured for ftype='meds'.
    """
    d = get_input_dir(**keys)
    noisefree = keys.get("noisefree", False)
    ftype = keys['ftype']
    front = get_front(**keys)
    # The '.noisefree' suffix only applies to meds files.
    if noisefree and ftype == 'meds':
        bname = front + '.%(ftype)s.%(fnum)03i.g%(gnum)02i.noisefree'
    else:
        bname = front + '.%(ftype)s.%(fnum)03i.g%(gnum)02i'
    # Fill the template from the keyword dict itself.
    bname = bname % keys
    # Only meds files may override the default 'fits' extension.
    if ftype == 'meds':
        ext = keys.get('meds_ext', 'fits')
    else:
        ext = 'fits'
    bname = '%s.%s' % (bname, ext)
    fname = os.path.join(d, bname)
    return fname
def set_multi_objective_exprs(self, exprs, priorities=None, weights=None, abstols=None, reltols=None, names=None):
    """Defines a list of blended objectives.

    Objectives with the same priority are combined using weights. Then,
    objectives are optimized in a lexicographic fashion by decreasing
    priority.

    Delegates to the internal ``_set_multi_objective_exprs`` with the
    caller name reported as ``Model.set_multi_objective()``.

    Args:
        exprs: Is converted to a list of linear expressions. Accepted types
            for this list's items are variables, linear expressions or
            numbers.
        priorities: if defined, a list of priorities having the same size
            as the `exprs` argument. Priorities define how objectives are
            grouped together into sub-problems, and in which order these
            sub-problems are solved (in decreasing order of priorities).
        weights: if defined, a list of weights having the same size as the
            `exprs` argument. Weights define how objectives with the same
            priority are blended together to define the associated
            sub-problem objective that is optimized.
        abstols: if defined, a list of absolute tolerances having the same
            size as the `exprs` argument.
        reltols: if defined, a list of relative tolerances having the same
            size as the `exprs` argument.
        names: if defined, a list of names for objectives having the same
            size as the `exprs` argument.

    Note:
        When using a number for an objective, the search will not optimize
        but only look for a feasible solution.

    *New in version 2.9.*
    """
    self._set_multi_objective_exprs(exprs, priorities, weights, abstols, reltols, names, caller='Model.set_multi_objective()')
def help(world):
    """Display the available system commands.

    The *world* argument is accepted (command-handler signature) but not
    used. Note: this shadows the builtin ``help`` at module level.
    """
    print_sep()
    print("以下为系统命令,可随时输入使用:")
    for name, description in COMMANDS.items():
        print("%s: %s" % (name, description))
    print_sep()
def __init__(self, iterable=[]):
    """Initialize the queue and enqueue the elements of *iterable*.

    dict inputs are enqueued as single-pair ``{key: value}`` dicts; any
    other iterable is enqueued element by element. Non-iterable inputs are
    reported and ignored (best effort, as before).

    iterable -- optional iterable (or dict) of initial elements; the
        default is never mutated, so the shared ``[]`` is safe.
    """
    self.front = None
    self.back = None
    self.length = 0
    if isinstance(iterable, dict):
        # BUG FIX: the original called dict.items() on the *class* (a
        # TypeError in Python 3) and then fell through to the generic loop,
        # which would have enqueued the keys a second time. Iterate the
        # argument's items and stop here.
        for k, v in iterable.items():
            self.enqueue({k: v})
        return
    try:
        for item in iterable:
            self.enqueue(item)
    except TypeError:
        print('iterable must be of type <iterable>.')
def update_prompt(self):
    """Update the prompt based on system variables.

    Adds a 'root' indicator when running as root and 'debug(N)' when
    settings.debug is positive; indicators are joined with '|' and the
    prompt always ends with '> '.
    """
    indicators = []
    if daemon.is_root():
        indicators.append('root')
    if settings.debug > 0:
        indicators.append('debug({})'.format(settings.debug))
    self.prompt = '|'.join(indicators) + '> '
def get_queryset(self):
    """Get the queryset to use in the template.

    Returns incomplete projects ordered by id, prefetching the related
    position_set to avoid a query per project.
    """
    return models.Project.objects.filter(
        completed=False
    ).prefetch_related(
        'position_set'
    ).order_by('id')
def _validate_sample(self, value):
    """Argument validation for distribution methods such as `log_prob`,
    `cdf` and `icdf`. The rightmost dimensions of a value to be
    scored via these methods must agree with the distribution's batch
    and event shapes.

    Args:
        value (Tensor): the tensor whose log probability is to be
            computed by the `log_prob` method.
    Raises:
        ValueError: when `value` is not a Tensor, when its rightmost
            dimensions do not match the distribution's event shape, when
            it is not broadcastable with batch_shape + event_shape, or
            when it falls outside the distribution's support.
    """
    if not isinstance(value, torch.Tensor):
        raise ValueError('The value argument to log_prob must be a Tensor')
    # The trailing len(event_shape) dims of value must equal event_shape.
    event_dim_start = len(value.size()) - len(self._event_shape)
    if value.size()[event_dim_start:] != self._event_shape:
        raise ValueError('The right-most size of value must match event_shape: {} vs {}.'.
                         format(value.size(), self._event_shape))
    # Check broadcastability right-to-left: each dim pair must be equal or 1.
    actual_shape = value.size()
    expected_shape = self._batch_shape + self._event_shape
    for i, j in zip(reversed(actual_shape), reversed(expected_shape)):
        if i != 1 and j != 1 and i != j:
            raise ValueError('Value is not broadcastable with batch_shape+event_shape: {} vs {}.'.
                             format(actual_shape, expected_shape))
    # Finally, every element must lie in the distribution's support.
    if not self.support.check(value).all():
        raise ValueError('The value argument must be within the support')
@login_required
def save_note(request):
    """Save a reading note for a book (translated from: 保存读书笔记).

    :param request: HTTP request; a POST is expected to carry NoteForm data
    :return: redirect to the book page on success, otherwise the index page
    """
    user_id = request.session.get('user_id', )
    if request.method == 'POST':
        note_form = NoteForm(request.POST)
        if note_form.is_valid():
            book_id = note_form.cleaned_data['book_id']
            # NOTE(review): the full User object is stored in user_name --
            # presumably the model field accepts it; confirm.
            user_name = User.objects.get(user_id=user_id)
            title = note_form.cleaned_data['title']
            content = note_form.cleaned_data['content']
            book_note = BookNote(book_id=book_id, user_id=user_id,
                                 user_name=user_name, title=title,
                                 content=content,
                                 create_date=datetime.date.today())
            book_note.save()
            return HttpResponseRedirect('/book/' + book_id)
    # BUG FIX: the original returned None for an invalid POST form; a Django
    # view must always return an HttpResponse, so fall through to the index
    # page for both invalid forms and non-POST requests.
    return render_to_response('index.html', )
def test_datetime_le():
    """Test datetime fields le.

    GIVEN a dataclass with date/time fields
    WHEN a `le` limit is set
    THEN any value that's set should be validated correctly
    """
    @dataclass
    class T:
        date_time: datetime = DateTimeField(le=datetime(2021,1,1))
        time_delta: timedelta = TimeDeltaField(le=timedelta(hours=1))
        date_val: date = DateField(le=date(2021,1,1))
        time_val: time = TimeField(le=time(hour=1))
    valid_dt: datetime = datetime(2020, 1, 1)
    invalid_dt: datetime = datetime(2021, 1, 2)
    valid_td: timedelta = timedelta(hours=1)
    invalid_td: timedelta = timedelta(hours=2)
    valid_da: date = date(2020,1,1)
    invalid_da: date = date(2021,1,2)
    valid_ti: time = time(hour=1)
    invalid_ti: time = time(hour=2)
    # Values at or below the limit must be accepted and stored unchanged
    # (valid_td and valid_ti are exactly equal to the limit: "le" is
    # inclusive).
    t = T(
        date_time=valid_dt,
        time_delta=valid_td,
        date_val=valid_da,
        time_val=valid_ti
    )
    assert t.date_time == valid_dt
    assert t.time_delta == valid_td
    assert t.date_val == valid_da
    assert t.time_val == valid_ti
    # Each field in turn rejects a value above its limit.
    with pytest.raises(ValueError):
        t = T(
            date_time=invalid_dt,
            time_delta=valid_td,
            date_val=valid_da,
            time_val=valid_ti
        )
    with pytest.raises(ValueError):
        t = T(
            date_time=valid_dt,
            time_delta=invalid_td,
            date_val=valid_da,
            time_val=valid_ti
        )
    with pytest.raises(ValueError):
        t = T(
            date_time=valid_dt,
            time_delta=valid_td,
            date_val=invalid_da,
            time_val=valid_ti
        )
    with pytest.raises(ValueError):
        t = T(
            date_time=valid_dt,
            time_delta=valid_td,
            date_val=valid_da,
            time_val=invalid_ti
        )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.