code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def getUniqueID(self): <NEW_LINE> <INDENT> return str(id(self))
Uses built-in python function to return a unique integer associated with this instance.
625941be7b180e01f3dc471d
def _set_axis(self, axis: int, labels, fastpath: bool = False) -> None: <NEW_LINE> <INDENT> if not fastpath: <NEW_LINE> <INDENT> labels = ensure_index(labels) <NEW_LINE> <DEDENT> if labels._is_all_dates: <NEW_LINE> <INDENT> if not isinstance(labels, (DatetimeIndex, PeriodIndex, TimedeltaIndex)): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> labels = DatetimeIndex(labels) <NEW_LINE> if fastpath: <NEW_LINE> <INDENT> self._mgr.set_axis(axis, labels) <NEW_LINE> <DEDENT> warnings.warn( "Automatically casting object-dtype Index of datetimes to " "DatetimeIndex is deprecated and will be removed in a " "future version. Explicitly cast to DatetimeIndex instead.", FutureWarning, stacklevel=3, ) <NEW_LINE> <DEDENT> except (tslibs.OutOfBoundsDatetime, ValueError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> object.__setattr__(self, "_index", labels) <NEW_LINE> if not fastpath: <NEW_LINE> <INDENT> self._mgr.set_axis(axis, labels)
Override generic, we want to set the _typ here. This is called from the cython code when we set the `index` attribute directly, e.g. `series.index = [1, 2, 3]`.
625941be4f6381625f114957
def diff_for_humans(self, other=None, absolute=False, locale=None): <NEW_LINE> <INDENT> is_now = other is None <NEW_LINE> if is_now: <NEW_LINE> <INDENT> other = self.today() <NEW_LINE> <DEDENT> diff = self.diff(other) <NEW_LINE> return pendulum.format_diff(diff, is_now, absolute, locale)
Get the difference in a human readable format in the current locale. When comparing a value in the past to default now: 1 day ago 5 months ago When comparing a value in the future to default now: 1 day from now 5 months from now When comparing a value in the past to another value: 1 day before 5 months before When comparing a value in the future to another value: 1 day after 5 months after :type other: Date :param absolute: removes time difference modifiers ago, after, etc :type absolute: bool :param locale: The locale to use for localization :type locale: str :rtype: str
625941be4e696a04525c9367
def test_classes(self): <NEW_LINE> <INDENT> self.assertEqual( self.env.default_mode, ClassDefaultMode.CONVENIENCE_MODE) <NEW_LINE> self.env.default_mode = ClassDefaultMode.CONSERVATION_MODE <NEW_LINE> self.assertEqual( self.env.default_mode, ClassDefaultMode.CONSERVATION_MODE) <NEW_LINE> defclass = self.env.find_class('USER') <NEW_LINE> self.assertTrue(defclass in self.env.classes()) <NEW_LINE> with self.assertRaises(LookupError): <NEW_LINE> <INDENT> self.env.find_class('NonExisting') <NEW_LINE> <DEDENT> defclass = self.env.find_class('ConcreteClass') <NEW_LINE> defclass.make_instance('some-instance') <NEW_LINE> defclass.make_instance('test-instance') <NEW_LINE> instance = self.env.find_instance('test-instance') <NEW_LINE> self.assertTrue(instance in self.env.instances()) <NEW_LINE> with self.assertRaises(LookupError): <NEW_LINE> <INDENT> self.env.find_instance('non-existing-instance') <NEW_LINE> <DEDENT> self.assertTrue(self.env.instances_changed) <NEW_LINE> self.assertFalse(self.env.instances_changed) <NEW_LINE> with TempFile() as tmp: <NEW_LINE> <INDENT> saved = self.env.save_instances(tmp.name) <NEW_LINE> self.env.reset() <NEW_LINE> loaded = self.env.load_instances(tmp.name) <NEW_LINE> self.assertEqual(saved, loaded) <NEW_LINE> <DEDENT> with TempFile() as tmp: <NEW_LINE> <INDENT> saved = self.env.save_instances(tmp.name) <NEW_LINE> self.env.reset() <NEW_LINE> loaded = self.env.restore_instances(tmp.name) <NEW_LINE> self.assertEqual(saved, loaded) <NEW_LINE> <DEDENT> with TempFile() as tmp: <NEW_LINE> <INDENT> saved = self.env.save_instances(tmp.name, binary=True) <NEW_LINE> self.env.reset() <NEW_LINE> loaded = self.env.load_instances(tmp.name) <NEW_LINE> self.assertEqual(saved, loaded)
Classes wrapper test.
625941bea4f1c619b28aff5a
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, XapiContext): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__
Returns true if both objects are equal
625941be10dbd63aa1bd2ac1
def nestedTriangle(t, length): <NEW_LINE> <INDENT> pass
Takes two parameters: a turtle and a length. The function does the following: if the length is greater than 10, it repeats 3 times: moves forward that length, turns 120 degrees, and calls nestedTriangle(t, length/2).
625941be462c4b4f79d1d5eb
def update_page(temp, fileDict, fileName, index=False): <NEW_LINE> <INDENT> temp.seek(0) <NEW_LINE> soup = BeautifulSoup(temp.read()) <NEW_LINE> update_file_urls(soup, fileDict, index) <NEW_LINE> update_css_urls(soup, fileDict, index) <NEW_LINE> update_image_urls(soup, fileDict, index) <NEW_LINE> update_page_urls(soup, fileDict, index) <NEW_LINE> strip_script(soup) <NEW_LINE> write_page(soup, fileName)
Updates the links in an html file to match the new file locations. Parameters: temp: tempfile object fileDict: dict of all files linked to in a presentation index: list index of page to be processed
625941be3617ad0b5ed67e13
def start(self): <NEW_LINE> <INDENT> self.start_from(today_as_datetime())
if No date has been provided then start from today
625941be498bea3a759b99cb
def request_registry_reports(self, report_type): <NEW_LINE> <INDENT> if report_type not in (Global.ROSTER, Global.SWITCHES, Global.SIGNALS, Global.WARRANTS, Global.SENSORS, Global.DASHBOARD, Global.LAYOUT): <NEW_LINE> <INDENT> self.parent.log_queue.add_message("error", Global.REGISTRY+" "+Global.REPORT+" "+Global.TYPE+" "+Global.INVALID+": "+str(report_type)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> now_seconds = self.parent.now_seconds() <NEW_LINE> session_id = 'dt:'+str(int(now_seconds)) <NEW_LINE> if self.parent.topic_registry_pub is not None: <NEW_LINE> <INDENT> state_topic = self.parent.topic_registry_pub <NEW_LINE> state_message = self.parent.format_state_body(self.parent.node_name, Global.REGISTRY, session_id, None, {Global.REPORT:report_type}, response_topic=self.parent.topic_self_subscribed+"/"+Global.RES) <NEW_LINE> self.parent.send_to_mqtt(state_topic, state_message)
publish a request for a registry report
625941bef7d966606f6a9f1c
def compute_cryptographic_checksum(self, p1, p2, data): <NEW_LINE> <INDENT> if p1 != 0x8E or p2 != 0x80: <NEW_LINE> <INDENT> raise SwError(SW["ERR_INCORRECTP1P2"]) <NEW_LINE> <DEDENT> self.ssc += 1 <NEW_LINE> checksum = vsCrypto.crypto_checksum(self.cct.algorithm, self.cct.key, data, self.cct.iv, self.ssc) <NEW_LINE> return checksum
Compute a cryptographic checksum (e.g. MAC) for the given data. The ePass uses a Send Sequence Counter for MAC calculation
625941bee8904600ed9f1e44
def set_title(self): <NEW_LINE> <INDENT> file_name = None if self.variables.image_reader is None else self.variables.image_reader.file_name <NEW_LINE> if file_name is None: <NEW_LINE> <INDENT> the_title = "Validation Tool" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> the_title = "Image Viewer for {}".format(os.path.split(file_name)[1]) <NEW_LINE> <DEDENT> self.winfo_toplevel().title(the_title)
Sets the window title.
625941be1d351010ab855a37
def parse_to_string_dictionary(self, words): <NEW_LINE> <INDENT> dict_ = getattr(self, words[0]) <NEW_LINE> key = words[1] <NEW_LINE> value = words[2] <NEW_LINE> dict_[key] = value
Parse field to string dictionary. Args: words: strings to parse
625941be91af0d3eaac9b931
def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, AreaContainer): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
Returns true if both objects are not equal
625941be3cc13d1c6d3c7296
@pytest.fixture <NEW_LINE> def observation_values_text(): <NEW_LINE> <INDENT> tz = 'UTC' <NEW_LINE> data_index = pd.date_range( start='20181202', end='20190112', freq='5min', tz=tz, closed='left') <NEW_LINE> data = pd.DataFrame({'value': data_index.hour, 'quality_flag': 0}, index=data_index) <NEW_LINE> text = utils.observation_df_to_json_payload(data) <NEW_LINE> return text.encode()
JSON text representation of test data
625941be31939e2706e4cd88
def factor_product(factor1, factor2): <NEW_LINE> <INDENT> new_variables = set(factor1.variables).union(set(factor2.variables)) <NEW_LINE> joined_variables = set.intersection(set(factor1.variables), set(factor2.variables)) <NEW_LINE> tmp1 = factor1.values.reset_index() <NEW_LINE> tmp2 = factor2.values.reset_index() <NEW_LINE> tmp = pd.merge(tmp1, tmp2, on=sorted(joined_variables)) <NEW_LINE> tmp['phi'] = np.multiply(tmp['phi_x'], tmp['phi_y']) <NEW_LINE> del tmp['phi_x'] <NEW_LINE> del tmp['phi_y'] <NEW_LINE> tmp = tmp.set_index(sorted(new_variables)).sort_index() <NEW_LINE> return Factor(tmp)
Compute the product of two factors Parameters ---------- factor1, factor2 : Factor Returns ------- Factor The resulting product
625941be23849d37ff7b2fab
def _procure_calculation_all_min_max(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> proc_obj = self.pool.get('procurement.order') <NEW_LINE> proc_obj._procure_orderpoint_confirm(cr, uid, use_new_cursor=cr.dbname, automatic=False, context=context) <NEW_LINE> return {}
@param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param ids: List of IDs selected @param context: A standard dictionary
625941be32920d7e50b280e8
def get_default_border_size(version): <NEW_LINE> <INDENT> return 4 if version > 0 else 2
Returns the default border size (quiet zone) for the provided version. :param int version: 1 .. 40 or a Micro QR Code version constant. :rtype: int
625941be5510c4643540f305
def get_avg_time(self): <NEW_LINE> <INDENT> self._raise_not_implemented()
Get averaging time in ms.
625941be50812a4eaa59c23f
def move(self, uuid, new_base): <NEW_LINE> <INDENT> raise NotImplementedError("object backend is not capable of moving objects")
Move object to new base.
625941be24f1403a92600a84
def gray_historam(image:np.ndarray, bins:int=256, mask:np.ndarray=None) -> np.ndarray: <NEW_LINE> <INDENT> print(mask is None) <NEW_LINE> gray_image = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY) <NEW_LINE> hist = cv2.calcHist([gray_image],[0], mask, [bins], [0,256]) <NEW_LINE> hist = cv2.normalize(hist, hist) <NEW_LINE> return hist.flatten()
Extract histogram from grascale version of image Args: image: (H x W x C) 3D BGR image array of type np.uint8 bins: number of bins to use for histogram mask: check _descriptor(first function in file) Returns: 1D array of type np.float32 containing histogram feautures of image
625941be7b25080760e39375
def normalize(images): <NEW_LINE> <INDENT> return images / 255.0 - 0.5
Normalizes the input between -0.5 and +0.5
625941bed7e4931a7ee9de38
def remove_spurious_landmarks(self): <NEW_LINE> <INDENT> landmark_positions_ = [] <NEW_LINE> landmark_covariances_ = [] <NEW_LINE> landmark_counters_ = [] <NEW_LINE> for i in range(self.number_of_landmarks()): <NEW_LINE> <INDENT> if self.landmark_counters[i] >= 0: <NEW_LINE> <INDENT> landmark_positions_.append(self.landmark_positions[i]) <NEW_LINE> landmark_covariances_.append(self.landmark_covariances[i]) <NEW_LINE> landmark_counters_.append(self.landmark_counters[i]) <NEW_LINE> <DEDENT> <DEDENT> self.landmark_positions = landmark_positions_ <NEW_LINE> self.landmark_covariances = landmark_covariances_ <NEW_LINE> self.landmark_counters = landmark_counters_
Remove all landmarks which have a counter less than zero.
625941be96565a6dacc8f5e7
def __init__(self, jsondict=None, strict=True, **kwargs): <NEW_LINE> <INDENT> self.contact = None <NEW_LINE> self.copyright = None <NEW_LINE> self.date = None <NEW_LINE> self.description = None <NEW_LINE> self.document = None <NEW_LINE> self.experimental = None <NEW_LINE> self.fhirVersion = None <NEW_LINE> self.format = None <NEW_LINE> self.implementation = None <NEW_LINE> self.implementationGuide = None <NEW_LINE> self.imports = None <NEW_LINE> self.instantiates = None <NEW_LINE> self.jurisdiction = None <NEW_LINE> self.kind = None <NEW_LINE> self.messaging = None <NEW_LINE> self.name = None <NEW_LINE> self.patchFormat = None <NEW_LINE> self.publisher = None <NEW_LINE> self.purpose = None <NEW_LINE> self.rest = None <NEW_LINE> self.software = None <NEW_LINE> self.status = None <NEW_LINE> self.title = None <NEW_LINE> self.url = None <NEW_LINE> self.useContext = None <NEW_LINE> self.version = None <NEW_LINE> super(CapabilityStatement, self).__init__(jsondict=jsondict, strict=strict, **kwargs)
Initialize all valid properties. :raises: FHIRValidationError on validation errors, unless strict is False :param dict jsondict: A JSON dictionary to use for initialization :param bool strict: If True (the default), invalid variables will raise a TypeError
625941be2eb69b55b151c7c7
def __add__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Matrix): <NEW_LINE> <INDENT> raise MatrixError('cannot add %s to a matrix' % type(other)) <NEW_LINE> <DEDENT> if self.numrows != other.numrows or self.numcols != other.numcols: <NEW_LINE> <INDENT> raise MatrixError('cannot add matrices of different sizes') <NEW_LINE> <DEDENT> m = Matrix(self.numrows, self.numcols) <NEW_LINE> for row, col, element in self: <NEW_LINE> <INDENT> m[row][col] = element + other[row][col] <NEW_LINE> <DEDENT> return m
Addition: `self + other`.
625941be377c676e912720c4
def download_indices(self, conf, output): <NEW_LINE> <INDENT> return self._handle_elasticsearch( conf.etl.chembl, create_folder(os.path.join(output.prod_dir, conf.etl.chembl.path)) )
Download the specified indices from Elastic Search into the given output folder :param conf: configuration object :param output: output folder information :return: downloaded files listing
625941bed18da76e235323ee
def test_line_cell_info(): <NEW_LINE> <INDENT> ip = get_ipython() <NEW_LINE> ip.magics_manager.register(FooFoo) <NEW_LINE> oinfo = ip.object_inspect("foo") <NEW_LINE> assert oinfo["found"] is True <NEW_LINE> assert oinfo["ismagic"] is True <NEW_LINE> oinfo = ip.object_inspect("%%foo") <NEW_LINE> assert oinfo["found"] is True <NEW_LINE> assert oinfo["ismagic"] is True <NEW_LINE> assert oinfo["docstring"] == FooFoo.cell_foo.__doc__ <NEW_LINE> oinfo = ip.object_inspect("%foo") <NEW_LINE> assert oinfo["found"] is True <NEW_LINE> assert oinfo["ismagic"] is True <NEW_LINE> assert oinfo["docstring"] == FooFoo.line_foo.__doc__
%%foo and %foo magics are distinguishable to inspect
625941bedc8b845886cb544f
def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(DreamScreen, self).__init__(*args, **kwargs) <NEW_LINE> self._hdmi_input = None <NEW_LINE> self._hdmi_input_1_name = None <NEW_LINE> self._hdmi_input_2_name = None <NEW_LINE> self._hdmi_input_3_name = None <NEW_LINE> self._hdmi_active_channels = None
Initialize Base & Specific Attributes.
625941be56b00c62f0f14573
def timezone_test(self, stf, expected_tz): <NEW_LINE> <INDENT> stf.set_timezone() <NEW_LINE> self.assertEqual(stf.timezone, expected_tz)
A generic function to test the timezone determination code.
625941be236d856c2ad446f1
def test_register_ok(self): <NEW_LINE> <INDENT> test_url = base_url + 'api/user/register/' <NEW_LINE> logout_url = base_url + 'api/user/logout/' <NEW_LINE> s.get(logout_url) <NEW_LINE> response = s.post(test_url, json={'name': 'test', 'username': 'user_test', 'email': 'test@email.com', 'password': '123'}) <NEW_LINE> self.assertEqual(response.status_code, 201) <NEW_LINE> s.get(base_url + 'api/authenticate/c86627a2ecb27e0af08ec531423347005ea04dc7c837cad69409358f/') <NEW_LINE> self.delete_test_user()
Register should return 201
625941bec432627299f04b5f
def onReceived(self, data : bytes): <NEW_LINE> <INDENT> for id in self.connChilds: <NEW_LINE> <INDENT> self.plugins_info[id].onReceived(data)
call in receive thread, not UI thread
625941be8c3a8732951582d3
def _snr_single_region(spectrum, region=None): <NEW_LINE> <INDENT> if region is not None: <NEW_LINE> <INDENT> calc_spectrum = extract_region(spectrum, region) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> calc_spectrum = spectrum <NEW_LINE> <DEDENT> if hasattr(spectrum, 'mask') and spectrum.mask is not None: <NEW_LINE> <INDENT> flux = calc_spectrum.flux[~spectrum.mask] <NEW_LINE> uncertainty = calc_spectrum.uncertainty.quantity[~spectrum.mask] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flux = calc_spectrum.flux <NEW_LINE> uncertainty = calc_spectrum.uncertainty.quantity <NEW_LINE> <DEDENT> return np.mean(flux / uncertainty, axis=-1)
Calculate the mean S/N of the spectrum based on the flux and uncertainty in the spectrum. Parameters ---------- spectrum : `~specutils.spectra.spectrum1d.Spectrum1D` The spectrum object overwhich the equivalent width will be calculated. region: `~specutils.utils.SpectralRegion` Region within the spectrum to calculate the SNR. Returns ------- snr : `~astropy.units.Quantity` or list (based on region input) Signal to noise ratio of the spectrum or within the regions Notes ----- This is a helper function for the above `snr()` method.
625941bee76e3b2f99f3a72d
def separate_categories(data): <NEW_LINE> <INDENT> data = data.groupby(["category", "line"], axis=0).sum() <NEW_LINE> other = data.loc["other", :].reset_index() .astype(int).sort_values(by=["line"]) <NEW_LINE> script = data.loc["script-identifiable", :] .reset_index().astype(int).sort_values(by=["line"]) <NEW_LINE> return other, script
Separate the rows concerning "script-identifiable edits" from those concerning "other edits". Also, in each categry, and for each line, calculate the sum of levenshtein distances across all edits for that line. Return two separate DataFrames.
625941bea79ad161976cc060
def test_B(): <NEW_LINE> <INDENT> b_values = np.array([10**-i for i in range(5,-1,-1)]) <NEW_LINE> s_values = np.zeros(b_values.shape) <NEW_LINE> gabor = ComplexGabor("small_lena.jpg", 3, 8) <NEW_LINE> for _ind, beta in enumerate(b_values): <NEW_LINE> <INDENT> s_values[_ind] = compute_LPC_index(gabor, 2, beta) <NEW_LINE> <DEDENT> plt.figure("Test parameter B") <NEW_LINE> plt.xlabel("Beta value") <NEW_LINE> plt.ylabel("S_LPC value") <NEW_LINE> plt.semilogx(b_values, s_values, 'r+', ms=5) <NEW_LINE> plt.savefig(OUTPUT_PATH+"LPC_test_B.png")
Graph displaying of evolution of LPC index regarding beta parameter
625941bed53ae8145f87a18f
def change_settings(new_settings={}, file=None): <NEW_LINE> <INDENT> gl = globals() <NEW_LINE> if file is not None: <NEW_LINE> <INDENT> execfile(file) <NEW_LINE> gl.update(locals()) <NEW_LINE> <DEDENT> gl.update(new_settings)
Changes the value of configuration variables.
625941be3eb6a72ae02ec3f1
def get_content_type(self, **kw): <NEW_LINE> <INDENT> if "mimetype" in kw: <NEW_LINE> <INDENT> return kw.get("mimetype") <NEW_LINE> <DEDENT> if "content_type" in kw: <NEW_LINE> <INDENT> return kw.get("content_type") <NEW_LINE> <DEDENT> return None
extract the mimetype from the keywords
625941bef8510a7c17cf9616
def _generate_provenance(self): <NEW_LINE> <INDENT> exposure = definition( self._provenance['exposure_keywords']['exposure']) <NEW_LINE> hazard = definition( self._provenance['hazard_keywords']['hazard']) <NEW_LINE> hazard_category = definition(self._provenance['hazard_keywords'][ 'hazard_category']) <NEW_LINE> set_provenance( self._provenance, provenance_impact_function_name, self.name) <NEW_LINE> set_provenance( self._provenance, provenance_impact_function_title, self.title) <NEW_LINE> set_provenance( self._provenance, provenance_map_title, get_map_title(hazard, exposure, hazard_category)) <NEW_LINE> set_provenance( self._provenance, provenance_map_legend_title, exposure['layer_legend_title']) <NEW_LINE> set_provenance( self._provenance, provenance_analysis_question, get_analysis_question(hazard, exposure)) <NEW_LINE> if self.requested_extent: <NEW_LINE> <INDENT> set_provenance( self._provenance, provenance_requested_extent, self.requested_extent.asWktCoordinates()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> set_provenance( self._provenance, provenance_requested_extent, None) <NEW_LINE> <DEDENT> set_provenance( self._provenance, provenance_analysis_extent, self.analysis_extent.exportToWkt()) <NEW_LINE> set_provenance( self._provenance, provenance_data_store_uri, self.datastore.uri_path) <NEW_LINE> set_provenance(self._provenance, provenance_notes, self.notes()) <NEW_LINE> set_provenance( self._provenance, provenance_action_checklist, self.action_checklist()) <NEW_LINE> self._provenance_ready = True
Function to generate provenance at the end of the IF.
625941becb5e8a47e48b79c9
def in_order_traversal(self, node=None, queue=None) -> Queue: <NEW_LINE> <INDENT> if queue is None: <NEW_LINE> <INDENT> temp = Queue() <NEW_LINE> self.in_order_traversal(self.root, temp) <NEW_LINE> return temp <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if node is not None: <NEW_LINE> <INDENT> self.in_order_traversal(node.left, queue) <NEW_LINE> queue.enqueue(node.value) <NEW_LINE> self.in_order_traversal(node.right, queue) <NEW_LINE> <DEDENT> <DEDENT> return queue
This is a recursive method that does not need to take any parameters. The only time this function takes parameters is when it is recursively calling itself so that it can create a queu to return to the user. This function will create a queue that traverses the left subtree, visits the root node, then traverses the right subtree.
625941be15fb5d323cde0a27
def api_get(query, raise_exception=True): <NEW_LINE> <INDENT> log.info('api_get: %s' % query) <NEW_LINE> response = requests.get(API_URL + query) <NEW_LINE> if not response.status_code == requests.codes.ok: <NEW_LINE> <INDENT> msg = 'api_get to %s failed for reason %s' % ( query, response.status_code) <NEW_LINE> log.error(msg) <NEW_LINE> if raise_exception: <NEW_LINE> <INDENT> raise ParseError(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> return json.loads(response.text)
Make a GET request to Breadcrumb API
625941be99cbb53fe6792b02
def _on_adjust_scroll_position(self, source, result): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> position_changed = source.call_finish(result)[0] <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error('Adjust scroll position:' + str(e)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if position_changed: <NEW_LINE> <INDENT> self._get_scroll_position()
Obtain result to adjust scroll position Args: source (GObject.Object) result (Gio.AsyncResult)
625941be2ae34c7f2600d04d
def test__api_cart_addall_one_browse(self): <NEW_LINE> <INDENT> url = '/__cart/reset.json?reqno=42' <NEW_LINE> expected = {'recycled_count': 0, 'count': 0, 'reqno': 42} <NEW_LINE> self._run_json_equal(url, expected) <NEW_LINE> url = '/__cart/addall.json?view=browse&volumeid=VGISS_6210&reqno=456' <NEW_LINE> expected = {'recycled_count': 0, 'count': 906, 'error': False, 'reqno': 456} <NEW_LINE> self._run_json_equal(url, expected) <NEW_LINE> url = '/__cart/status.json?reqno=456' <NEW_LINE> expected = {'recycled_count': 0, 'count': 906, 'reqno': 456} <NEW_LINE> self._run_json_equal(url, expected)
[test_cart_api.py] /__cart/addall: one time no download browse
625941be167d2b6e31218ab1
def convert_example_to_feature(ex_idx, example, label_map, task): <NEW_LINE> <INDENT> prefix = getattr(example, 'prefix', None) <NEW_LINE> tokend_sent = TokenizedSentence(example.sentence, prefix=prefix) <NEW_LINE> if prefix is None: <NEW_LINE> <INDENT> ignore_mask = tokend_sent.attention_mask <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ignore_mask = tokend_sent.token_type_ids <NEW_LINE> <DEDENT> if task == 'labeling': <NEW_LINE> <INDENT> token_spans = {} <NEW_LINE> for span, tag in example.labels.items(): <NEW_LINE> <INDENT> token_spans[tokend_sent.char_span_to_token_span(span)] = label_map[f'B-{tag}'] <NEW_LINE> <DEDENT> label_ids = generate_label_ids(token_spans, ignore_mask) <NEW_LINE> <DEDENT> elif task == 'multihead_labeling': <NEW_LINE> <INDENT> label_ids = [] <NEW_LINE> for head in label_map: <NEW_LINE> <INDENT> token_spans = {} <NEW_LINE> for span, tag in example.labels.items(): <NEW_LINE> <INDENT> if tag == head: <NEW_LINE> <INDENT> token_spans[tokend_sent.char_span_to_token_span(span)] = 1 <NEW_LINE> <DEDENT> <DEDENT> label_ids.append( generate_label_ids(token_spans, ignore_mask) ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError(f'Error! Invalid task: `f{task}`') <NEW_LINE> <DEDENT> if ex_idx < 5: <NEW_LINE> <INDENT> logger.info("*** Example ***") <NEW_LINE> logger.info("ex_idx: %s", ex_idx) <NEW_LINE> logger.info("tokens: %s", ' '.join(map(str, tokend_sent.tokens))) <NEW_LINE> logger.info("input_ids: %s", ' '.join(map(str, tokend_sent.input_ids))) <NEW_LINE> logger.info("attention_mask: %s", ' '.join(map(str, tokend_sent.attention_mask))) <NEW_LINE> logger.info("token_type_ids: %s", ' '.join(map(str, tokend_sent.token_type_ids))) <NEW_LINE> logger.info("label_ids: %s", ' '.join(map(str, label_ids))) <NEW_LINE> <DEDENT> return InputFeature( input_ids=tokend_sent.input_ids, attention_mask=tokend_sent.attention_mask, token_type_ids=tokend_sent.token_type_ids, label_ids=label_ids, )
先不考虑span重叠的情况
625941be5f7d997b871749b0
def conditions(self): <NEW_LINE> <INDENT> raise AssertionError
@return: A list of the conditions that are represented by this C{ConditionalProbDist}. Use the indexing operator to access the probability distribution for a given condition. @rtype: C{list}
625941be97e22403b379ceb4
def get_user(self, login=github.GithubObject.NotSet): <NEW_LINE> <INDENT> assert login is github.GithubObject.NotSet or isinstance(login, str), login <NEW_LINE> if login is github.GithubObject.NotSet: <NEW_LINE> <INDENT> return AuthenticatedUser.AuthenticatedUser(self.__requester, {}, {"url": "/user"}, completed=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> headers, data = self.__requester.requestJsonAndCheck( "GET", "/users/" + login ) <NEW_LINE> return github.NamedUser.NamedUser(self.__requester, headers, data, completed=True)
:calls: `GET /users/:user <http://developer.github.com/v3/users>`_ or `GET /user <http://developer.github.com/v3/users>`_ :param login: string :rtype: :class:`github.NamedUser.NamedUser`
625941be55399d3f055885ce
def short_version(version): <NEW_LINE> <INDENT> return float('.'.join(version.split('.')[0:2]))
Get a shorter version, only with the major and minor version. :param version: The version. :type version: str :return 'major.minor' version number. :rtype float
625941be30dc7b7665901885
def export_fractures_to_file(self, path): <NEW_LINE> <INDENT> num_intersecting_pairs = len(self.intersecting_pairs) <NEW_LINE> fracture_points_array = np.array( [np.array(list(f.nodes.irange())) for f in self.fractures], dtype=object) <NEW_LINE> intersection_points_array = np.array( self.intersection_points).reshape((num_intersecting_pairs, 2)) <NEW_LINE> intersecting_pairs_array = np.array( self.intersecting_pairs).reshape((num_intersecting_pairs, 2)) + 1 <NEW_LINE> mdict = {"fractures": fracture_points_array, "intersections": intersection_points_array, "intersecting_pairs": intersecting_pairs_array} <NEW_LINE> savemat(path, mdict)
Write the fractures matrix to a text file.
625941be596a8972360899df
def setup_platform(hass, config, add_devices, discovery_info=None): <NEW_LINE> <INDENT> username = config.get(CONF_USERNAME) <NEW_LINE> password = config.get(CONF_PASSWORD) <NEW_LINE> gateway = config.get("gateway") <NEW_LINE> try: <NEW_LINE> <INDENT> sinope_data = SinopeData(username, password, gateway) <NEW_LINE> sinope_data.update() <NEW_LINE> <DEDENT> except requests.exceptions.HTTPError as error: <NEW_LINE> <INDENT> _LOGGER.error("Failt login: %s", error) <NEW_LINE> return False <NEW_LINE> <DEDENT> name = config.get(CONF_NAME) <NEW_LINE> devices = [] <NEW_LINE> for id, device in sinope_data.data.items(): <NEW_LINE> <INDENT> devices.append(SinopeThermostat(sinope_data, id, '{} {}'.format(name, device["info"]["name"]))) <NEW_LINE> <DEDENT> add_devices(devices, True)
Set up the Sinope sensor.
625941bebaa26c4b54cb103e
def _identify_channels(self, name): <NEW_LINE> <INDENT> channel_list = [] <NEW_LINE> if self.nuke_node.Class() in ["Cryptomatte", "Encryptomatte"]: <NEW_LINE> <INDENT> channel_list = self.nuke_node.node('Input1').channels() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> channel_list = self.nuke_node.channels() <NEW_LINE> <DEDENT> channel_regex = re.compile(r'({name}\d+)\.(?:red|r)$'.format(name=name)) <NEW_LINE> pure_channels = [] <NEW_LINE> for channel in channel_list: <NEW_LINE> <INDENT> match = channel_regex.match(channel) <NEW_LINE> if match: <NEW_LINE> <INDENT> pure_channels.append(match.group(1)) <NEW_LINE> <DEDENT> <DEDENT> return sorted(pure_channels)[:len(GIZMO_CHANNEL_KNOBS)]
from a name like "cryptoObject", gets sorted channels, such as cryptoObject00, cryptoObject01, cryptoObject02
625941beaad79263cf390959
def load_result(folder): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = np.load(folder+'result.npy').item() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = np.load(folder+'result_old.npy').item() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> result = None <NEW_LINE> <DEDENT> <DEDENT> return result
Try to load the result file
625941be23849d37ff7b2fac
def deleteChildren(self): <NEW_LINE> <INDENT> for item in self.takeChildren(): <NEW_LINE> <INDENT> del item
Deletes all children (cleaning the subtree)
625941bed4950a0f3b08c26d
def syllableCount(word): <NEW_LINE> <INDENT> sounds = PHODICT[word] <NEW_LINE> return sum(int(sound[0] in "AEIOU") for sound in sounds)
returns the number of syllables in word
625941be7b25080760e39376
def A_2loop(t): <NEW_LINE> <INDENT> r = b[0]**2/b[1] <NEW_LINE> z = -r*exp(-1-t*r) <NEW_LINE> return -b[0]/(b[1]*(1+lambertw(z,-1).real) )
implicit solution for 2-loop coupling
625941be8e05c05ec3eea28e
def get_access_token(self, app_id="139232676651716", app_secret="e96006b4dba8fc282171d061c462f437", profile_id="631299528"): <NEW_LINE> <INDENT> warnings.filterwarnings('ignore', category=DeprecationWarning) <NEW_LINE> oauth_args = dict(client_id = app_id, client_secret = app_secret, grant_type = 'client_credentials') <NEW_LINE> oauth_curl_cmd = ['curl', 'https://graph.facebook.com/oauth/access_token?' + urllib.urlencode(oauth_args)] <NEW_LINE> oauth_response = subprocess.Popen(oauth_curl_cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE).communicate()[0] <NEW_LINE> try: <NEW_LINE> <INDENT> oauth_access_token = str((json.loads(str(oauth_response)))["access_token"]) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> assert False, "Unable to grab access token" <NEW_LINE> <DEDENT> return oauth_access_token
This method returns access token Args: Returns:
625941be44b2445a33931fb3
def active(self): <NEW_LINE> <INDENT> return self.filter(active__exact=True)
Return a list of active objects
625941be8a349b6b435e808f
def test_host_mapping(self): <NEW_LINE> <INDENT> H = Host <NEW_LINE> a, b = H("http://google.com"), H("http://yahoo.com") <NEW_LINE> vals = {a: 1, b: 2} <NEW_LINE> self.assertEqual(1, vals[a]) <NEW_LINE> self.assertEqual(2, vals[b])
Ensure that hosts can be used in hashes
625941bebf627c535bc130ea
def get_inv_type(xmlid, xmlinv): <NEW_LINE> <INDENT> for xmlinvElem in xmlinv: <NEW_LINE> <INDENT> varName = xmlinvElem[0].get("value") <NEW_LINE> if (varName == xmlid.get("value")): <NEW_LINE> <INDENT> elem = xmlinvElem <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> assert elem != None, "The concrete variable defined its type only in invariant from Machine. Please, define the types also in Invariant of Implementation" <NEW_LINE> function = elem[1] <NEW_LINE> op = function.get("operator") <NEW_LINE> if (op =="-->" or op=="+->" or op==">->" or op=="-->"): <NEW_LINE> <INDENT> domxml = function[1] <NEW_LINE> ranxml = function[2] <NEW_LINE> res = ast.make_array_type(domxml,ranxml) <NEW_LINE> <DEDENT> assert res != None <NEW_LINE> return res
Input: - xmlid: a XML node representing an identifier - xmlinv: a XML node Output: - The right node that define the variable type
625941beb830903b967e9829
def _cohort_settings(course_key): <NEW_LINE> <INDENT> return _get_course_cohort_settings_representation( cohorts.get_course_cohort_id(course_key), cohorts.is_course_cohorted(course_key) )
Fetch a course current cohort settings.
625941bec432627299f04b60
def reevaluate_node(self, node, remotes, build_mode, update): <NEW_LINE> <INDENT> assert node.binary == BINARY_UNKNOWN <NEW_LINE> output = node.conanfile.output <NEW_LINE> node._package_id = None <NEW_LINE> default_package_id_mode = self._cache.config.default_package_id_mode <NEW_LINE> default_python_requires_id_mode = self._cache.config.default_python_requires_id_mode <NEW_LINE> output.info("Unknown binary for %s, computing updated ID" % str(node.ref)) <NEW_LINE> self._compute_package_id(node, default_package_id_mode, default_python_requires_id_mode) <NEW_LINE> output.info("Updated ID: %s" % node.package_id) <NEW_LINE> if node.recipe in (RECIPE_CONSUMER, RECIPE_VIRTUAL): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> assert node.package_id != PACKAGE_ID_UNKNOWN <NEW_LINE> node.binary = None <NEW_LINE> self._evaluate_node(node, build_mode, update, remotes) <NEW_LINE> output.info("Binary for updated ID from: %s" % node.binary) <NEW_LINE> if node.binary == BINARY_BUILD: <NEW_LINE> <INDENT> output.info("Binary for the updated ID has to be built")
reevaluate the node is necessary when there is some PACKAGE_ID_UNKNOWN due to package_revision_mode
625941be76e4537e8c35158c
@dcc.reroute <NEW_LINE> def node_has_shape_of_type(node, shape_type): <NEW_LINE> <INDENT> raise NotImplementedError()
Returns whether or not given node has a shape of the given type attached to it :param node: str :param shape_type: str :return: bool
625941be3c8af77a43ae36ba
def process_data(self): <NEW_LINE> <INDENT> df = pd.read_csv('tennis.csv', header=None) <NEW_LINE> self.data = df.iloc[1:15, 1:6].values
Construct training data set
625941be7d43ff24873a2bba
def socket_connect(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> print('Connecting to '+ self.serverHost + ':' + str(self.serverPort) +' ...') <NEW_LINE> self.socket.connect((self.serverHost, self.serverPort)) <NEW_LINE> <DEDENT> except socket.error as e: <NEW_LINE> <INDENT> print("Socket connection error: " + str(e)) <NEW_LINE> time.sleep(5) <NEW_LINE> raise <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.socket.send(str.encode(socket.gethostname())) <NEW_LINE> <DEDENT> except socket.error as e: <NEW_LINE> <INDENT> print("Cannot send hostname to server: " + str(e)) <NEW_LINE> raise <NEW_LINE> <DEDENT> return
Connect to a remote socket
625941bec4546d3d9de7294e
def _move_process_to_set(self, cpuset: str, pid: int = os.getpid()): <NEW_LINE> <INDENT> self._cset("proc --move --force --pid %d --threads %s" % (pid, cpuset))
Move the process with the given id into the passed cpu set. :param cpuset: name of the passed cpu set :param pid: id of the process to move, default is the own process
625941be442bda511e8be338
def __init__(self, **kw): <NEW_LINE> <INDENT> self.titlesearch = kw.get('titlesearch', 0) <NEW_LINE> self.case = kw.get('case', 0) <NEW_LINE> self.regex = kw.get('regex', 0)
@keyword titlesearch: treat all terms as title searches @keyword case: do case sensitive search @keyword regex: treat all terms as regular expressions
625941be31939e2706e4cd89
def get_word_user(self,word_voc,max_length): <NEW_LINE> <INDENT> deal_word_user_q = self.deal_word_user_q[:max_length] <NEW_LINE> self.word_user_q = [word_voc[c] if c in word_voc else 0for c in deal_word_user_q] <NEW_LINE> self.word_user_q.extend([0]*(max_length-len(deal_word_user_q))) <NEW_LINE> del deal_word_user_q
得到词级别级用户问题的表示,用ID代替,包括paddding :param char_voc: 词级别的词典 :param max_length: padding长度 :return:
625941becad5886f8bd26ef6
def test_token_create(self): <NEW_LINE> <INDENT> result = {'retcode': 0, 'stdout': 'token'} <NEW_LINE> salt_mock = { 'cmd.run_all': MagicMock(return_value=result), } <NEW_LINE> with patch.dict(kubeadm.__salt__, salt_mock): <NEW_LINE> <INDENT> assert kubeadm.token_create() == 'token' <NEW_LINE> salt_mock['cmd.run_all'].assert_called_with( ['kubeadm', 'token', 'create'] )
Test kuebadm.token_create without parameters
625941be8e71fb1e9831d6c6
def __mul__(self, k): <NEW_LINE> <INDENT> return Vec2d(self.x * k, self.y * k)
возвращает произведение вектора на число
625941bed10714528d5ffbfc
def ladderLength(self, beginWord, endWord, wordList): <NEW_LINE> <INDENT> if not wordList and endWord not in wordList: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> queue = [beginWord] <NEW_LINE> result = 1 <NEW_LINE> flag = [False for _ in range(len(wordList))] <NEW_LINE> minResult = None <NEW_LINE> while queue: <NEW_LINE> <INDENT> tmp = [] <NEW_LINE> result += 1 <NEW_LINE> while queue: <NEW_LINE> <INDENT> cur = queue.pop() <NEW_LINE> for i, word in enumerate(wordList): <NEW_LINE> <INDENT> if flag[i]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> dis = 0 <NEW_LINE> for index, c in enumerate(word): <NEW_LINE> <INDENT> if c != cur[index]: <NEW_LINE> <INDENT> dis += 1 <NEW_LINE> if dis > 1: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if dis == 1: <NEW_LINE> <INDENT> if word == endWord: <NEW_LINE> <INDENT> if minResult: <NEW_LINE> <INDENT> minResult = min(minResult, result) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> minResult = result <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flag[i] = True <NEW_LINE> tmp.append(word) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> queue.extend(tmp) <NEW_LINE> <DEDENT> return minResult if minResult is not None else 0
:type beginWord: str :type endWord: str :type wordList: List[str] :rtype: int
625941be925a0f43d2549d90
def _finfCRW(th, beta): <NEW_LINE> <INDENT> return th - np.tan(th) - np.pi/(1.0 -beta)
Function that gives f(theta) = 0 when theta = theta_infty Version for spherically symmetric flow, as in CRW
625941bea8ecb033257d2fea
def pplot(*args, **kwargs): <NEW_LINE> <INDENT> figsize = kwargs.pop('figsize', dflt_figsize) <NEW_LINE> if isinstance(args[0], (pd.DataFrame, pd.Series)): <NEW_LINE> <INDENT> df, *args = args <NEW_LINE> plot_func = getattr(df, 'plot') <NEW_LINE> kwargs['figsize'] = figsize <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> plot_func = plt.plot <NEW_LINE> if figsize: <NEW_LINE> <INDENT> plt.figure(figsize=figsize) <NEW_LINE> <DEDENT> <DEDENT> return plot_func(*args, **kwargs)
Long plot. Plots with a long default figsize
625941bea8370b77170527bd
def GetWindow(self, column=None): <NEW_LINE> <INDENT> column = (column is not None and [column] or [self._owner.GetMainColumn()])[0] <NEW_LINE> if column >= len(self._wnd): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self._wnd[column]
Returns the window associated to the item. :param `column`: if not ``None``, an integer specifying the column index. If it is ``None``, the main column index is used.
625941bebe8e80087fb20b63
def download_from_original_site(image_url, image_name): <NEW_LINE> <INDENT> if not is_file_exisst(image_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = requests.get(image_url) <NEW_LINE> if result.status_code == 200: <NEW_LINE> <INDENT> print(image_name + ' 下载中') <NEW_LINE> with open(image_name, 'wb') as f: <NEW_LINE> <INDENT> for chunk in result.iter_content(chunk_size=1024): <NEW_LINE> <INDENT> f.write(chunk) <NEW_LINE> <DEDENT> <DEDENT> print(image_name + ' 下载完成') <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print(image_url + ' 下载失败') <NEW_LINE> download_from_original_site(image_url, image_name)
从乃团服务器下载图片
625941be71ff763f4b5495a3
def exhaust(stream_or_iterable): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> iterator = iter(stream_or_iterable) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> iterator = ChunkIter(stream_or_iterable, 16384) <NEW_LINE> <DEDENT> collections.deque(iterator, maxlen=0)
Exhaust an iterator or stream.
625941be97e22403b379ceb5
def move_to_gripper_pose(self, pose, speed=None, timeout=None, threshold=None, straight_line=False): <NEW_LINE> <INDENT> if speed is None: <NEW_LINE> <INDENT> speed = self.config.LIMB_MAX_VELOCITY_RATIO <NEW_LINE> <DEDENT> if timeout is None: <NEW_LINE> <INDENT> timeout = self.config.LIMB_TIMEOUT <NEW_LINE> <DEDENT> if threshold is None: <NEW_LINE> <INDENT> threshold = self.config.LIMB_POSITION_THRESHOLD <NEW_LINE> <DEDENT> if straight_line: <NEW_LINE> <INDENT> start_pose = self.end_effector <NEW_LINE> end_pose = pose <NEW_LINE> if self._motion_planning is None: <NEW_LINE> <INDENT> rospy.logwarn('No motion planning is available. Use the hacky' 'way of straight line motion.') <NEW_LINE> delta_position = end_pose.position - start_pose.position <NEW_LINE> end_effector_step = 0.1 <NEW_LINE> num_waypoints = int(np.linalg.norm(delta_position) / end_effector_step) <NEW_LINE> waypoints = [] <NEW_LINE> for i in range(num_waypoints): <NEW_LINE> <INDENT> scale = float(i) / float(num_waypoints) <NEW_LINE> position = start_pose.position + delta_position * scale <NEW_LINE> euler = end_pose.euler <NEW_LINE> waypoint = Pose([position, euler]) <NEW_LINE> waypoints.append(waypoint) <NEW_LINE> <DEDENT> waypoints.append(end_pose) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> waypoints = [start_pose, end_pose] <NEW_LINE> <DEDENT> self.move_along_gripper_path(waypoints, speed=speed) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> positions = self._compute_inverse_kinematics( self.config.END_EFFCTOR_NAME, pose) <NEW_LINE> if positions is None: <NEW_LINE> <INDENT> rospy.logerr("IK response is not valid.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.move_to_joint_positions( positions=positions, speed=speed, timeout=timeout, threshold=threshold)
Move the arm to the specified joint positions. Please refer to: https://rethinkrobotics.github.io/intera_sdk_docs/5.0.4/intera_interface/html/intera_interface.limb.Limb-class.html See the parent class.
625941be3cc13d1c6d3c7297
def test_PLAN_unit_planner_Planning_schedule_insertSequenceBetween2Sequences(self): <NEW_LINE> <INDENT> self.s17.save() <NEW_LINE> self.s18.save() <NEW_LINE> self.s1718.save() <NEW_LINE> self.planning.initFromDB(self.planStart, self.planEnd) <NEW_LINE> self.planning.schedule() <NEW_LINE> self.subtest_PLAN_unit_planner_Planning_schedule_orderNSequences()
precond: 2 sequences action: Test if a third sequence can be inserted between two others postcond: the third sequence is planned between the first and second.
625941be50485f2cf553ccb5
def drawGrid(img, X=range(X0, X0+DX*(NCOL+1), DX), Y=range(Y0, Y0+DY*(NROW+1), DY), color=0, thick=2): <NEW_LINE> <INDENT> img = img.copy() <NEW_LINE> grid = [((x, 0), (x, img.shape[0])) for x in X] + [ ((0, y), (img.shape[1], y)) for y in Y] <NEW_LINE> for line in grid: <NEW_LINE> <INDENT> img = cv2.line(img, *line, color, thick) <NEW_LINE> <DEDENT> return img
draw the grid on the straightened image
625941bea17c0f6771cbdf6f
def geron(a, b, c): <NEW_LINE> <INDENT> p = (a + b + c) / 2 <NEW_LINE> s = sqrt(p * (p - a) * (p - b) * (p - c)) <NEW_LINE> return round(s, 2)
Вычисляет площадь треугольника по формуле Герона: >>> geron(3, 4, 5) 6.0 >>> geron(7, 8, 9) 26.83
625941bedd821e528d63b0c7
def __isub__(self, n: "ptrdiff_t") -> "swig::SwigPyIterator &": <NEW_LINE> <INDENT> return _numpyutilstest.SwigPyIterator___isub__(self, n)
__isub__(SwigPyIterator self, ptrdiff_t n) -> SwigPyIterator
625941be76d4e153a657ea4c
def addField(self, field): <NEW_LINE> <INDENT> self.fields.append(field)
Add a Field (column).
625941beb545ff76a8913d32
def _checkTessellationDivisions(self, minimumDivisions, refinementFactors, widget): <NEW_LINE> <INDENT> limit = 100000 <NEW_LINE> min = 1 <NEW_LINE> ref = 1 <NEW_LINE> totalDivisions = [1,1,1] <NEW_LINE> totalSize3d = 1 <NEW_LINE> for i in range(3): <NEW_LINE> <INDENT> if i < len(minimumDivisions): <NEW_LINE> <INDENT> min = minimumDivisions[i] <NEW_LINE> <DEDENT> if i < len(refinementFactors): <NEW_LINE> <INDENT> ref = refinementFactors[i] <NEW_LINE> <DEDENT> totalDivisions[i] = min*ref <NEW_LINE> totalSize3d = totalSize3d*min*ref <NEW_LINE> <DEDENT> totalSize2d = totalDivisions[0]*totalDivisions[1] <NEW_LINE> if totalDivisions[1]*totalDivisions[2] > totalSize2d: <NEW_LINE> <INDENT> totalSize2d = totalDivisions[1]*totalDivisions[2] <NEW_LINE> <DEDENT> if totalDivisions[2]*totalDivisions[0] > totalSize2d: <NEW_LINE> <INDENT> totalSize2d = totalDivisions[2]*totalDivisions[0] <NEW_LINE> <DEDENT> totalSize1d = totalDivisions[0] <NEW_LINE> if totalDivisions[1] > totalSize1d: <NEW_LINE> <INDENT> totalSize1d = totalDivisions[1] <NEW_LINE> <DEDENT> if totalDivisions[2] > totalSize1d: <NEW_LINE> <INDENT> totalSize1d = totalDivisions[2] <NEW_LINE> <DEDENT> meshSize3d = ZincRegion_getMeshSize(self._rootRegion, 3) <NEW_LINE> limit3d = limit <NEW_LINE> if limit3d < meshSize3d: <NEW_LINE> <INDENT> limit3d = meshSize3d <NEW_LINE> <DEDENT> overLimit3d = totalSize3d*meshSize3d > limit3d <NEW_LINE> meshSize2d = ZincRegion_getMeshSize(self._rootRegion, 2) <NEW_LINE> limit2d = limit <NEW_LINE> if limit2d < meshSize2d: <NEW_LINE> <INDENT> limit2d = meshSize2d <NEW_LINE> <DEDENT> overLimit2d = totalSize2d*meshSize2d > limit2d <NEW_LINE> meshSize1d = ZincRegion_getMeshSize(self._rootRegion, 1) <NEW_LINE> limit1d = limit <NEW_LINE> if limit1d < meshSize1d: <NEW_LINE> <INDENT> limit1d = meshSize1d <NEW_LINE> <DEDENT> overLimit1d = totalSize1d*meshSize1d > limit1d <NEW_LINE> if not (overLimit1d or overLimit2d or overLimit3d): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> 
widget.blockSignals(True) <NEW_LINE> msgBox = QtGui.QMessageBox() <NEW_LINE> msgBox.setWindowTitle("ZincView") <NEW_LINE> divisionsText = "*".join('{:d}'.format(value) for value in totalDivisions) <NEW_LINE> msgBox.setText("Fine tessellation divisions " + divisionsText + " can take a long time to apply.") <NEW_LINE> msgBox.setInformativeText("Please confirm action.") <NEW_LINE> msgBox.setStandardButtons(QtGui.QMessageBox.Apply | QtGui.QMessageBox.Cancel) <NEW_LINE> msgBox.setDefaultButton(QtGui.QMessageBox.Cancel) <NEW_LINE> result = msgBox.exec_() <NEW_LINE> widget.blockSignals(False) <NEW_LINE> return result == QtGui.QMessageBox.Apply
Check total divisions not too high or get user confirmation Call with both of the vectors set, each must have at least one component. Returns True if can apply.
625941be7047854f462a1328
def overlaps(self, other): <NEW_LINE> <INDENT> if self.chrom != other.chrom: return False <NEW_LINE> if self.start > other.end: return False <NEW_LINE> if other.start > self.end: return False <NEW_LINE> return True
check for overlap with the other interval
625941bebe383301e01b53a8
def decoding(self): <NEW_LINE> <INDENT> decoded, _ = tf.nn.ctc_greedy_decoder(self.logits_t, self.seq_lens) <NEW_LINE> sparse_decode_op = decoded[0] <NEW_LINE> self.decode_op = tf.sparse_to_dense(sparse_decode_op.indices, sparse_decode_op.dense_shape, sparse_decode_op.values) <NEW_LINE> return self.decode_op
Predict labels from learned sequence model.
625941be009cb60464c632d0
def __init__(self, *args): <NEW_LINE> <INDENT> this = _openmm.new_AmoebaGeneralizedKirkwoodForce(*args) <NEW_LINE> try: <NEW_LINE> <INDENT> self.this.append(this) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.this = this
__init__(self) -> AmoebaGeneralizedKirkwoodForce __init__(self, other) -> AmoebaGeneralizedKirkwoodForce This class implements an implicit solvation force using the generalized Kirkwood/Grycuk model. To use this class, create an AmoebaGeneralizedKirkwoodForce object, then call addParticle() once for each particle in the System to define its parameters. The number of particles for which you define parameters must be equal to the number of particles in the System, or else an exception will be thrown when you try to create a Context. After a particle has been added, you can modify its force field parameters by calling setParticleParameters(). This will have no effect on Contexts that already exist unless you call updateParametersInContext().
625941bed268445f265b4d8b
def get(self, *args): <NEW_LINE> <INDENT> params = self.parse_query_string(args[0]) <NEW_LINE> module = self.get_module(params) <NEW_LINE> impl = module.Crash(config=self.context) <NEW_LINE> return impl.get(**params)
Called when a get HTTP request is executed to /crash
625941be73bcbd0ca4b2bf93
def predict_proba_with_ds(self, query, predictions, probabilities, neighbors=None, distances=None, DFP_mask=None): <NEW_LINE> <INDENT> if query.shape[0] != probabilities.shape[0]: <NEW_LINE> <INDENT> raise ValueError('The arrays query and predictions must have the same number of samples. query.shape is {}' 'and predictions.shape is {}' .format(query.shape, predictions.shape)) <NEW_LINE> <DEDENT> selected_classifiers = self.select(query) <NEW_LINE> ensemble_proba = probabilities[np.arange(probabilities.shape[0])[:, None], selected_classifiers, :] <NEW_LINE> predicted_proba = np.mean(ensemble_proba, axis=1) <NEW_LINE> return predicted_proba
Predicts the label of the corresponding query sample. Parameters ---------- query : array of shape = [n_samples, n_features] The test examples. predictions : array of shape = [n_samples, n_classifiers] Predictions of the base classifiers for all test examples. probabilities : array of shape = [n_samples, n_classifiers, n_classes] Probabilities estimates of each base classifier for all test examples. neighbors : array of shale = [n_samples, n_neighbors] Indices of the k nearest neighbors according for each test sample distances : array of shale = [n_samples, n_neighbors] Distances of the k nearest neighbors according for each test sample DFP_mask : array of shape = [n_samples, n_classifiers] Mask containing 1 for the selected base classifier and 0 otherwise. Returns ------- predicted_proba : array of shape = [n_samples, n_classes] Posterior probabilities estimates for each test example
625941be3eb6a72ae02ec3f2
def _make_schema( db_name: Optional[str] = None, ) -> Tuple[DatabaseConnector, Dict[str, Dict[str, str]]]: <NEW_LINE> <INDENT> db_connector = make_test_database(db_name) <NEW_LINE> default_parameters = get_default_parameters <NEW_LINE> final_parameters = FinalParameters(default_parameters) <NEW_LINE> final_parameters_dict = final_parameters.get_final_parameters() <NEW_LINE> final_parameters_as_sql_types = final_parameters.cast_to_sql_type( final_parameters_dict ) <NEW_LINE> db_creator = DatabaseCreator(db_connector) <NEW_LINE> db_creator.create_all_schema_tables(final_parameters_as_sql_types) <NEW_LINE> return db_connector, final_parameters_as_sql_types
Create the schema (i.e. make all the tables) of the database. Parameters ---------- db_name : None or str Name of the database Returns ------- db_connector : DatabaseConnector The database connection object final_parameters_as_sql_types : dict Final parameters as sql types
625941be76e4537e8c35158d
@deprecate_positional_args <NEW_LINE> def time_stretch(y, *, rate, **kwargs): <NEW_LINE> <INDENT> if rate <= 0: <NEW_LINE> <INDENT> raise ParameterError("rate must be a positive number") <NEW_LINE> <DEDENT> stft = core.stft(y, **kwargs) <NEW_LINE> stft_stretch = core.phase_vocoder( stft, rate=rate, hop_length=kwargs.get("hop_length", None), n_fft=kwargs.get("n_fft", None), ) <NEW_LINE> len_stretch = int(round(y.shape[-1] / rate)) <NEW_LINE> y_stretch = core.istft(stft_stretch, dtype=y.dtype, length=len_stretch, **kwargs) <NEW_LINE> return y_stretch
Time-stretch an audio series by a fixed rate. Parameters ---------- y : np.ndarray [shape=(..., n)] audio time series. Multi-channel is supported. rate : float > 0 [scalar] Stretch factor. If ``rate > 1``, then the signal is sped up. If ``rate < 1``, then the signal is slowed down. **kwargs : additional keyword arguments. See `librosa.decompose.stft` for details. Returns ------- y_stretch : np.ndarray [shape=(..., round(n/rate))] audio time series stretched by the specified rate See Also -------- pitch_shift : pitch shifting librosa.phase_vocoder : spectrogram phase vocoder pyrubberband.pyrb.time_stretch : high-quality time stretching using RubberBand Examples -------- Compress to be twice as fast >>> y, sr = librosa.load(librosa.ex('choice')) >>> y_fast = librosa.effects.time_stretch(y, rate=2.0) Or half the original speed >>> y_slow = librosa.effects.time_stretch(y, rate=0.5)
625941be4a966d76dd550f29
def make_if(predicate, consequent, alternative): <NEW_LINE> <INDENT> return ["if", predicate, consequent, alternative]
Given `predicate` part, `consequent` part and `alternative` part, construct a new `if` expression.
625941be71ff763f4b5495a4
def getNodeStatusReport(self): <NEW_LINE> <INDENT> report = StatusReport() <NEW_LINE> for inst in self.coordinators: <NEW_LINE> <INDENT> if (inst.status == DbClusterStatus.INSTANCE_STATUS_NORMAL): <NEW_LINE> <INDENT> report.cooNormal += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> report.cooAbnormal += 1 <NEW_LINE> <DEDENT> <DEDENT> for inst in self.gtms: <NEW_LINE> <INDENT> if (inst.status == DbClusterStatus.INSTANCE_STATUS_PRIMARY): <NEW_LINE> <INDENT> report.gtmPrimary += 1 <NEW_LINE> <DEDENT> elif (inst.status == DbClusterStatus.INSTANCE_STATUS_STANDBY): <NEW_LINE> <INDENT> if (inst.connStatus == DbClusterStatus.CONN_STATUS_NORMAL): <NEW_LINE> <INDENT> report.gtmStandby += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> report.gtmAbnormal += 1 <NEW_LINE> <DEDENT> <DEDENT> elif (inst.status == DbClusterStatus.INSTANCE_STATUS_DOWN): <NEW_LINE> <INDENT> report.gtmDown += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> report.gtmAbnormal += 1 <NEW_LINE> <DEDENT> <DEDENT> for inst in self.datanodes: <NEW_LINE> <INDENT> if (inst.status == DbClusterStatus.INSTANCE_STATUS_PRIMARY): <NEW_LINE> <INDENT> report.dnPrimary += 1 <NEW_LINE> <DEDENT> elif (inst.status == DbClusterStatus.INSTANCE_STATUS_STANDBY): <NEW_LINE> <INDENT> if (inst.haStatus == DbClusterStatus.HA_STATUS_NORMAL): <NEW_LINE> <INDENT> report.dnStandby += 1 <NEW_LINE> <DEDENT> elif (inst.haStatus == DbClusterStatus.HA_STATUS_BUILD): <NEW_LINE> <INDENT> report.dnBuild += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> report.dnAbnormal += 1 <NEW_LINE> <DEDENT> <DEDENT> elif (inst.status == DbClusterStatus.INSTANCE_STATUS_DOWN): <NEW_LINE> <INDENT> report.dnDown += 1 <NEW_LINE> <DEDENT> elif (inst.status == DbClusterStatus.INSTANCE_STATUS_DUMMY): <NEW_LINE> <INDENT> report.dnDummy += 1 <NEW_LINE> <DEDENT> elif inst.status == DbClusterStatus.INSTANCE_STATUS_CASCADE_STANDBY: <NEW_LINE> <INDENT> report.dn_cascade_standby += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> report.dnAbnormal += 1 <NEW_LINE> 
<DEDENT> <DEDENT> for inst in self.fencedUDFs: <NEW_LINE> <INDENT> if (inst.status == DbClusterStatus.INSTANCE_STATUS_NORMAL): <NEW_LINE> <INDENT> report.fencedUDFNormal += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> report.fencedUDFAbnormal += 1 <NEW_LINE> <DEDENT> <DEDENT> return report
function : Get the status report of node input : NA output : report
625941bed10714528d5ffbfd
def countCommon(field): <NEW_LINE> <INDENT> total = Register.objects.filter(Common_Illness=field) <NEW_LINE> return total.count()
A function to count all Common Illnesses for Metric values
625941bede87d2750b85fcac
def event_m10_25_11030(): <NEW_LINE> <INDENT> assert EventEnded(11000) != 0 <NEW_LINE> assert event_m10_25_x148(z12=6001) <NEW_LINE> EndMachine() <NEW_LINE> Quit()
Enemy whose number changes_Death count_02
625941be4c3428357757c246
def will_exit_to_host(self, host): <NEW_LINE> <INDENT> for line in self.desc.exitpolicy: <NEW_LINE> <INDENT> if (host & line.netmask) == line.ip: <NEW_LINE> <INDENT> if line.match: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False
@param host: the address to check against the exit policy @type host: int @returns: True if there is ANY way this router will allow exits to host.
625941bed164cc6175782c6a
def set_SandboxMode(self, value): <NEW_LINE> <INDENT> super(GetSessionIDInputSet, self)._set_input('SandboxMode', value)
Set the value of the SandboxMode input for this Choreo. ((optional, boolean) Indicates that the request should be made to the sandbox endpoint instead of the production endpoint. Set to 1 to enable sandbox mode.)
625941be01c39578d7e74d57
def validate_constraints(self): <NEW_LINE> <INDENT> struct_value = self.get_struct_value() <NEW_LINE> self.validate_struct_value(struct_value)
Validate if the current VapiStruct instance satisfies all the constraints of this VapiStruct type. :raise :class:`vmware.vapi.exception.CoreException` if a constraint is not satisfied
625941beadb09d7d5db6c6ae
def sdb_get(uri, opts): <NEW_LINE> <INDENT> if not isinstance(uri, string_types): <NEW_LINE> <INDENT> return uri <NEW_LINE> <DEDENT> if not uri.startswith('sdb://'): <NEW_LINE> <INDENT> return uri <NEW_LINE> <DEDENT> comps = uri.replace('sdb://', '').split('/') <NEW_LINE> if len(comps) < 2: <NEW_LINE> <INDENT> return uri <NEW_LINE> <DEDENT> profile = opts.get(comps[0], {}) <NEW_LINE> if 'driver' not in profile: <NEW_LINE> <INDENT> return uri <NEW_LINE> <DEDENT> fun = '{0}.get'.format(profile['driver']) <NEW_LINE> query = comps[1] <NEW_LINE> loaded_db = salt.loader.sdb(opts, fun) <NEW_LINE> return loaded_db[fun](query, profile=profile)
Get a value from a db, using a uri in the form of ``sdb://<profile>/<key>``. If the uri provided does not start with ``sdb://``, then it will be returned as-is.
625941beeab8aa0e5d26da74
def return_fit(cl, re): <NEW_LINE> <INDENT> cd = (0.0247*cl**2.49*re**-1.11 + 2.03e-7*cl**12.7*re**-0.338 + 6.35e10*cl**-0.243*re**-3.43 + 6.49e-6*cl**-1.9*re**-0.681)**(1/3.72) <NEW_LINE> return cd
polar fit for the JHO1 airfoil
625941be5fcc89381b1e15d9
def create_parser(self): <NEW_LINE> <INDENT> parser = reqparse.RequestParser() <NEW_LINE> parser.add_argument( 'n', type=float, help='Northern border must be specified as double value') <NEW_LINE> parser.add_argument( 'e', type=float, help='Eastern border must be specified as double value') <NEW_LINE> parser.add_argument( 'w', type=float, help='Western border must be specified as double value') <NEW_LINE> parser.add_argument( 's', type=float, help='Southern border must be specified as double value') <NEW_LINE> parser.add_argument('res', type=float, help='Resolution must be specified as double value') <NEW_LINE> parser.add_argument( 'nsres', type=float, help='North-South resolution must be specified as double value') <NEW_LINE> parser.add_argument( 'ewres', type=float, help='East-West resolution must be specified as double value') <NEW_LINE> parser.add_argument( 'raster', type=str, help='The raster layer name from which the region should be used, ' 'with mapset information: name@mapset') <NEW_LINE> parser.add_argument( 'align', type=str, help='The raster layer name from which the region should be aligned, ' 'with mapset information: name@mapset') <NEW_LINE> parser.add_argument( 'zoom', type=str, help='The raster layer name to which the region should be zoomed, ' 'with mapset information: name@mapset') <NEW_LINE> parser.add_argument( 'vector', type=str, help='The vector layer name from which the region should be used, ' 'with mapset information: name@mapset') <NEW_LINE> return parser
Create the g.region option arguments The parameter contain: n : for north s : for south e : for east w : for west res : resolution of the region ewres: east-west resolution nsres: north-south resolution raster: Raster layer name with mapset to set the region from align: Raster layer name with mapset to align the region to zoom: Raster layer name with mapset to zoom the region to vector: Vector layer name with mapset to set the region from Returns: The argument parser
625941be29b78933be1e55cd
def match_script_to_templates(self): <NEW_LINE> <INDENT> for script2 in TxScript.TEMPLATES: <NEW_LINE> <INDENT> r = [] <NEW_LINE> pc1 = pc2 = 0 <NEW_LINE> while 1: <NEW_LINE> <INDENT> if pc1 == len(self.script) and pc2 == len(script2): <NEW_LINE> <INDENT> return r <NEW_LINE> <DEDENT> opcode1, data1, pc1 = tools.get_opcode(self.script, pc1) <NEW_LINE> opcode2, data2, pc2 = tools.get_opcode(script2, pc2) <NEW_LINE> if opcode2 == opcodes.OP_PUBKEY: <NEW_LINE> <INDENT> l1 = len(data1) <NEW_LINE> if l1 < 33 or l1 > 120: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> r.append((opcode2, data1)) <NEW_LINE> <DEDENT> elif opcode2 == opcodes.OP_PUBKEYHASH: <NEW_LINE> <INDENT> if len(data1) != 160/8: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> r.append((opcode2, data1)) <NEW_LINE> <DEDENT> elif (opcode1, data1) != (opcode2, data2): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> raise SolvingError("don't recognize output script")
Examine the script passed in by tx_out_script and see if it matches the form of one of the templates in TEMPLATES. If so, return the form it matches; otherwise, return None.
625941be498bea3a759b99cc
@api_view(['POST']) <NEW_LINE> @isAuthenticate <NEW_LINE> @RbacService('chat:request:action') <NEW_LINE> def approve_reject(request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> validator = ApproverejectValidator(request.data) <NEW_LINE> valid = validator.validate() <NEW_LINE> if valid: <NEW_LINE> <INDENT> is_approve = request.data.get('is_approve') <NEW_LINE> user_id = request.data.get('user_id') <NEW_LINE> logged_user_id = request.user_id <NEW_LINE> if ChatList.objects.filter(user_id=logged_user_id, created_by=user_id, is_request_accepted=1).exists(): <NEW_LINE> <INDENT> return Response({'error': Messages.CHAT_REQUEST_ALREADY_ACCEPTED}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> if not ChatList.objects.filter(user_id=logged_user_id, created_by=user_id).exists(): <NEW_LINE> <INDENT> return Response({'error': Messages.NO_CHAT_REQUEST}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> if int(is_approve) == 1: <NEW_LINE> <INDENT> ChatList.objects.filter(created_by=user_id, user_id=logged_user_id).update( is_request_accepted = 1, last_message_updated_at=calendar.timegm(time.gmtime()) ) <NEW_LINE> msg = Messages.CHAT_REQUEST_ACCEPTED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = Messages.CHAT_REQUEST_REJECTED <NEW_LINE> ChatList.objects.filter(created_by=user_id, user_id=logged_user_id).delete() <NEW_LINE> <DEDENT> return Response({'message': msg}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response({'error':requestErrorMessagesFormate(validator.get_message())}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> <DEDENT> except Exception as exception: <NEW_LINE> <INDENT> logerror('chat/views.py/approve_reject', str(exception)) <NEW_LINE> return Response({'error':str(exception)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
@api {POST} v1/user/chat/approve-reject Approve-reject chat request @apiName Approve-reject chat request @apiGroup Chat @apiHeader {String} authorization Users unique access-token @apiParam {integer} user_id event id from notification list @apiParam {integer} is_approve 1 -> accept 0 -> reject @apiSuccessExample Success-Response: HTTP/1.1 201 OK { "message": "Request accepted" } HTTP/1.1 201 OK { "message": "Request rejected" }
625941be85dfad0860c3ad76
def delete(self): <NEW_LINE> <INDENT> for tag_option in self.tag_options: <NEW_LINE> <INDENT> self.remove_tag_option(tag_option) <NEW_LINE> <DEDENT> return self.client.delete_portfolio( AcceptLanguage=self.Meta.language, Id=self.id )
Delete this portfolio object by calling boto3 delete_potfolio. https://boto3.readthedocs.io/en/latest/reference/services/servicecatalog.html#ServiceCatalog.Client.delete_portfolio
625941be7047854f462a1329
def validate_route_nexthops(routes, interfaces, sources, enable_color, quiet=False):
    """Validate FIB routes against link-monitor interfaces.

    Checks every nexthop of every route for three failure modes:
    the egress interface missing or down, a v4 nexthop outside the
    interface subnet, and a v6 nexthop that is not link-local.

    :param routes: list ip_types.UnicastRoute (structured routes)
    :param interfaces: dict<interface-name, InterfaceDetail>
    :param sources: unused in this function — TODO confirm whether callers rely on it
    :param enable_color: colorize the PASS/FAIL banner via click
    :param quiet: suppress printed output; error captions are returned instead
    :return: (is_valid, error_msg) — error_msg is a list of caption
        strings, populated only when quiet=True and validation fails
    """
    # Failure caption -> list of offending routes, for grouped reporting.
    invalid_routes = defaultdict(list)
    MISSING_NEXTHOP = 'Nexthop does not exist'
    INVALID_SUBNET = 'Nexthop address is not in the same subnet as interface'
    INVALID_LINK_LOCAL = 'Nexthop address is not link local'
    error_msg = []
    for route in routes:
        dest = ipnetwork.sprint_prefix(route.dest)
        # Per-route accumulation of bad nexthops, keyed by failure caption.
        invalid_nexthop = defaultdict(list)
        for nh in route.nexthops:
            # Egress interface must be known and up.
            if nh.ifName not in interfaces or not interfaces[nh.ifName].info.isUp:
                invalid_nexthop[MISSING_NEXTHOP].append(ip_nexthop_to_str(nh))
                continue
            if ipnetwork.ip_version(nh.addr) == 4:
                # '31' — subnet comparison at /31 granularity (presumably
                # point-to-point links — TODO confirm).
                # NOTE(review): this flags the nexthop when it mismatches
                # ANY v4 address on the interface, so an interface with
                # several addresses may flag a nexthop that matches one of
                # them — confirm intended semantics.
                for addr in interfaces[nh.ifName].info.v4Addrs:
                    if not ipnetwork.is_same_subnet(nh.addr, addr.addr, '31'):
                        invalid_nexthop[INVALID_SUBNET].append(ip_nexthop_to_str(nh))
            elif (ipnetwork.ip_version(nh.addr) == 6
                    and not ipnetwork.is_link_local(nh.addr)):
                # v6 nexthops are expected to be link-local addresses.
                invalid_nexthop[INVALID_LINK_LOCAL].append(ip_nexthop_to_str(nh))
        # Fold this route's failures into the global per-error tables.
        for k, v in invalid_nexthop.items():
            invalid_routes[k].extend(build_routes([dest], v))
    if not invalid_routes:
        if not quiet:
            if enable_color:
                click.echo(click.style('PASS', bg='green', fg='black'))
            else:
                click.echo('PASS')
            print('Route validation successful')
        return True, error_msg
    if not quiet:
        if enable_color:
            click.echo(click.style('FAIL', bg='red', fg='black'))
        else:
            click.echo('FAIL')
        print('Route validation failed')
    # Report (or collect, when quiet) one caption per failure category.
    for err, route_db in invalid_routes.items():
        caption = 'Error: {}'.format(err)
        if not quiet:
            print_routes(caption, route_db)
        else:
            error_msg.append(caption)
    return False, error_msg
Validate FIB routes against link-monitor (lm) interfaces: each nexthop must egress a known, up interface; v4 nexthops must be in the interface subnet and v6 nexthops must be link-local. :param routes: list ip_types.UnicastRoute (structured routes) :param interfaces: dict<interface-name, InterfaceDetail> :return: (is_valid, error_msg) tuple; error_msg holds caption strings when quiet=True
625941be56b00c62f0f14574
def serialize_numpy(self, buff, numpy):
    """serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        # servo1: packed as a single unsigned byte ('B').
        buff.write(_get_struct_B().pack(self.servo1))
        _x = self.servoangle
        # servoangle: fixed 8-byte field — packed element-wise ('8B')
        # when given as a list/tuple, otherwise as a raw 8-byte
        # string ('8s'). type() (not isinstance) is deliberate in this
        # generated code: subclasses take the string path.
        if type(_x) in [list, tuple]:
            buff.write(_get_struct_8B().pack(*_x))
        else:
            buff.write(_get_struct_8s().pack(_x))
    # On failure, report the offending value: locals().get('_x', self)
    # falls back to the whole message if _x was never bound — do not
    # rename _x without updating this lookup.
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module
625941be0a50d4780f666dad