code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
@register.filter(name='course_organization') <NEW_LINE> def course_organization(course_key): <NEW_LINE> <INDENT> return CourseKey.from_string(course_key).org | Retrieve course organization from course key.
Arguments:
course_key (str): Course key.
Returns:
str: Course organization. | 625941be21a7993f00bc7bf9 |
def select_voting_classifier_predict(modelName='', modelType='', model=None, df_input=None, xdata_s=None, pred_type='pred'): <NEW_LINE> <INDENT> print(f'Predicting model: {modelName}') <NEW_LINE> xdata_s = pd.DataFrame(xdata_s) <NEW_LINE> xdata_s.columns = xdata_s.columns.astype(str) <NEW_LINE> if model['fs'] is None: <NEW_LINE> <INDENT> X_selected = xdata_s <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> X_selected = model['fs'].transform(xdata_s) <NEW_LINE> <DEDENT> df_input.loc[:, pred_type] = model['estimator'].predict(X_selected) <NEW_LINE> return df_input | A function to make predictions with an input VotingClassifier. Works
the same way as select_model_predict(). | 625941be2ae34c7f2600d040 |
def _num_records(x_data, y_data, num_records=None): <NEW_LINE> <INDENT> if type(x_data) in [list, tuple]: <NEW_LINE> <INDENT> for x in x_data: <NEW_LINE> <INDENT> num_records = _num_records(x, y_data, num_records) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if num_records is None: <NEW_LINE> <INDENT> num_records = x_data.size(0) <NEW_LINE> if y_data is not None: <NEW_LINE> <INDENT> assert num_records == y_data.size(0), "data and labels must be the same size" <NEW_LINE> num_records = y_data.size(0) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> assert num_records == x_data.size(0), "all inputs sets must have same number of records" <NEW_LINE> num_records = x_data.size(0) <NEW_LINE> <DEDENT> <DEDENT> return num_records | 检查输入的x_data和y_data条数相同 | 625941beb57a9660fec3378f |
def test_read_size_partial(self): <NEW_LINE> <INDENT> self.socket.write(response.Response.pack('hello')[:-1]) <NEW_LINE> self.assertEqual(self.connection.read(), []) | Returns one response size is complete, but content is partial | 625941bebf627c535bc130dd |
def __init__(self, label): <NEW_LINE> <INDENT> self.curNode = OrderedDict({ "Label": label, "BreakUp":[] }) | Initialization. | 625941be8c0ade5d55d3e8ce |
def king_white_not_under_threat_close(pos, board): <NEW_LINE> <INDENT> from Models.Horse import Horse <NEW_LINE> from Models.Horse import possible_position_horse_black <NEW_LINE> from Models.Pawn import Pawn <NEW_LINE> new_pos = (pos[0] + 1, pos[1] + 1) <NEW_LINE> if black_piece_on_pos(new_pos, board): <NEW_LINE> <INDENT> if type(board[new_pos[0]][new_pos[1]]) is Pawn: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> new_pos = (pos[0] + 1, pos[1] - 1) <NEW_LINE> if black_piece_on_pos(new_pos, board): <NEW_LINE> <INDENT> if type(board[new_pos[0]][new_pos[1]]) is Pawn: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> for row in range(pos[0] - 1, pos[0] + 2): <NEW_LINE> <INDENT> for col in range(pos[1] - 1, pos[1] + 2): <NEW_LINE> <INDENT> if black_piece_on_pos((row, col), board): <NEW_LINE> <INDENT> if type(board[row][col]) is King: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> for row in range(pos[0] - 2, pos[0] + 3): <NEW_LINE> <INDENT> for col in range(pos[1] - 2, pos[1] + 3): <NEW_LINE> <INDENT> if (row != pos[0]) and (col != pos[1]) and black_piece_on_pos((row, col), board): <NEW_LINE> <INDENT> if (type(board[row][col]) is Horse) and possible_position_horse_black((row, col), pos, board): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return True | Consider a 5x5-cube around the king, check if there are any pawns, horses or the king of the other team in here which
are a threat to this position. Consider each individual to improve performance. Pos will be within the board.
:param pos: the position that must be checked
:param board: our board
:return True if not a threat | 625941bed58c6744b4257b6f |
def check_greater_than(name, value, minimum, equality=False): <NEW_LINE> <INDENT> if equality: <NEW_LINE> <INDENT> if value < minimum: <NEW_LINE> <INDENT> msg = 'Unable to set "{0}" to "{1}" since it is less than ' '"{2}"'.format(name, value, minimum) <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if value <= minimum: <NEW_LINE> <INDENT> msg = 'Unable to set "{0}" to "{1}" since it is less than ' 'or equal to "{2}"'.format(name, value, minimum) <NEW_LINE> raise ValueError(msg) | Ensure that an object's value is less than a given value.
Parameters
----------
name : str
Description of the value being checked
value : object
Object to check
minimum : object
Minimum value to check against
equality : bool, optional
Whether equality is allowed. Defaluts to False. | 625941bee64d504609d7474e |
def test_flush(self): <NEW_LINE> <INDENT> mock_cmd = MagicMock(return_value=True) <NEW_LINE> with patch.dict(iptables.__salt__, {'cmd.run': mock_cmd}): <NEW_LINE> <INDENT> self.assertTrue(iptables.flush(table='filter', chain='INPUT', family='ipv4')) | Test if it flush the chain in the specified table,
flush all chains in the specified table if not specified chain. | 625941be4f88993c3716bf79 |
def press_return(self, widget): <NEW_LINE> <INDENT> widget.returnPressed.emit() | Simulate a press of the 'Return' key.
| 625941be24f1403a92600a77 |
def GetByPermtick(self, permtick): <NEW_LINE> <INDENT> pass | GetByPermtick(self: MapFileResolver, permtick: str) -> MapFile
Gets the map file matching the specified permtick
permtick: The permtick to match on
Returns: The map file matching the permtick, or null if
not found | 625941be090684286d50ebf1 |
def ghz(n, fid=1.0): <NEW_LINE> <INDENT> dim = 2**n <NEW_LINE> err = dim*(1.0-fid)/(dim-1.0) <NEW_LINE> up,down = _up_down_() <NEW_LINE> ups,upd = up,down <NEW_LINE> for i in range(n-1): <NEW_LINE> <INDENT> ups = np.kron(ups,up) <NEW_LINE> upd = np.kron(upd,down) <NEW_LINE> <DEDENT> GHZ = (ups+upd)/sqrt(2.0) <NEW_LINE> rho = np.dot(GHZ,GHZ.transpose()) <NEW_LINE> rho = (1.0-err)*rho + err*ide(dim)/dim <NEW_LINE> return rho | to generate GHZ state
Parameters
----------
n: number of qubits
fid: default fidelity
Return: GHZ state,
ie. (|00...0> + |11...1>)/sqrt(2) | 625941befb3f5b602dac359f |
def make_frame(t): <NEW_LINE> <INDENT> update(t) <NEW_LINE> gloo.clear((0, 1, 1, 0)) <NEW_LINE> canvas.on_draw(None) <NEW_LINE> return _screenshot((0, 0, canvas.size[0], canvas.size[1]))[:, :, :3] | Vispy default function to compile convas frames into a movie
| 625941be435de62698dfdb5e |
@manager.command <NEW_LINE> def test(): <NEW_LINE> <INDENT> tests = unittest.TestLoader().discover('project/tests', pattern='test*.py') <NEW_LINE> result = unittest.TextTestRunner(verbosity=2).run(tests) <NEW_LINE> if result.wasSuccessful(): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return 1 | Run tests | 625941bebaa26c4b54cb1031 |
def stop_timer(event): <NEW_LINE> <INDENT> time = _C.cuda_timer_end(event) <NEW_LINE> return time | Given the timer start event, compute the time (in ms) since. | 625941be5510c4643540f2fa |
def center_columns(text, max_width, fillchar=u" "): <NEW_LINE> <INDENT> return fit_text_truncate(text, max_width, just=CENTER, fillchar=fillchar) | alternative to center(); counts multicolumn characters correctly
| 625941bed18da76e235323e2 |
@show.command('sites') <NEW_LINE> @click.argument('group', default='house') <NEW_LINE> def show_sites(group): <NEW_LINE> <INDENT> LaLiParser.showSites(group) | Show sites | 625941bee8904600ed9f1e38 |
def modify_openstack_authentication(self, auth_data, changes): <NEW_LINE> <INDENT> if auth_data.get("type") == "application_credential": <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> app_cred_id = auth_data.pop("application_credential_id", None) <NEW_LINE> app_cred_secret = auth_data.pop("application_credential_secret", None) <NEW_LINE> if app_cred_id and app_cred_secret: <NEW_LINE> <INDENT> auth_data["type"] = "application_credential" <NEW_LINE> auth_data["id"] = app_cred_id <NEW_LINE> auth_data["secret"] = app_cred_secret <NEW_LINE> auth_data.pop("username", None) <NEW_LINE> auth_data.pop("password", None) <NEW_LINE> changes["nova"] = "application_credential" | Modify the OpenStack credential type | 625941beb7558d58953c4e28 |
def write(fn, ds, form='binary'): <NEW_LINE> <INDENT> niml_ds = to_niml(ds) <NEW_LINE> niml_dset.write(fn, niml_ds, form=form) | Write a Dataset to a file in NIML format
Parameters
----------
fn: str
Filename
ds: mvpa2.base.Dataset
Dataset to be stored
form: str
Data format: 'binary' or 'text' or 'base64' | 625941be57b8e32f524833a8 |
def wish_list(self): <NEW_LINE> <INDENT> return super(GameManager, self).get_query_set().filter(owned=False) | retrieves unowned games | 625941be4527f215b584c369 |
def is_cache_valid(self): <NEW_LINE> <INDENT> if os.path.isfile(self.cache_path_cache): <NEW_LINE> <INDENT> mod_time = os.path.getmtime(self.cache_path_cache) <NEW_LINE> current_time = time() <NEW_LINE> if (mod_time + self.cache_max_age) > current_time: <NEW_LINE> <INDENT> if os.path.isfile(self.cache_path_inventory): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False | Determines if the cache files have expired, or if it is still valid | 625941be4f88993c3716bf7a |
def close(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> if self.file_object is not None: <NEW_LINE> <INDENT> self.logger.debug("Closing the file") <NEW_LINE> self.file_object.close() <NEW_LINE> if self.process.poll() is None: <NEW_LINE> <INDENT> self.logger.debug("Killing process (PID: {0})".format(self.process.pid)) <NEW_LINE> try: <NEW_LINE> <INDENT> self.process.kill() <NEW_LINE> <DEDENT> except OSError as error: <NEW_LINE> <INDENT> self.logger.debug(error) <NEW_LINE> <DEDENT> <DEDENT> self.closed = True <NEW_LINE> <DEDENT> self.logger.debug("File Closed") <NEW_LINE> <DEDENT> return | Kills tho process and closes the file
:postcondition: file_object has been closed and self.closed is True | 625941becc40096d61595860 |
def reverseVowels(self, s): <NEW_LINE> <INDENT> v = ['a','e','i','o','u','A','E','I','O','U'] <NEW_LINE> vsaver = [] <NEW_LINE> pos = [] <NEW_LINE> for i,char in enumerate(s): <NEW_LINE> <INDENT> if char in v: <NEW_LINE> <INDENT> vsaver.append(char) <NEW_LINE> pos.append(i) <NEW_LINE> <DEDENT> <DEDENT> vsaver.reverse() <NEW_LINE> out = list(s) <NEW_LINE> for i,char in enumerate(vsaver): <NEW_LINE> <INDENT> out[pos[i]] = char <NEW_LINE> <DEDENT> return "".join(out) | :type s: str
:rtype: str | 625941be498bea3a759b99bf |
@pytest.fixture(scope="session") <NEW_LINE> def get_neutron_security_group_steps(get_neutron_client): <NEW_LINE> <INDENT> def _get_steps(**credentials): <NEW_LINE> <INDENT> return steps.NeutronSecurityGroupSteps( get_neutron_client(**credentials).security_groups) <NEW_LINE> <DEDENT> return _get_steps | Callable session fixture to get neutron security group steps.
Args:
get_neutron_client (function): function to get instantiated neutron
client
Returns:
function: function to get instantiated neutron security group steps | 625941be377c676e912720b8 |
def changelist_view(self, request, extra_context=None): <NEW_LINE> <INDENT> opts = self.model._meta <NEW_LINE> app_label = opts.app_label <NEW_LINE> out = self.get_changelist_context(request) <NEW_LINE> if type(out) != dict: <NEW_LINE> <INDENT> return out <NEW_LINE> <DEDENT> context = out <NEW_LINE> context.update(extra_context or {}) <NEW_LINE> return render_to_response(self.change_list_template or [ 'newman/%s/%s/change_list.html' % (app_label, opts.object_name.lower()), 'newman/%s/change_list.html' % app_label, 'newman/change_list.html' ], context, context_instance=template.RequestContext(request)) | The 'change list' admin view for this model. | 625941be7d847024c06be1c8 |
def get_resource_root_service_by_id(resource_id, db_session): <NEW_LINE> <INDENT> resource = ResourceService.by_resource_id(resource_id, db_session=db_session) <NEW_LINE> if resource is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return get_resource_root_service(resource, db_session=db_session) | Retrieves the service-specialized resource corresponding to the top-level resource in the tree hierarchy.
.. seealso::
- :func:`get_resource_root_service` for same operation but directly using the resource | 625941be4e696a04525c935b |
def add_toc_page(self): <NEW_LINE> <INDENT> assert not self.toc_page <NEW_LINE> self.toc_page = self.add_html('toc.html', '') <NEW_LINE> self.add_spine_item(self.toc_page, False, -100) <NEW_LINE> self.add_guide_item('toc.html', 'Table of Contents', 'toc') | Add a table of contents page to the epub | 625941be67a9b606de4a7dcb |
def project_and_clip_raster(input_raster, reference_raster, output_raster): <NEW_LINE> <INDENT> srs_data = gdal.Open(input_raster, GA_ReadOnly) <NEW_LINE> srs_proj = srs_data.GetProjection() <NEW_LINE> srs_geotrans = srs_data.GetGeoTransform() <NEW_LINE> ref_data = gdal.Open(reference_raster, GA_ReadOnly) <NEW_LINE> ref_proj = ref_data.GetProjection() <NEW_LINE> ref_geotrs = ref_data.GetGeoTransform() <NEW_LINE> Ncols = ref_data.RasterXSize <NEW_LINE> Nrows = ref_data.RasterYSize <NEW_LINE> out_data = gdal.GetDriverByName('GTiff').Create(output_raster, Ncols, Nrows, 1, GDT_Byte) <NEW_LINE> out_data.SetGeoTransform(ref_geotrs) <NEW_LINE> out_data.SetProjection(ref_proj) <NEW_LINE> gdal.ReprojectImage(srs_data,out_data,srs_proj,ref_proj, GRA_NearestNeighbour) <NEW_LINE> del out_data <NEW_LINE> response_dict = {'success': 'True', 'message': 'Raster projection/clipping was successful'} <NEW_LINE> return response_dict | :param input_raster:
:param reference_raster:
:param output_raster:
:return:
For images leave the default nearest neighbor interpolation; else pass the method required | 625941be63d6d428bbe443fe |
def _get_attrs(self, attrs:dict={}): <NEW_LINE> <INDENT> _id = "" <NEW_LINE> _class = "" <NEW_LINE> _style = "" <NEW_LINE> for key, value in attrs.items(): <NEW_LINE> <INDENT> if key == "id": <NEW_LINE> <INDENT> _id = value <NEW_LINE> <DEDENT> elif key == "class": <NEW_LINE> <INDENT> _class = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _style += str(key) + ":" + str(value) + ";" <NEW_LINE> <DEDENT> <DEDENT> return _id, _class, _style | Erzeugt aus einem attrs Element id, class und style Angaben.
Bei Angabe von class und id wird zugeordnet.
Alles andere wird als style verwendet
Parameters
----------
attrs : dict, optional
dict mit Attributen. The default is {}.
Returns
-------
_id : str
Die *id* Angabe aus attrs
_class : str
Die *class* Angabe aus attrs.
_style : str
Die *style* Angabe aus attrs. | 625941becc40096d61595861 |
def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(AccountRatio, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result | Returns the model properties as a dict | 625941bef7d966606f6a9f10 |
def unload(self): <NEW_LINE> <INDENT> for action in self.actions: <NEW_LINE> <INDENT> self.iface.removePluginMenu( self.tr(u'&Geological Data Processing'), action) <NEW_LINE> self.iface.removeToolBarIcon(action) <NEW_LINE> <DEDENT> del self.toolbar | Removes the plugin menu item and icon from QGIS GUI. | 625941be7b25080760e39369 |
def complete(self, code, pos): <NEW_LINE> <INDENT> c = self.jcompleter <NEW_LINE> jres = c.complete(code, pos) <NEW_LINE> return list(_scala_seq_to_py(jres.candidates())) | Performs code completion for a block of Scala code.
Parameters
----------
code : str
Scala code to perform completion on
pos : int
Cursor position
Returns
-------
List[str]
Candidates for code completion | 625941bead47b63b2c509e8f |
def __init__(self, cache_name, clock, max_len=0, expiry_ms=0, reset_expiry_on_get=False): <NEW_LINE> <INDENT> self._cache_name = cache_name <NEW_LINE> self._clock = clock <NEW_LINE> self._max_len = max_len <NEW_LINE> self._expiry_ms = expiry_ms <NEW_LINE> self._reset_expiry_on_get = reset_expiry_on_get <NEW_LINE> self._cache = {} <NEW_LINE> self.metrics = register_cache(cache_name, self._cache) | Args:
cache_name (str): Name of this cache, used for logging.
clock (Clock)
max_len (int): Max size of dict. If the dict grows larger than this
then the oldest items get automatically evicted. Default is 0,
which indicates there is no max limit.
expiry_ms (int): How long before an item is evicted from the cache
in milliseconds. Default is 0, indicating items never get
evicted based on time.
reset_expiry_on_get (bool): If true, will reset the expiry time for
an item on access. Defaults to False. | 625941be38b623060ff0acfe |
def takePictures2(self, numberOfPictures, folderPath, fileName, overwrite): <NEW_LINE> <INDENT> if not self.proxy: <NEW_LINE> <INDENT> self.proxy = ALProxy("ALPhotoCapture") <NEW_LINE> <DEDENT> return self.proxy.takePictures(numberOfPictures, folderPath, fileName, overwrite) | Takes several pictures as quickly as possible
:param int numberOfPictures: Number of pictures to take
:param str folderPath: Folder where the pictures are saved.
:param str fileName: Filename used to save the pictures.
:param bool overwrite: If false and the filename already exists, an error is thrown.
:returns AL::ALValue: List of all saved files: [[filename1, filename2...]] | 625941be66656f66f7cbc0b9 |
def get_java_handler(zip_file_content, handler, main_file): <NEW_LINE> <INDENT> if not is_jar_archive(zip_file_content): <NEW_LINE> <INDENT> with zipfile.ZipFile(BytesIO(zip_file_content)) as zip_ref: <NEW_LINE> <INDENT> jar_entries = [e for e in zip_ref.infolist() if e.filename.endswith('.jar')] <NEW_LINE> if len(jar_entries) != 1: <NEW_LINE> <INDENT> raise ClientError('Expected exactly one *.jar entry in zip file, found %s' % len(jar_entries)) <NEW_LINE> <DEDENT> zip_file_content = zip_ref.read(jar_entries[0].filename) <NEW_LINE> LOG.info('Found jar file %s with %s bytes in Lambda zip archive' % (jar_entries[0].filename, len(zip_file_content))) <NEW_LINE> main_file = new_tmp_file() <NEW_LINE> save_file(main_file, zip_file_content) <NEW_LINE> <DEDENT> <DEDENT> if is_jar_archive(zip_file_content): <NEW_LINE> <INDENT> def execute(event, context): <NEW_LINE> <INDENT> result, log_output = lambda_executors.EXECUTOR_LOCAL.execute_java_lambda( event, context, handler=handler, main_file=main_file) <NEW_LINE> return result <NEW_LINE> <DEDENT> return execute, zip_file_content <NEW_LINE> <DEDENT> raise ClientError(error_response( 'Unable to extract Java Lambda handler - file is not a valid zip/jar files', 400, error_type='ValidationError')) | Creates a Java handler from an uploaded ZIP or JAR.
:type zip_file_content: bytes
:param zip_file_content: ZIP file bytes.
:type handler: str
:param handler: The lambda handler path.
:type main_file: str
:param main_file: Filepath to the uploaded ZIP or JAR file.
:returns: function or flask.Response | 625941beb545ff76a8913d25 |
def render_preference_panel(self, req, panel): <NEW_LINE> <INDENT> if req.method == 'POST': <NEW_LINE> <INDENT> self._do_save(req) <NEW_LINE> req.redirect(req.href.prefs(panel or None)) <NEW_LINE> <DEDENT> team_member = None <NEW_LINE> if req.authname not in [None, 'anonymous'] and Role.TEAM_MEMBER in req.perm: <NEW_LINE> <INDENT> name = req.authname <NEW_LINE> team_member = self.tmmm.get(name=name) <NEW_LINE> if team_member == None: <NEW_LINE> <INDENT> team_member = self.tmmm.create(name=name) <NEW_LINE> <DEDENT> <DEDENT> calendars = list() <NEW_LINE> ac = AgiloCalendar(day=datetime.today()) <NEW_LINE> for cal in range(2): <NEW_LINE> <INDENT> calendars.append(team_member.calendar.get_hours_for_interval(ac.get_first_day(), ac.get_last_day())) <NEW_LINE> ac = ac.next_month() <NEW_LINE> <DEDENT> return 'agilo_prefs_%s.html' % (panel or 'general'), { 'settings': {'session': req.session, 'session_id': req.session.sid}, 'teams': self.tmm.select(), 'team_member': team_member, 'calendars': calendars, } | Process a request for a preference panel. This builds the
Panel for the team preferences | 625941be097d151d1a222d6b |
def id_generator(size=6): <NEW_LINE> <INDENT> import random <NEW_LINE> import string <NEW_LINE> chars = string.ascii_lowercase <NEW_LINE> return proj + "-" + (''.join(random.choice(chars) for _ in range(size))) | Generate a container ID | 625941be32920d7e50b280dd |
def convert_reliefweb_2_sf(): <NEW_LINE> <INDENT> type2label_id = {'crimeviolence':8, 'med':3, 'search':4, 'food':1, 'out-of-domain':9, 'infra':2, 'water':7, 'shelter':5, 'regimechange':10, 'evac':0, 'terrorism':11, 'utils':6} <NEW_LINE> typemap={ 'Safety and Security': ['terrorism', 'crimeviolence'], 'Shelter and Non-Food Items': ['shelter'], 'Food and Nutrition': ['food'], 'HIV/Aids': ['med'], 'Water Sanitation Hygiene': ['water'], 'Health': ['med'], 'Logistics and Telecommunications': ['utils'], 'Recovery and Reconstruction': ['shelter', 'infra'], 'Disaster Management': ['search','evac'], 'Peacekeeping and Peacebuilding': ['regimechange'], 'Coordination': ['out-of-domain'], 'Contributions': ['out-of-domain'], 'Education': ['out-of-domain'], 'Gender': ['out-of-domain'], 'Agriculture': ['out-of-domain'], 'Mine Action': ['out-of-domain'], 'Climate Change and Environment': ['out-of-domain'], 'Protection and Human Rights': ['out-of-domain'] } <NEW_LINE> writefile=codecs.open('/scratch/wyin3/dickens_save_dataset/LORELEI/2019_new_data/reliefweb_2_sf.txt', 'w','utf-8') <NEW_LINE> size=0 <NEW_LINE> with open('/scratch/wyin3/dickens_save_dataset/LORELEI/2019_new_data/reports.csv', mode='r') as csvfile: <NEW_LINE> <INDENT> csv_reader = csv.DictReader(csvfile) <NEW_LINE> for row in csv_reader: <NEW_LINE> <INDENT> title = row['title'] <NEW_LINE> text = row['body'] <NEW_LINE> if len(row['theme']) > 0 and len(row['country']) > 0: <NEW_LINE> <INDENT> theme_list = ast.literal_eval(row['theme']) <NEW_LINE> theam_id_list = [] <NEW_LINE> theam_name_list = [] <NEW_LINE> for theme in theme_list: <NEW_LINE> <INDENT> target_type_list = typemap.get(theme) <NEW_LINE> if target_type_list is not None: <NEW_LINE> <INDENT> for type in target_type_list: <NEW_LINE> <INDENT> if type not in theam_name_list: <NEW_LINE> <INDENT> if len(theam_name_list)==0 or (len(theam_name_list)>0 and type!='out-of-domain'): <NEW_LINE> <INDENT> idd = type2label_id.get(type) <NEW_LINE> 
theam_id_list.append(str(idd)) <NEW_LINE> theam_name_list.append(type) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if len(theam_id_list)>0 and len(theam_name_list)>0: <NEW_LINE> <INDENT> writefile.write(' '.join(theam_id_list)+'\t'+' '.join(theam_name_list)+'\t'+text.strip()+'\n') <NEW_LINE> size+=1 <NEW_LINE> if size %1000==0: <NEW_LINE> <INDENT> print('size:', size) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> writefile.close() <NEW_LINE> csvfile.close() | here, we use the old mapping, so that the training data load function has the same input as BBN | 625941be8e05c05ec3eea281 |
def headline2idx(list_idx, curr_max_length, is_input): <NEW_LINE> <INDENT> if is_input: <NEW_LINE> <INDENT> if len(list_idx) >= curr_max_length - 1: <NEW_LINE> <INDENT> return list_idx[:curr_max_length - 1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> list_idx = list_idx + [eos_tag_location, ] <NEW_LINE> return padding(list_idx, curr_max_length - 1, False) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if len(list_idx) == curr_max_length: <NEW_LINE> <INDENT> list_idx[-1] = eos_tag_location <NEW_LINE> return list_idx <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> list_idx = list_idx + [eos_tag_location, ] <NEW_LINE> return padding(list_idx, curr_max_length, False) | if space add <eos> tag in input case, input size = curr_max_length-1
always add <eos> tag in predication case, size = curr_max_length
always right pad | 625941bed8ef3951e324344c |
def spellcheck(text, **kwargs): <NEW_LINE> <INDENT> r = collect("spellcheck", text=text, **kwargs) <NEW_LINE> return r['text'] | Spellcheck some text. | 625941be099cdd3c635f0b6c |
def sizeToHuman(self, size_in_bytes): <NEW_LINE> <INDENT> radix = int(math.log(max(size_in_bytes, 1), 2)/10) <NEW_LINE> mantissa = size_in_bytes / 2.0**(radix*10) <NEW_LINE> suffix = ' KMGTPE'[radix] <NEW_LINE> return '%3.1f %sB' %(mantissa, suffix) | Convert a size of bytes into a human readable format | 625941be046cf37aa974cc59 |
def continuous_forward(self, x, out_seq_len): <NEW_LINE> <INDENT> out_seq_gen = torch.Tensor() <NEW_LINE> pred, hn = self.forward(x) <NEW_LINE> out_seq_gen = torch.cat((out_seq_gen, pred), dim=1) <NEW_LINE> for i in range(out_seq_len-1): <NEW_LINE> <INDENT> x = pred.unsqueeze(2) <NEW_LINE> out_seq_gen = torch.cat((out_seq_gen, pred), dim=1) <NEW_LINE> pred, hn = self.forward(x, hn) <NEW_LINE> <DEDENT> return out_seq_gen, hn | Use this for continuous output sequence generation of arbitrarily long lengths
:return: output of size seq_length, can generate arbitrarily long sequence of outputs
:param x: only the input sequence to begin with | 625941be30dc7b7665901879 |
def addFile(self,feature_class, feature_type, file): <NEW_LINE> <INDENT> sampleRate, data = wavfile.read(file) <NEW_LINE> features = self.featureExtractor(data, feature_type) <NEW_LINE> self.storeFeatures(features, feature_class) | Add a new audio file to this dataset. This class extracts features
from the audio files and stores them in the feature vector.
@param feature_class: The class of the audio file(e.g. walking, wc, toothbrush)
@param file: The audio file | 625941be57b8e32f524833a9 |
def _send_packet(self): <NEW_LINE> <INDENT> cmd = self._crnt_cmd <NEW_LINE> if cmd.get_empty(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> max_packets = self._interface.get_packet_count() <NEW_LINE> if len(self._commands_to_read) >= max_packets: <NEW_LINE> <INDENT> self._read_packet() <NEW_LINE> <DEDENT> data = cmd.encode_data() <NEW_LINE> try: <NEW_LINE> <INDENT> self._interface.write(list(data)) <NEW_LINE> <DEDENT> except Exception as exception: <NEW_LINE> <INDENT> self._abort_all_transfers(exception) <NEW_LINE> raise <NEW_LINE> <DEDENT> self._commands_to_read.append(cmd) <NEW_LINE> self._crnt_cmd = _Command(self._packet_size) | Send a single packet to the interface
This function guarentees that the number of packets
that are stored in daplink's buffer (the number of
packets written but not read) does not exceed the
number supported by the given device. | 625941be99cbb53fe6792af7 |
def sinc(x): <NEW_LINE> <INDENT> return where(equal(x, 0.0), 1.0, old_div(sin(pi * x), (pi * x))) | sinc(x) returns sin(pi*x)/(pi*x) at all points of array x.
| 625941beff9c53063f47c105 |
def _on_scheme(self, c, remaining): <NEW_LINE> <INDENT> if c in SCHEME_CHARS: <NEW_LINE> <INDENT> self._buffer += c.lower() <NEW_LINE> <DEDENT> elif c == ":": <NEW_LINE> <INDENT> if self.state_override is not None: <NEW_LINE> <INDENT> if (self._buffer in SPECIAL_SCHEMES) != ( self.url.scheme in SPECIAL_SCHEMES ): <NEW_LINE> <INDENT> raise _UrlParserReturn() <NEW_LINE> <DEDENT> elif ( self.url.includes_credentials or self.url.port is not None ) and self._buffer == "file": <NEW_LINE> <INDENT> raise _UrlParserReturn() <NEW_LINE> <DEDENT> elif self.url.scheme == "file" and ( self.url.hostname is None or self.url.hostname == "" ): <NEW_LINE> <INDENT> raise _UrlParserReturn() <NEW_LINE> <DEDENT> <DEDENT> self.url._scheme = self._buffer <NEW_LINE> if self.state_override is not None: <NEW_LINE> <INDENT> if ( self.url.scheme in SPECIAL_SCHEMES and SPECIAL_SCHEMES[self.url.scheme] == self.url.port ): <NEW_LINE> <INDENT> self.url._port = None <NEW_LINE> <DEDENT> raise _UrlParserReturn() <NEW_LINE> <DEDENT> self._buffer = "" <NEW_LINE> if self.url.scheme == "file": <NEW_LINE> <INDENT> if not remaining.startswith("//"): <NEW_LINE> <INDENT> self.validation_error = True <NEW_LINE> <DEDENT> self._state = PARSER_STATE_FILE <NEW_LINE> <DEDENT> elif ( self.url.scheme in SPECIAL_SCHEMES and self.base is not None and self.base.scheme == self.url.scheme ): <NEW_LINE> <INDENT> self._state = PARSER_STATE_SPECIAL_RELATIVE_OR_AUTHORITY <NEW_LINE> <DEDENT> elif self.url.scheme in SPECIAL_SCHEMES: <NEW_LINE> <INDENT> self._state = PARSER_STATE_SPECIAL_AUTHORITY_SLASHES <NEW_LINE> <DEDENT> elif remaining.startswith("/"): <NEW_LINE> <INDENT> self._state = PARSER_STATE_PATH_OR_AUTHORITY <NEW_LINE> self._pointer += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.url.cannot_be_base_url = True <NEW_LINE> self.url._path.append("") <NEW_LINE> self._state = PARSER_STATE_CANNOT_BE_BASE_URL <NEW_LINE> <DEDENT> <DEDENT> elif self.state_override is None: <NEW_LINE> <INDENT> self._buffer = "" 
<NEW_LINE> self._state = PARSER_STATE_NO_SCHEME <NEW_LINE> self._pointer = -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.validation_error = True <NEW_LINE> raise UrlParserError() | Handles the SCHEME state. | 625941bef8510a7c17cf960a |
def __call__(self): <NEW_LINE> <INDENT> r = self.rel_space.sample() <NEW_LINE> return RelationHypothesis( relation=r, rel_space=self.rel_space, distractor_thing_space=self.distractor_thing_space ) | generates a single RelationHypothesis | 625941bed164cc6175782c5d |
def diagnose(self, symptoms): <NEW_LINE> <INDENT> node = self.root <NEW_LINE> return self.diagnose_helper(node, symptoms) | :param symptoms: the list of symptoms we use to diagnose the illness
:return: the name of the illness our tree diagnosed | 625941be460517430c39409b |
def tearDown(self): <NEW_LINE> <INDENT> del self.place | Deletes the instance of Place | 625941be8e7ae83300e4aedc |
def str2datetime(self, s): <NEW_LINE> <INDENT> return datetime.datetime(*[int(y) for y in [x for x in re.split(r'[\s+/:-]', s.strip()) if x][:6]]) | Convert a timestamp string to a datetime object | 625941be63f4b57ef000102f |
def _write_data(self): <NEW_LINE> <INDENT> with h5py.File(self._out_fname, "w", libver='latest') as out_f: <NEW_LINE> <INDENT> for key in self._result: <NEW_LINE> <INDENT> if "type" in self._result[key]: <NEW_LINE> <INDENT> out_f.create_dataset(self._result[key]['path'], data=self._result[key]['data'], dtype=self._result[key]['type']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out_f.create_dataset(self._result[key]['path'], data=self._result[key]['data']) <NEW_LINE> <DEDENT> <DEDENT> metadata_base_path = "collection" <NEW_LINE> today = str(date.today()) <NEW_LINE> out_f.create_dataset("{}/creation_date".format(metadata_base_path), data=today) <NEW_LINE> name = "{}/{}".format(metadata_base_path, "version") <NEW_LINE> out_f.create_dataset(name, data=__version__) <NEW_LINE> name = "{}/{}".format(metadata_base_path, "method") <NEW_LINE> out_f.create_dataset(name, data=self._method) <NEW_LINE> out_f.flush() | Writes the result dictionary and additional metadata into a file.
| 625941be5f7d997b871749a4 |
def one_pass_with_error() -> float: <NEW_LINE> <INDENT> error = 0.0 <NEW_LINE> for (x, y) in data: <NEW_LINE> <INDENT> error += self.back_propagate(x, y, learning_rate, momentum_factor) <NEW_LINE> <DEDENT> return error | Computes a single backpropagation pass keeping track of error
Returns:
error of pass | 625941befb3f5b602dac35a0 |
def contains_letters(response): <NEW_LINE> <INDENT> return bool(re.search(r'[A-Za-z]', response)) | A user inputs a value which is then checked against the following regex
:param response: User input
:return: A boolean value if a user entry contains letters or not | 625941bed58c6744b4257b70 |
def current(self): <NEW_LINE> <INDENT> if Global.current_member_list() is not None: <NEW_LINE> <INDENT> return self in Global.current_member_list().members <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Is this member in current member list? | 625941bea219f33f3462887d |
def to_file(self, file):
    """Write a textual description of the image data to *file*.

    Each entry produced by ``to_line_list`` is written on its own
    newline-terminated line.

    Args:
        file: a writable file-like object that receives the text
    """
    for text_line in self.to_line_list():
        file.write(text_line)
        file.write("\n")
def updateOneService(reservation, old_service):
    """Assign a driver and her vehicle to a service that was reserved.

    Requires:
        reservation: sublist structured as in the output of
            consultStatus.readReservationsFile.
        old_service: sublist structured as in the output of
            consultStatus.waiting4ServicesList.
    Ensures:
        a new service sublist with the driver and vehicle assigned to the
        reservation.  When the driver exceeds the allowed time the shorter
        "terminated" layout (as in consultStatus.readServicesFile) is
        produced instead.
    """
    new_service = []
    # carry over driver and vehicle from the previous service record
    new_service.append(old_service[INDEXDriverName])
    new_service.append(old_service[INDEXVehiclePlate])
    # take client, schedule and circuit data from the reservation
    new_service.append(reservation[INDEXClientNameInReservation])
    startHour, endHour = calculateDelay(old_service, reservation)
    new_service.append(startHour)
    new_service.append(endHour)
    new_service.append(reservation[INDEXCircuitInReservation])
    new_service.append(reservation[INDEXCircuitKmsInReservation])
    # update accumulated driving time and remaining allowances
    duration = durationReservation(reservation)
    new_accumulated_hours = add(old_service[INDEXAccumulatedTime], duration)
    allowed_time_left = diff(TIMELimit, new_accumulated_hours)
    new_accumulated_kms = int(old_service[INDEXAccumulatedKms]) + int(new_service[INDEXCircuitKms])
    allowed_kms_left = int(old_service[INDEXINDEXVehicAutonomy]) - new_accumulated_kms
    if allowed_time_left < TIMEThreshold:
        # driver out of allowed time: short "terminated" record
        new_service.append(STATUSTerminated)
    elif allowed_kms_left < AUTONThreshold:
        # vehicle low on autonomy: send to charging, reset accumulated kms
        new_service.append(STATUSCharging)
        new_service.append(new_accumulated_hours)
        new_service.append(old_service[INDEXINDEXVehicAutonomy])
        new_service.append('0')
    else:
        # normal case: driver/vehicle go back to stand-by
        new_service.append(STATUSStandBy)
        new_service.append(new_accumulated_hours)
        new_service.append(old_service[INDEXINDEXVehicAutonomy])
        new_service.append(str(new_accumulated_kms))
    return new_service
def aStarSearch(problem, heuristic=nullHeuristic):
    """Search the node that has the lowest combined cost and heuristic first."""
    fringe = util.PriorityQueue()
    initList = []
    # root node: (state, actions-so-far, heuristic estimate); pushed with
    # the heuristic of the start state as its priority
    root = (problem.getStartState(), initList, heuristic(problem.getStartState(), problem))
    fringe.push(root, heuristic(problem.getStartState(), problem))
    # NOTE(review): the magic constant 3 presumably selects the A* strategy
    # inside solveTheTraversalProblem — confirm against its definition.
    return solveTheTraversalProblem(problem, 3, fringe, heuristic)
def handle_msgtype06(self, data):
    """MsgType=06 - time out message from the panel (PM).

    Clears any pending expected responses.  While in download mode the
    download is rescheduled; otherwise the timeout is simply acknowledged.
    """
    log.debug("[handle_msgtype06] Timeout Received data {0}".format(self._toString(data)))
    # nothing outstanding can be matched after a timeout
    self.pmExpectedResponse = []
    if self.pmDownloadMode:
        self._delayDownload()
        log.debug("[handle_msgtype06] Timeout Received - Going to Standard Mode and going to try download again soon")
    else:
        self._sendAck()
def K_ws(self, material, fem_solver, Eulerw, Eulerp=None, elem=0):
    """Assemble the (w,s) coupling stiffness matrix of one element.

    Integrates ``K_ws_Integrand`` over the element's Gauss points, weighted
    by the Jacobian determinants in ``self.detJ``, and returns the negated
    accumulated matrix.
    """
    meshes = self.meshes  # NOTE(review): unused here — confirm if needed
    function_spaces = self.function_spaces
    # basis functions of the 'w' and 's' function spaces
    Bases_w = function_spaces[1].Bases
    Bases_s = function_spaces[2].Bases
    # scratch shape-function matrices reused across Gauss points
    Nw = np.zeros((Bases_w.shape[0]*self.ndim, self.ndim), dtype=np.float64)
    Ns = np.zeros((self.ndim, Bases_s.shape[0]*self.ndim), dtype=np.float64)
    stiffness = np.zeros((Bases_w.shape[0]*self.ndim, Bases_s.shape[0]*self.ndim))
    AllGauss = function_spaces[0].AllGauss
    for counter in range(AllGauss.shape[0]):
        # integrand evaluated at this Gauss point, scaled by det(J)
        Nw_Ns = self.K_ws_Integrand(Nw, Ns, Bases_w[:, counter], Bases_s[:, counter])
        stiffness += Nw_Ns*self.detJ[counter]
    return -stiffness
def _update_fields(self, account):
    """Update the login-form widgets according to the typed *account*.

    Resets everything, disables the password controls when the password
    box is empty, resolves the account's stored service (host/port), and
    restores remembered credentials based on the remember-level attribute
    (3 = auto login, 2 = remember password, 1 = remember account only).
    """
    self._clear_all()
    if self.txt_password.get_text() == '':
        # nothing to remember without a password
        self.remember_password.set_sensitive(False)
        self.auto_login.set_sensitive(False)
    if account == '':
        # empty account: clear and lock the password field
        self.remember_account.set_sensitive(False)
        self.txt_password.set_text('')
        self.txt_password.set_sensitive(False)
        return
    self.remember_account.set_sensitive(True)
    if account in self.config.d_user_service:
        # restore the service (host/port) this account last used
        service = self.config.d_user_service[account]
        if service in self.services:
            service_data = self.services[service]
            self.server_host = service_data['host']
            self.server_port = service_data['port']
            self.config.service = service
    if account in self.accounts:
        # attr encodes the remember level stored for this account
        attr = int(self.remembers[account])
        self.remember_account.set_sensitive(False)
        self.forget_me.set_sensitive(True)
        self.btn_status.set_status(int(self.status[account]))
        passw = self.accounts[account]
        avatar_path = self.config_dir.join(self.server_host, account, 'avatars', 'last')
        self.avatar.set_from_file(avatar_path)
        if attr == 3:
            # auto login: stored password, field locked
            self.txt_password.set_text(base64.b64decode(passw))
            self.txt_password.set_sensitive(False)
            self.auto_login.set_active(True)
        elif attr == 2:
            # remember password only
            self.txt_password.set_text(base64.b64decode(passw))
            self.txt_password.set_sensitive(False)
            self.remember_password.set_active(True)
        elif attr == 1:
            # remember account only; password must be typed
            self.remember_account.set_active(True)
            self.remember_account.set_sensitive(True)
            self.remember_password.set_sensitive(False)
            self.auto_login.set_sensitive(False)
        else:
            # unknown remember level: treat the config as corrupt
            self.show_error(_('Error while reading user config'))
            self._clear_all()
    else:
        # unknown account: show the default logo as avatar
        self.avatar.set_from_file(gui.theme.logo)
def eq_of_motion(w, t, p):
    """State derivatives for a single mass driven by a PID force.

    Arguments:
        w : state vector ``[x, x_dot]``
        t : time (unused; kept for the ODE solver's signature)
        p : parameter vector ``[m, desired, PID_force, F_disturb]``
            (``desired`` is not used here)

    Returns:
        ``[dx/dt, dx_dot/dt]``
    """
    position, velocity = w
    mass, _desired, pid_force, disturbance = p
    # Newton's second law: acceleration = net force / mass
    acceleration = 1.0 / mass * (pid_force - disturbance)
    return [velocity, acceleration]
def load_like_n_dislikes(num):
    """Create *num* random LikeDislike fixture rows.

    Each row links a random existing Question and Answer with random
    like/dislike counts in [10, 100].

    :param num: number of LikeDislike objects to create
    :return: None (rows are persisted via the ORM; each one is printed)
    """
    que = Question.objects.all()
    ans = Answer.objects.all()
    for i in range(num):
        data = dict()
        data['question'] = random.choice(que)
        data['answer'] = random.choice(ans)
        data['like'] = random.randint(10, 100)
        data['dislike'] = random.randint(10, 100)
        like_obj = LikeDislike.objects.create(**data)
        print('Like & Dislike Created: ', like_obj)
def lengthOfLongestSubstring(self, s):
    """Return the length of the longest substring of *s* without
    repeating characters.

    Sliding window over *s* keeping the last seen index of every
    character; the window start jumps past a repeat instead of shrinking
    one step at a time.
    """
    last_seen = {}
    longest = 0
    start = 0
    for idx, ch in enumerate(s):
        if ch in last_seen and last_seen[ch] >= start:
            # repeat inside the current window: move the start past it
            start = last_seen[ch] + 1
        last_seen[ch] = idx
        longest = max(longest, idx - start + 1)
    return longest
def maximize_ratio(cnt: scipy.sparse.csr_matrix, indices: List[str], avg: Optional[str] = 'exist'):
    """Rank items by simple pairwise win ratios.

    For every ordered pair (i, j) the ratio ``cnt[i, j] / (cnt[i, j] +
    cnt[j, i])`` is computed; row sums of these ratios are the per-item
    scores.

    Parameters
    ----------
    cnt : scipy.sparse matrix
        Quadratic sparse matrix with pairwise frequency data.
    indices : List[str]
        Identifier of each row/column of ``cnt``.
    avg : Optional[str] = 'exist'
        Denominator used for averaging: 'all' divides by the row length,
        'exist' divides by the number of existing ratios in the row.

    Returns
    -------
    positions : np.ndarray
        Array positions that sort the original data in descending score order.
    sortedids : np.ndarray
        Item IDs reordered accordingly.
    metrics : np.ndarray
        The (averaged) score of each item, sorted descending.
    info : dict
        Currently always empty; reserved for method-specific details.
    """
    cnt = cnt.tocsr()
    # reciprocal of the pairwise totals, only where totals exist
    pair_totals = cnt + cnt.T
    pair_totals.data = 1.0 / pair_totals.data
    ratios = pair_totals.multiply(cnt)
    metrics = np.asarray(ratios.sum(axis=1)).ravel()
    if avg == 'all':
        metrics /= len(metrics)
    elif avg == 'exist':
        # divide each row's sum by how many ratios that row participates in
        row_idx, _ = (ratios + ratios.T).nonzero()
        rows, counts = np.unique(row_idx, return_counts=True)
        metrics[rows] /= counts
    order = np.argsort(-metrics)
    return order, np.array(indices)[order], metrics[order], {}
def L_model_forward(X, parameters):
    """Forward propagation for [LINEAR->RELU]*(L-1) -> LINEAR->SIGMOID.

    Arguments:
        X -- data, numpy array of shape (input size, number of examples)
        parameters -- output of initialize_parameters_deep()

    Returns:
        AL -- last post-activation value (sigmoid output)
        caches -- list of caches: one per linear_activation_forward call,
            indexed 0 .. L-1
    """
    caches = []
    A = X
    L = len(parameters) // 2  # number of layers (each layer has a W and a b)
    # Hidden layers: [LINEAR -> RELU] repeated L-1 times.
    for l in range(1, L):
        A_prev = A
        A, cache = linear_activation_forward(A_prev, parameters["W" + str(l)], parameters["b" + str(l)], "relu")
        caches.append(cache)
    # Output layer: LINEAR -> SIGMOID.
    # Bug fix: feed the last hidden activation A (the original passed
    # A_prev, skipping the final hidden layer) and bind the result to AL
    # (the original assigned A but asserted/returned the undefined AL).
    AL, cache = linear_activation_forward(A, parameters["W" + str(L)], parameters["b" + str(L)], "sigmoid")
    caches.append(cache)
    assert AL.shape == (1, X.shape[1])
    return AL, caches
def mapLighting(lightpos = (-10.99, 20.0, 20.0), mesh = None, res = (1024, 1024), border = 1):
    """Bake a lightmap for the human from a single light.

    Uses OpenGL hardware acceleration when render-skin support is
    available, falling back to the slower software rasterizer on any
    failure (or when the feature is absent).
    """
    if mh.hasRenderSkin():
        try:
            return mapLightingGL(lightpos, mesh, res, border)
        except Exception as e:
            # hardware path failed at runtime: log and degrade gracefully
            log.debug(e)
            log.debug("Hardware skin rendering failed, falling back to software render.")
            return mapLightingSoft(lightpos, mesh, res, border)
    else:
        return mapLightingSoft(lightpos, mesh, res, border)
def test_mfit_6():
    """Test marking peak status and removing error peaks."""
    image = numpy.ones((40,40))
    mfit = daoFitC.MultiFitter2D(sigma_range = [1.0, 2.0])
    mfit.initializeC(image)
    mfit.newImage(image)
    mfit.newBackground(image)
    # five peaks at the same position with increasing sigma
    n_peaks = 5
    peaks = {"x" : numpy.ones(n_peaks) * 20.0, "y" : numpy.ones(n_peaks) * 20.0, "z" : numpy.zeros(n_peaks), "sigma" : 0.1*numpy.arange(n_peaks) + 1.0}
    mfit.newPeaks(peaks, "testing")
    mfit.removeErrorPeaks()
    # nothing is in error yet, so all five peaks remain
    assert (mfit.getNFit() == 5)
    # mark peaks 1 and 3 as errors and purge them
    status = mfit.getPeakProperty("status")
    status[1] = iaUtilsC.ERROR
    status[3] = iaUtilsC.ERROR
    mfit.setPeakStatus(status)
    mfit.removeErrorPeaks()
    assert (mfit.getNFit() == 3)
    # the surviving peaks keep their sigmas (indices 0, 2, 4)
    w = mfit.getPeakProperty("xsigma")
    assert numpy.allclose(w, numpy.array([1.0, 1.2, 1.4]))
    mfit.cleanup(verbose = False)
def deal_extra_cards(self, deck):
    """Add one column of cards to the grid when there is no match.

    Only deals when the deck still has cards AND exactly DEAL cards are
    currently in the grid.  Each new card fills the first empty (None)
    slot and is placed at that slot's screen coordinates.
    """
    if not deck.empty() and self.cards_in_grid() == DEAL:
        for c in range(0, COL):
            # first free slot in the grid
            i = self.grid.index(None)
            self.grid[i] = deck.deal_next_card()
            self.place_a_card(self.grid[i], self.grid_to_xy(i)[0], self.grid_to_xy(i)[1])
def chess_control(self, event: tk.Event = None):
    """Handle a key press in the canvas: write a digit into the selected cell.

    Only digits 1-9 are accepted, only when a cell is selected, and only
    when that cell was empty (0) in the original puzzle state.

    :param event: tk key event
        NOTE(review): the default ``None`` would crash on ``event.char``
        — confirm this handler is only ever bound with a real event.
    :return: None
    """
    num = event.char
    if num and num in "123456789":
        if self.selected:
            if self.original_state[self.selected[0]][self.selected[1]] == 0:
                self.set_number(self.selected, int(num))
def get_downloads_id_with_http_info(self, id, workspace_id, **kwargs):
    """Get a zip file of a previously created download object.

    Synchronous by default; pass a ``callback`` kwarg to make the request
    asynchronous (the thread is returned instead of the response).

    :param callback function: callback for asynchronous request (optional)
    :param int id: ID of download (required)
    :param int workspace_id: ID of Workspace (required)
    :return: ExpiringUrl (or the request thread when called asynchronously)
    """
    # every keyword this generated method accepts
    all_params = ['id', 'workspace_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # reject unknown keyword arguments
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_downloads_id" % key
            )
        params[key] = val
    del params['kwargs']
    # validate required parameters
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_downloads_id`")
    if ('workspace_id' not in params) or (params['workspace_id'] is None):
        raise ValueError("Missing the required parameter `workspace_id` when calling `get_downloads_id`")
    collection_formats = {}
    # assemble the HTTP request pieces
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = []
    if 'workspace_id' in params:
        query_params.append(('workspace_id', params['workspace_id']))
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
    auth_settings = []
    return self.api_client.call_api('/downloads/{id}', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ExpiringUrl',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def argmax(self, axis: Optional[int] = None, out: Optional[np.ndarray] = None) -> np.ndarray:
    """Indices of the maximum values of ``self.data`` along an axis.

    Parameters
    ----------
    axis : int, optional
        By default the index is into the flattened array; otherwise the
        search runs along the given axis.
    out : numpy.ndarray, optional
        If provided, the result is stored here; it must have a suitable
        shape and dtype.

    Returns
    -------
    numpy.ndarray
    """
    return np.argmax(self.data, axis=axis, out=out)
def __init__(self, coords0, coords1):
    """A line in WGS84 coordinate space.

    Parameters
    ----------
    coords0 : Coords
        Coordinates of the start of the line.
    coords1 : Coords
        Coordinates of the end of the line.
    """
    self.coords0 = coords0  # start point
    self.coords1 = coords1  # end point
def __init__(self, ideal_temperature=20):
    """Initialize the agent's model of its environment.

    All temperatures start at the ideal value and the heater is off.
    """
    self.ideal_temperature = ideal_temperature  # target temperature
    self.last_temperature = ideal_temperature   # previous reading
    self.temperature = ideal_temperature        # current reading
    self.is_heating = False                     # heater state flag
@cmdopts([
    ('template=', 'T', 'Database template to use when creating new database, defaults to "template_postgis"'),
])
@task
def create_db(options):
    """Create the project's postgres database and enable PostGIS.

    NOTE(review): the option help text claims the default template is
    "template_postgis", but the code defaults to 'template1' — confirm
    which is intended.
    """
    from geoevents import settings
    template = options.get('template', 'template1')
    database = settings.DATABASES.get('default').get('NAME')
    sh('createdb {database} -T {template}'.format(database=database, template=template))
    # enable the PostGIS extensions on the freshly created database
    sql = '"CREATE EXTENSION postgis; CREATE EXTENSION postgis_topology;"'
    sh('psql -d {database} -c {sql}'.format(database=database, sql=sql))
def _get_image_location(self, endpoint, v1image):
    """Build the image location URL for *v1image* under *endpoint*.

    :param endpoint: the v2 glance http client endpoint
    :param v1image: the v1 image
    :returns: the image location url
    """
    base = endpoint if endpoint.endswith('/') else endpoint + '/'
    return base + constants.IMAGE_LOCATION_PATH + v1image.id
def create_floppy(params, prepare=True):
    """Create (or locate) the test floppy image.

    :param params: parameters for the test; ``floppy_name`` names the image
    :param prepare: if True, (re)create the blank 1.44MB image with dd
    :return: absolute path to the floppy image file
    """
    error_context.context("creating test floppy", test.log.info)
    floppy = params["floppy_name"]
    if not os.path.isabs(floppy):
        # relative names are resolved against the test data directory
        floppy = os.path.join(data_dir.get_data_dir(), floppy)
    if prepare:
        # 2880 x 512-byte sectors = a standard 1.44MB floppy
        process.run("dd if=/dev/zero of=%s bs=512 count=2880" % floppy)
    return floppy
def get_model(model_path, model_type='unet11'):
    """Load a trained segmentation model from a checkpoint.

    :param model_path: path to the saved checkpoint
    :param model_type: 'unet16' or 'unet11'
    :return: the model in eval mode, moved to the GPU when available
    :raises ValueError: for an unknown *model_type* (the original left
        ``model`` unbound, raising a confusing NameError)
    """
    num_classes = 11
    if model_type == 'unet16':
        model = UNet16(num_classes=num_classes)
    elif model_type == 'unet11':
        model = UNet11(num_classes=num_classes)
    else:
        raise ValueError('Unknown model_type: {}'.format(model_type))
    state = torch.load(str(model_path))
    # checkpoints saved through nn.DataParallel prefix keys with 'module.'
    state = {key.replace('module.', ''): value for key, value in state['model'].items()}
    model.load_state_dict(state)
    # Bug fix: the original returned model.cuda() before eval() was ever
    # reached, so the CUDA path served a model still in training mode.
    model.eval()
    if torch.cuda.is_available():
        return model.cuda()
    return model
@asyncio.coroutine
def test_async_get_all_descriptions(hass):
    """Test async_get_all_descriptions.

    Sets up the group component and checks its 'reload' service is
    described, then adds the logger component and checks both domains
    are present with description and fields.
    """
    group = hass.components.group
    group_config = {group.DOMAIN: {}}
    yield from async_setup_component(hass, group.DOMAIN, group_config)
    descriptions = yield from service.async_get_all_descriptions(hass)
    assert len(descriptions) == 1
    assert "description" in descriptions["group"]["reload"]
    assert "fields" in descriptions["group"]["reload"]
    # a second component's services must appear after it is set up
    logger = hass.components.logger
    logger_config = {logger.DOMAIN: {}}
    yield from async_setup_component(hass, logger.DOMAIN, logger_config)
    descriptions = yield from service.async_get_all_descriptions(hass)
    assert len(descriptions) == 2
    assert "description" in descriptions[logger.DOMAIN]["set_level"]
    assert "fields" in descriptions[logger.DOMAIN]["set_level"]
def parse_args():
    """Build the command-line parser for the funnel-analysis tool.

    :return: a configured ``argparse.ArgumentParser`` (note: the parser
        itself, not the parsed namespace)
    """
    cli = ArgumentParser()
    cli.add_argument("-f", "--funnels", required=True,
                     help="The JSON file with the details about the funnels to be analyzed")
    cli.add_argument("-k", "--key", required=True,
                     help="The key to authenticate within Mixpanel")
    cli.add_argument("-v", "--verbosity", action="count", default=0,
                     help="increase output verbosity (max: -v)")
    cli.add_argument("-o", "--output", choices=["terminal", "csv", "both"], default="terminal",
                     help="The form in which you want to get the output")
    cli.add_argument("-of", "--output_format", choices=["long", "short"], default="short",
                     help="The format in which the output will be visualized/recorded")
    cli.add_argument("-x", "--crossed_filters", action="store_true",
                     help="Whether or not run with filters combinations")
    return cli
def test_update_data_until(code, end_year, end_month):
    """Update local historical data for *code* up to the given end date.

    Parameters
    ----------
    code : int or str
    end_year : int
    end_month : int

    Return
    ------
    bool
        False when no local data exists for *code*, True after updating.
    """
    if not test_is_local_data_exist(code):
        return False
    # resume the download from the last locally stored timestamp
    date_str = test_get_local_date_last_date(code)
    date_obj = datetime.datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S')
    test_load_historical_data(code, date_obj.year, date_obj.month, end_year, end_month)
    return True
def home(self):
    """Ping the configured cellphones to see if anyone is home.

    Sets ``self.state`` to "here" on the first host that answers a single
    ping, otherwise leaves it "away".
    """
    self.state = "away"
    for h in self.hostname:
        # NOTE(review): os.system builds a shell command by concatenation;
        # if hostnames can ever be untrusted this is shell-injectable —
        # consider subprocess.run with a list argument.
        response = os.system("ping -c 1 " + h)
        if response == 0:
            # exit status 0 means the host replied
            self.state = "here"
            break
@test_harness.test(['emulator'])
def read_valid_hex(*unused):
    """Read a valid hex file.

    The passed file exercises valid syntactic constructs: the loader is
    pinged, the file contents are loaded into memory at address 0, and an
    execute command is issued before expecting a normal exit.
    """
    with SerialLoader('testhex.txt') as loader:
        # handshake
        loader.expect_serial_bytes([PING_REQ])
        loader.send_serial([PING_ACK])
        # memory load of the hex file contents at address 0
        check_load_memory_command(loader, 0, TESTHEX_CONTENTS)
        # start execution and verify a clean exit
        loader.expect_serial_bytes([EXECUTE_REQ])
        loader.send_serial([EXECUTE_ACK])
        loader.expect_normal_exit()
def __init__(self, url: str, cache_ttl: int) -> None:
    """Initialize the feed wrapper.

    Args:
        url: the URL to fetch from
        cache_ttl: how long the cache lives; ignored if a cache is
            already registered for this url

    Raises:
        ValueError: when the feed cannot be fetched or parsed
    """
    self.url = url
    self.cached_feed = getCachedFeed(url, cache_ttl)
    try:
        self.feed_size = len(self.cached_feed.get().entries)
    except Exception as exc:
        # Bug fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt and discarded the original cause; narrow it
        # and chain the underlying error for debuggability.
        raise ValueError("Couldn't get from URL") from exc
def _runAnDone(self, event):
    """Clean up temporary maps/files when an analysis run finishes.

    Deletes the temporary input point maps and the ascii points file;
    for v.net.flow runs the extra cut map is removed as well, then the
    generic done handler runs.
    """
    self.tmp_maps.DeleteTmpMap(self.tmpInPts)
    self.tmp_maps.DeleteTmpMap(self.tmpInPtsConnected)
    try_remove(self.tmpPtsAsciiFile)
    if event.cmd[0] == "v.net.flow":
        # v.net.flow produces one additional temporary map
        self.tmp_maps.DeleteTmpMap(self.vnetFlowTmpCut)
    self._onDone(event)
def test_present(self):
    """Test docker_network.present.

    Mocks the docker execution-module functions, runs the state with one
    container, and verifies the network is created, the container is
    connected, and the returned changes/result are as expected.
    """
    docker_create_network = Mock(return_value='created')
    docker_connect_container_to_network = Mock(return_value='connected')
    docker_inspect_container = Mock(return_value={'Id': 'abcd'})
    # fake salt execution-module dunder as the state module would see it
    __salt__ = {'docker.create_network': docker_create_network,
                'docker.inspect_container': docker_inspect_container,
                'docker.connect_container_to_network': docker_connect_container_to_network,
                'docker.networks': Mock(return_value=[]),
                }
    with patch.dict(docker_state.__dict__, {'__salt__': __salt__}):
        ret = docker_state.present(
            'network_foo',
            containers=['container'],
        )
    docker_create_network.assert_called_with('network_foo', driver=None)
    # the container id from inspect_container is what gets connected
    docker_connect_container_to_network.assert_called_with('abcd', 'network_foo')
    self.assertEqual(ret, {'name': 'network_foo',
                           'comment': '',
                           'changes': {'connected': 'connected',
                                       'created': 'created'},
                           'result': True})
def next(self):
    """Return the next decoded character, or " " when exhausted.

    ``self.reps[i]`` holds how many times ``self.alphas[i]`` repeats;
    ``self.cnt`` counts down the remaining repeats of the current symbol.

    :rtype: str
    """
    if not self.hasNext():
        return " "
    symbol = self.alphas[self.idx]
    if self.cnt > 1:
        # more repeats of the current symbol remain
        self.cnt -= 1
    else:
        # advance to the next (symbol, count) pair, if any remain
        self.cnt = 0
        self.idx += 1
        if self.idx < len(self.reps):
            self.cnt = self.reps[self.idx]
    return symbol
def get_item(sequence, indexes=(0, ), default=""):
    """Retrieve the items of *sequence* at the given *indexes*.

    For a generator the next yielded value is returned (*indexes* is not
    used in that case).  With a single index the bare item is returned;
    with several, a list in the same order as *indexes*.  Missing
    positions fall back to *default*.
    """
    if inspect.isgenerator(sequence):
        # Bug fix: an exhausted generator used to raise StopIteration;
        # now it falls back to *default*, matching the indexed path.
        return next(sequence, default)
    items = []
    for index in indexes:
        try:
            item = sequence[index]
        except IndexError:
            item = default
        if len(indexes) == 1:
            # single index: return the bare item, not a one-element list
            items = item
        else:
            items.append(item)
    return items
def tags_published():
    """Return the tags used by at least one published entry.

    Imports are local to avoid import-time coupling with the tagging and
    entry models.
    """
    from tagging.models import Tag
    from zinnia.models.entry import Entry
    tags_entry_published = Tag.objects.usage_for_queryset(Entry.published.all())
    # re-query by name so a proper Tag queryset (not a plain list) is returned
    return Tag.objects.filter(name__in=[t.name for t in tags_entry_published])
def __exit__(self, *args):
    """Exit the runtime context: close the management database.

    Exceptions are not suppressed (nothing is returned).
    """
    log.debug('Closing management database')
    self.db.close()
def stop(self):
    """Stop retrieving data from the underlying 'DataBlock'.

    Clears the run flag, joins the background thread, and restores the
    block's original ``set_block_size`` if it was monkey-patched.
    """
    self._rflag = False
    self._bthread.join()
    if hasattr(self._block, '_old_set_block_size'):
        # undo the set_block_size override installed while running
        self._block.set_block_size = self._block._old_set_block_size
        delattr(self._block, '_old_set_block_size')
def testBasicOneFile(self):
    """Load a single config file and verify every section type parses.

    Checks section count, then for each section family (event analyzers,
    alert analyzers, listeners, filters) verifies the active sections and
    their options match the fixture file.
    """
    cf1 = Configuration(['data/configuration_test/configurationtest_01.conf'])
    self.assertEqual(len(cf1.sections()), 4)
    # event analyzer section and its options
    ea = cf1.get_active_sections(CONFIG_EVENT_ANALYZERS)
    self.assertEqual(len(ea), 1)
    self.assertEqual(ea[0], (CONFIG_EVENT_ANALYZERS + '.mya1', 'mya1'))
    opts = cf1.items(ea[0][0])
    self.assertEqual(len(opts), 3)
    opts_dict = dict(opts)
    self.assertEqual(opts_dict['enabled'], 'all')
    self.assertEqual(opts_dict['rule_file'], 'myrules1')
    self.assertEqual(opts_dict['class'], 'analyzer1.Analyzer1')
    # alert analyzer section and its options
    aa = cf1.get_active_sections(CONFIG_ALERT_ANALYZERS)
    self.assertEqual(len(aa), 1)
    self.assertEqual(aa[0], (CONFIG_ALERT_ANALYZERS + '.myaA', 'myaA'))
    opts = cf1.items(aa[0][0])
    self.assertEqual(len(opts), 3)
    opts_dict = dict(opts)
    self.assertEqual(opts_dict['enabled'], 'all')
    self.assertEqual(opts_dict['rule_file'], 'myrulesA')
    self.assertEqual(opts_dict['class'], 'analyzerA.AnalyzerA')
    # listener and filter sections (queried with the 'all' selector)
    al = cf1.get_active_sections(CONFIG_ALERT_LISTENERS, 'all')
    self.assertEqual(len(al), 1)
    self.assertEqual(al[0], (CONFIG_ALERT_LISTENERS + '.myl1', 'myl1'))
    af = cf1.get_active_sections(CONFIG_ALERT_FILTERS, 'all')
    self.assertEqual(len(af), 1)
    self.assertEqual(af[0], (CONFIG_ALERT_FILTERS + '.myf1', 'myf1'))
    return
def get_participants():
    """Get the total list of participants.

    Participants are returned as an object (present in the treaties
    or not).  # noqa: E501

    :rtype: Participant

    NOTE(review): this is an auto-generated stub (returns the placeholder
    string) — the real implementation is still missing.
    """
    return "do some magic!"
def __init__(self, Ksqr=None, sigma=None, g=None, y0=None, n=3, t0=0, tend=1, h=0.01, filename='out.txt', name='HardWorkingStudent'):
    """Set up the solver parameters and create the ODEs.

    Opens *filename* for writing; the handle stays open on ``self.f`` for
    the worker's lifetime.

    Bug fix: the original used mutable default arguments ([1], [0., 1.]),
    which are shared across calls; fresh lists are now created per call.
    The effective defaults are unchanged.
    """
    Ksqr = [1] if Ksqr is None else Ksqr
    sigma = [1] if sigma is None else sigma
    g = [1] if g is None else g
    y0 = [0., 1.] if y0 is None else y0
    super(workerSimple, self).__init__(Ksqr, sigma, g, y0, n, t0, tend, h)
    self.f = open(filename, 'w')
    self.filename = filename
    self.name = name
def add_server(hostname):
    """Configure data for a server running at *hostname*.

    Inserts one row into the imaging_servers table with the given FQDN.
    """
    sql.get_conn().execute(model.imaging_servers.insert(), fqdn=hostname)
def readAll(self):
    """Yield the raw bytes of every existing log file, oldest first.

    Flushes the current output file before reading so its contents are
    up to date.

    NOTE(review): this is a generator, so ``self.mutex`` is held for as
    long as the caller keeps iterating — confirm that is intended.
    """
    with self.mutex:
        if self._curOut:
            self._curOut.flush()
        for fname in self.allFileNames():
            if os.path.exists(fname):
                with open(fname, "rb") as fobj:
                    yield fobj.read()
def chemical_potential(self, n, L):
    """Find the 2D chemical potential self-consistently.

    Iterates ``calculate_chempot`` from the non-interacting starting guess
    ``mu = 2 * h22m * pi * n`` until successive values agree within the
    fixed tolerance.

    NOTE(review): the loop has no iteration cap — confirm convergence is
    guaranteed for all (n, L) this is called with.
    """
    tol = 0.0001  # absolute convergence tolerance on mu
    mu_old = 0
    mu = 2 * c.h22m * np.pi * n  # initial guess
    while (np.abs(mu - mu_old) > tol):
        mu_old = mu
        # number of occupied levels at the current mu, then the updated mu
        nu = self.get_imax(mu_old, L)
        mu = self.calculate_chempot(n, nu, L)
    return mu
def store_chunks(self, chunks):
    """Bulk-insert sensor chunks into the ``sensordata`` table.

    For every chunk ``(stamp, config_id)`` a row
    ``(stamp, config_id, 'default', 3.14)`` is inserted.
    """
    self.sqlite.executemany(
        "INSERT INTO sensordata(stamp, config_id, stream, result) "
        # Bug fix: 'default' is now a single-quoted SQL string literal.
        # Double quotes in SQLite are identifier quotes that only fall
        # back to string literals when no matching column exists.
        "VALUES(?, ?, 'default', 3.14)",
        chunks,
    )
def is_planar(graph):
    """Test planarity of *graph* using the planarity library's PGraph."""
    return planarity.PGraph(graph).is_planar()
def post(self, request):
    """Create a hello message with the submitted name.

    Validates the request through HelloSerializer; returns the greeting
    on success, or the serializer errors with HTTP 400 otherwise.
    """
    serializer = serializers.HelloSerializer(data=request.data)
    if serializer.is_valid():
        name = serializer.data.get('name')
        message = 'Hello {0}'.format(name)
        return Response({'message' : message})
    else:
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def get_fieldsets(self, request, obj=None): <NEW_LINE> <INDENT> app_config_default = self._app_config_select(request, obj) <NEW_LINE> if app_config_default is None and request.method == 'GET': <NEW_LINE> <INDENT> return super(PostAdmin, self).get_fieldsets(request, obj) <NEW_LINE> <DEDENT> if not obj: <NEW_LINE> <INDENT> config = app_config_default <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> config = obj.app_config <NEW_LINE> <DEDENT> fsets = deepcopy(self._fieldsets) <NEW_LINE> if config: <NEW_LINE> <INDENT> if config.use_abstract: <NEW_LINE> <INDENT> fsets[0][1]['fields'].append('abstract') <NEW_LINE> <DEDENT> if not config.use_placeholder: <NEW_LINE> <INDENT> fsets[0][1]['fields'].append('post_text') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if get_setting('USE_ABSTRACT'): <NEW_LINE> <INDENT> fsets[0][1]['fields'].append('abstract') <NEW_LINE> <DEDENT> if not get_setting('USE_PLACEHOLDER'): <NEW_LINE> <INDENT> fsets[0][1]['fields'].append('post_text') <NEW_LINE> <DEDENT> <DEDENT> if get_setting('MULTISITE'): <NEW_LINE> <INDENT> fsets[1][1]['fields'][0].append('sites') <NEW_LINE> <DEDENT> if request.user.is_superuser: <NEW_LINE> <INDENT> fsets[1][1]['fields'][0].append('author') <NEW_LINE> <DEDENT> filter_function = get_setting('ADMIN_POST_FIELDSET_FILTER') <NEW_LINE> if callable(filter_function): <NEW_LINE> <INDENT> fsets = filter_function(fsets, request, obj=obj) <NEW_LINE> <DEDENT> return fsets | Customize the fieldsets according to the app settings
:param request: request
:param obj: post
:return: fieldsets configuration | 625941be60cbc95b062c6453 |
def create_user(self, username, email, password): <NEW_LINE> <INDENT> if app.database["Users"] and username in app.database["Users"]: <NEW_LINE> <INDENT> return (False, "Username exists!") <NEW_LINE> <DEDENT> self.emails = [x[0] for x in app.database['Users'].values()] <NEW_LINE> if app.database["Users"] and email in self.emails: <NEW_LINE> <INDENT> return (False, "Email exists!") <NEW_LINE> <DEDENT> self.new_user = User(username, email, password) <NEW_LINE> user_details = self.new_user.credentials() <NEW_LINE> app.database["Users"][self.new_user.username] = user_details <NEW_LINE> return (True, 'User creation successful!') | Creates and adds user to the app database.
Returns:
A tuple of (True, username) if success adding user,
(False, error) otherwise. | 625941be7cff6e4e81117896 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.